From 5032434b101f25fa44d2e1f8dc8393e30af1ed4f Mon Sep 17 00:00:00 2001 From: "Stone, Avi (as206k)" Date: Thu, 12 Apr 2018 15:46:31 +0300 Subject: DCAE-D be initial commit DCAE-D be initial commit Issue-ID: SDC-1218 Change-Id: Id18ba96c499e785aa9ac395fbaf32d57f08c281b Signed-off-by: Stone, Avi (as206k) --- .../java/org/onap/sdc/dcae/checker/Catalog.java | 444 +++ .../onap/sdc/dcae/checker/CatalogException.java | 14 + .../java/org/onap/sdc/dcae/checker/Checker.java | 3643 ++++++++++++++++++++ .../onap/sdc/dcae/checker/CheckerException.java | 18 + .../org/onap/sdc/dcae/checker/CommonLocator.java | 144 + .../java/org/onap/sdc/dcae/checker/Construct.java | 22 + .../main/java/org/onap/sdc/dcae/checker/Data.java | 895 +++++ .../main/java/org/onap/sdc/dcae/checker/Facet.java | 37 + .../main/java/org/onap/sdc/dcae/checker/JSP.java | 624 ++++ .../java/org/onap/sdc/dcae/checker/Process.java | 29 + .../org/onap/sdc/dcae/checker/ProcessBuilder.java | 24 + .../java/org/onap/sdc/dcae/checker/Processor.java | 11 + .../onap/sdc/dcae/checker/ProcessorException.java | 28 + .../java/org/onap/sdc/dcae/checker/Report.java | 102 + .../java/org/onap/sdc/dcae/checker/Repository.java | 50 + .../java/org/onap/sdc/dcae/checker/Target.java | 80 + .../org/onap/sdc/dcae/checker/TargetError.java | 43 + .../java/org/onap/sdc/dcae/checker/TargetInfo.java | 20 + .../org/onap/sdc/dcae/checker/TargetLocator.java | 20 + .../java/org/onap/sdc/dcae/checker/Workflows.java | 120 + .../dcae/checker/annotations/.Validates.java.swp | Bin 0 -> 12288 bytes .../sdc/dcae/checker/annotations/Catalogs.java | 14 + .../onap/sdc/dcae/checker/annotations/Checks.java | 19 + .../sdc/dcae/checker/annotations/Validates.java | 15 + .../org/onap/sdc/dcae/checker/package-info.java | 101 + .../main/resources/tosca/tosca-common-types.yaml | 665 ++++ .../main/resources/tosca/tosca-examples-types.yaml | 117 + .../main/resources/tosca/tosca-network-types.yaml | 103 + .../src/main/resources/tosca/tosca-nfv-types.yaml | 143 + .../resources/tosca/tosca_simple_yaml_1_0.grammar | 1262 +++++++ .../resources/tosca/tosca_simple_yaml_1_1.grammar | 1583 +++++++++ 31 files changed, 10390 insertions(+) create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CatalogException.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CheckerException.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Facet.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/JSP.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Process.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessBuilder.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Processor.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessorException.java create mode 100644 
dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Report.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Repository.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetError.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetInfo.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetLocator.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Workflows.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/.Validates.java.swp create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Catalogs.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Checks.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Validates.java create mode 100644 dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/package-info.java create mode 100644 dcaedt_validator/checker/src/main/resources/tosca/tosca-common-types.yaml create mode 100644 dcaedt_validator/checker/src/main/resources/tosca/tosca-examples-types.yaml create mode 100644 dcaedt_validator/checker/src/main/resources/tosca/tosca-network-types.yaml create mode 100644 dcaedt_validator/checker/src/main/resources/tosca/tosca-nfv-types.yaml create mode 100644 dcaedt_validator/checker/src/main/resources/tosca/tosca_simple_yaml_1_0.grammar create mode 100644 dcaedt_validator/checker/src/main/resources/tosca/tosca_simple_yaml_1_1.grammar diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java new file mode 100644 index 0000000..1512e56 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java @@ -0,0 +1,444 @@ +package org.onap.sdc.dcae.checker; + +import java.util.Iterator; +import java.util.Collection; +import java.util.Comparator; +import java.util.Set; +import java.util.Map; +import java.util.List; +import java.util.EnumMap; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.ArrayList; +import java.util.Collections; + +import java.util.stream.Collectors; + +import java.net.URI; + +import com.google.common.base.Predicate; +import com.google.common.base.Function; +import com.google.common.collect.Iterators; +import com.google.common.collect.Table; +import com.google.common.collect.HashBasedTable; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; + +/* + * Oddball: tracking inputs as data templates could be seen as rather + * odd but we see them as instances of data types, in the same way node + * templates are instances of node types. + */ +public class Catalog { + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + /* Type hierarchies are stored as maps from a type name to its definition + * Not the best but easy to follow hierarchies towards their root .. 
+ */ + private EnumMap> types = + new EnumMap>(Construct.class); + /* track templates: we track templates (tye instances) first per target then per contruct. + * This allows us to share the catalog among multiple templates sharign the same type set + */ + private Map>> templates = + new HashMap>>(); + + private Catalog parent; + + public Catalog(Catalog theParent) { + this.parent = theParent; + /* there are no requirement types, they are the same as capability types */ + types.put(Construct.Data, new LinkedHashMap()); + types.put(Construct.Capability, new LinkedHashMap()); + types.put(Construct.Relationship, new LinkedHashMap()); + types.put(Construct.Artifact, new LinkedHashMap()); + types.put(Construct.Interface, new LinkedHashMap()); + types.put(Construct.Node, new LinkedHashMap()); + types.put(Construct.Group, new LinkedHashMap()); + types.put(Construct.Policy, new LinkedHashMap()); + + } + + public Catalog() { + this(null); + } + + public boolean addType(Construct theConstruct, String theName, Map theDef) { + if (hasType(theConstruct, theName)) { + return false; + } + getConstructTypes(theConstruct).put(theName, theDef); + return true; + } + + public Map getTypeDefinition(Construct theConstruct, String theName) { + Map constructTypes = getConstructTypes(theConstruct); + Map typeDef = constructTypes.get(theName); + if (typeDef == null && this.parent != null) { + return this.parent.getTypeDefinition(theConstruct, theName); + } + return typeDef; + } + + public boolean hasType(Construct theConstruct, String theName) { + Map constructTypes = getConstructTypes(theConstruct); + boolean res = constructTypes.containsKey(theName); + if (!res && this.parent != null) { + res = this.parent.hasType(theConstruct, theName); + } + return res; + } + + protected Map getConstructTypes(Construct theConstruct) { + Map constructTypes = this.types.get(theConstruct); + if (null == constructTypes) { + throw new RuntimeException("Something worse is cooking here!", + new CatalogException("No types for construct " + theConstruct)); + } + return constructTypes; + } + + protected Iterator> + typesIterator(Construct theConstruct) { + List> constructTypes = + new ArrayList>( + this.types.get(theConstruct).entrySet()); + Collections.reverse(constructTypes); + return (this.parent == null) + ? constructTypes.iterator() + : Iterators.concat(constructTypes.iterator(), + this.parent.typesIterator(theConstruct)); + } + + /* this will iterate through the type hierarchy for the given type, included. + */ + public Iterator> + hierarchy(Construct theConstruct, final String theName) { + return Iterators.filter(typesIterator(theConstruct), + new Predicate>() { + Object next = theName; + public boolean apply(Map.Entry theEntry) { + if (next != null && next.equals(theEntry.getKey())) { + next = theEntry.getValue().get("derived_from"); + return true; + } + else + return false; + } + }); + } + + public boolean isDerivedFrom(Construct theConstruct, String theType, String theBaseType) { + + Iterator> hierachyIterator = + hierarchy(theConstruct, theType); + while (hierachyIterator.hasNext()) { + Map.Entry typeDef = hierachyIterator.next(); + + if (typeDef.getKey().equals(theBaseType)) { + return true; + } + } + return false; + } + + /* We go over the type hierarchy and retain only an iterator over the + * elements of the given facet for each type in the hierarchy. + * We concatenate these iterators and filter out duplicates. 
+ * TODO: cannot just filter out duplicates - a redefinition can refine the one in the base construct so we + * should merge them! + */ + public Iterator facets(Construct theConstruct, + final Facet theFacet, + final String theName) { + return + Iterators.filter( + Iterators.concat( + Iterators.transform( + hierarchy(theConstruct, theName), + new Function, Iterator>() { + public Iterator apply(Map.Entry theEntry) { + Map m = (Map)theEntry.getValue().get(theFacet.name()); + return m == null + ? Collections.emptyIterator() + : m.entrySet().iterator(); + } + } + ) + ), + new Predicate() { + Set insts = new HashSet(); + public boolean apply(Map.Entry theEntry) { + return !insts.contains(theEntry.getKey()); + } + } + ); + } + + //no need to specify a construct, only nodes can have requirements + public Iterator requirements(final String theName) { + return + Iterators.concat( + Iterators.transform( + hierarchy(Construct.Node, theName), + new Function, Iterator>() { + public Iterator apply(Map.Entry theEntry) { + List l = (List)theEntry.getValue().get("requirements"); + return l == null + ? Collections.emptyIterator() + : Iterators.concat( + Iterators.transform( + l.iterator(), + new Function> () { + public Iterator apply(Map theEntry) { + return theEntry.entrySet().iterator(); + } + } + ) + ); + } + } + ) + ); + } + + /* Example: find the definition of property 'port' of the node type + * tosca.nodes.Database (properties being a facet of the node construct) + * + * Note: the definition of a facet is cumulative, i.e. more specialized + * definitions contribute (by overwriting) to the + */ + public Map getFacetDefinition(Construct theConstruct, + String theConstructTypeName, + Facet theFacet, + String theName) { + Map def = null; + Iterator> ti = hierarchy(theConstruct, theConstructTypeName); + while (ti.hasNext()) { + //this is where requirements would yield a List .. + Map fset = (Map)ti.next().getValue().get(theFacet.name()); + if (fset != null) { + def = def == null ? fset.get(theName) + : mergeDefinitions(def, fset.get(theName)); + } + } + return def; + } + + public Map getRequirementDefinition(Construct theConstruct, + String theConstructTypeName, + String theName) { + Iterator> ti = hierarchy(theConstruct, theConstructTypeName); + while (ti.hasNext()) { + //this is where requirements yield a List .. 
+ List reqs = (List)ti.next().getValue().get("requirements"); + + if(reqs!=null){ + for (Map req: reqs) { + Map.Entry reqe = (Map.Entry)req.entrySet().iterator().next(); + if (theName.equals(reqe.getKey())) { + return (Map)reqe.getValue(); + } + } + }else{ + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Avoiding adding requirment block since it doesn't exists on the template...."); + } + } + return null; + } + + /* */ + private EnumMap> getTemplates(Target theTarget) { + EnumMap> targetTemplates = templates.get(theTarget); + if (targetTemplates == null) { + targetTemplates = new EnumMap>(Construct.class); + targetTemplates.put(Construct.Data, new LinkedHashMap()); + targetTemplates.put(Construct.Relationship, new LinkedHashMap()); + targetTemplates.put(Construct.Node, new LinkedHashMap()); + targetTemplates.put(Construct.Group, new LinkedHashMap()); + targetTemplates.put(Construct.Policy, new LinkedHashMap()); + + templates.put(theTarget, targetTemplates); + } + return targetTemplates; + } + + public Map getTargetTemplates(Target theTarget, Construct theConstruct) { + return getTemplates(theTarget).get(theConstruct); + } + + public void addTemplate(Target theTarget, Construct theConstruct, String theName, Map theDef) + throws CatalogException { + Map constructTemplates = getTargetTemplates(theTarget, theConstruct); + if (null == constructTemplates) { + throw new CatalogException("No such thing as " + theConstruct + " templates"); + } + if (constructTemplates.containsKey(theName)) { + throw new CatalogException(theConstruct + " template '" + theName + "' re-declaration"); + } + constructTemplates.put(theName, theDef); + } + + public boolean hasTemplate(Target theTarget, Construct theConstruct, String theName) { + Map constructTemplates = getTargetTemplates(theTarget, theConstruct); + return constructTemplates != null && + constructTemplates.containsKey(theName); + } + + public Map getTemplate(Target theTarget, Construct theConstruct, String theName) { + Map constructTemplates = getTargetTemplates(theTarget, theConstruct); + if (constructTemplates != null) + return constructTemplates.get(theName); + else + return null; + } + + public static Map mergeDefinitions(Map theAggregate, Map theIncrement) { + if (theIncrement == null) + return theAggregate; + + for(Map.Entry e: (Set)theIncrement.entrySet()) { + theAggregate.putIfAbsent(e.getKey(), e.getValue()); + } + return theAggregate; + } + + /* tracks imports, i.e.targets */ + private LinkedHashMap targets = + new LinkedHashMap(); + /* tracks dependencies between targets, i.e. 
the 'adjency' matrix defined by + * the 'import' relationship */ + private Table imports = HashBasedTable.create(); + + + /* + * theParent contains an 'include/import' statement pointing to the Target + */ + public boolean addTarget(Target theTarget, Target theParent) { + boolean cataloged = targets.containsKey(theTarget.getLocation()); + + if(!cataloged) { + targets.put(theTarget.getLocation(), theTarget); + } + + if (theParent != null) { + imports.put(theParent, theTarget, Boolean.TRUE); + } + + return !cataloged; + } + + public Target getTarget(URI theLocation) { + return targets.get(theLocation); + } + + public Collection targets() { + return targets.values(); + } + + /* Targets that no other targets depend on */ + public Collection topTargets() { + return targets.values() + .stream() + .filter(t -> !imports.containsColumn(t)) + .collect(Collectors.toList()); + + } + + public String importString(Target theTarget) { + return importString(theTarget, " "); + } + + private String importString(Target theTarget, String thePrefix) { + StringBuilder sb = new StringBuilder(""); + Map parents = imports.column(theTarget); + if (parents != null) { + for (Target p: parents.keySet()) { + sb.append(thePrefix) + .append("from ") + .append(p.getLocation()) + .append("\n") + .append(importString(p, thePrefix + " ")); + } + //we only keep the positive relationships + } + return sb.toString(); + } + + /* */ + private class TargetComparator implements Comparator { + + /* @return 1 if there is a dependency path from TargetOne to TargetTwo, -1 otherwise */ + public int compare(Target theTargetOne, Target theTargetTwo) { + if (hasPath(theTargetTwo, theTargetOne)) + return -1; + + if (hasPath(theTargetOne, theTargetTwo)) + return 1; + + return 0; + } + + public boolean hasPath(Target theStart, Target theEnd) { + Map deps = imports.row(theStart); + if (deps.containsKey(theEnd)) + return true; + for (Target dep: deps.keySet()) { + if (hasPath(dep, theEnd)) + return true; + } + return false; + } + } + + public Collection sortedTargets() { + List keys = new ArrayList(this.targets.values()); + Collections.sort(keys, new TargetComparator()); + return keys; + } + + public static void main(String[] theArgs) throws Exception { + + Catalog cat = new Catalog(); + + Target a = new Target("a", new URI("a")), + b = new Target("b", new URI("b")), + c = new Target("c", new URI("c")), + d = new Target("d", new URI("d")); + + cat.addTarget(a, null); + cat.addTarget(b, null); + cat.addTarget(c, null); + cat.addTarget(d, null); + + cat.addTarget(b, c); + cat.addTarget(a, c); + cat.addTarget(c, d); + cat.addTarget(a, b); + + for (Target t: cat.sortedTargets()) + debugLogger.log(LogLevel.DEBUG, Catalog.class.getName(), t.toString()); + + Catalog root = new Catalog(); + root.addType(Construct.Node, "_a", Collections.emptyMap()); + root.addType(Construct.Node, "__a", Collections.singletonMap("derived_from", "_a")); + root.addType(Construct.Node, "___a", Collections.singletonMap("derived_from", "_a")); + + Catalog base = new Catalog(root); + base.addType(Construct.Node, "_b", Collections.singletonMap("derived_from", "__a")); + base.addType(Construct.Node, "__b", Collections.singletonMap("derived_from", "_b")); + base.addType(Construct.Node, "__b_", Collections.singletonMap("derived_from", "_a")); + + if (theArgs.length > 0) { + Iterator> ti = + base.hierarchy(Construct.Node, theArgs[0]); + while (ti.hasNext()) { + debugLogger.log(LogLevel.DEBUG, Catalog.class.getName(), "> {}", ti.next().getKey()); + } + } + } +} diff --git 
a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CatalogException.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CatalogException.java new file mode 100644 index 0000000..d8e2dba --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CatalogException.java @@ -0,0 +1,14 @@ +package org.onap.sdc.dcae.checker; + + +public class CatalogException extends Exception { + + public CatalogException(String theMsg, Throwable theCause) { + super(theMsg, theCause); + } + + public CatalogException(String theMsg) { + super(theMsg); + } + +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java new file mode 100644 index 0000000..fee617f --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java @@ -0,0 +1,3643 @@ +package org.onap.sdc.dcae.checker; + +import java.lang.reflect.Method; +import java.lang.reflect.InvocationTargetException; + +import java.io.File; +import java.io.Reader; +import java.io.IOException; + +import java.net.URI; +import java.net.URISyntaxException; + +import java.util.HashMap; +import java.util.TreeMap; +import java.util.Iterator; +import java.util.ListIterator; +import java.util.Map; +import java.util.List; +import java.util.LinkedList; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Set; +import java.util.Collection; +import java.util.Collections; +import java.util.regex.Pattern; +import java.util.regex.Matcher; +import java.util.stream.Collectors; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.yaml.snakeyaml.Yaml; + +import com.google.common.collect.Maps; +import com.google.common.collect.MapDifference; +import com.google.common.reflect.Invokable; + +import com.google.common.collect.Table; +import com.google.common.collect.HashBasedTable; + +import kwalify.Validator; +import kwalify.Rule; +import kwalify.Types; +import kwalify.ValidationException; +import kwalify.SchemaException; + +import org.apache.commons.jxpath.JXPathContext; +import org.apache.commons.jxpath.JXPathException; +import org.apache.commons.lang.reflect.ConstructorUtils; +import org.onap.sdc.dcae.checker.annotations.Catalogs; +import org.onap.sdc.dcae.checker.annotations.Checks; +import org.reflections.Reflections; +import org.reflections.util.FilterBuilder; +import org.reflections.util.ConfigurationBuilder; +import org.reflections.scanners.TypeAnnotationsScanner; +import org.reflections.scanners.SubTypesScanner; +import org.reflections.scanners.MethodAnnotationsScanner; + +/* + * To consider: model consistency checking happens now along with validation + * (is implemented as part of the validation hooks). It might be better to + * separate the 2 stages and perform all the consistency checking once + * validation is completed. 
+ */ +public class Checker { + private static final String PROPERTIES = "properties"; + private static final String DEFAULT = "default"; + private static final String ATTRIBUTES = "attributes"; + private static final String DATA_TYPES = "data_types"; + private static final String CAPABILITY_TYPES = "capability_types"; + private static final String VALID_SOURCE_TYPES = "valid_source_types"; + private static final String RELATIONSHIP_TYPES = "relationship_types"; + private static final String INTERFACES = "interfaces"; + private static final String VALID_TARGET_TYPES = "valid_target_types"; + private static final String ARTIFACT_TYPES = "artifact_types"; + private static final String INTERFACE_TYPES = "interface_types"; + private static final String NODE_TYPES = "node_types"; + private static final String REQUIREMENTS = "requirements"; + private static final String CAPABILITIES = "capabilities"; + private static final String GROUP_TYPES = "group_types"; + private static final String TARGETS_CONSTANT = "targets"; + private static final String POLICY_TYPES = "policy_types"; + private static final String IS_NONE_OF_THOSE = "' is none of those"; + private static final String INPUTS = "inputs"; + private static final String CAPABILITY = "capability"; + private static final String ARTIFACTS = "artifacts"; + private static final String WAS_DEFINED_FOR_THE_NODE_TYPE = " was defined for the node type "; + private static final String UNKNOWN = "Unknown "; + private static final String TYPE = " type "; + + private Target target = null; //what we're validating at the moment + + private Map grammars = new HashMap<>(); //grammars for the different tosca versions + + private Catalog catalog; + private TargetLocator locator = new CommonLocator(); + + private Table checks = HashBasedTable.create(); + private Table catalogs = HashBasedTable.create(); + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private static Catalog commonsCatalogInstance = null; + + private static final String[] EMPTY_STRING_ARRAY = new String[0]; + + /* Need a proper way to indicate where the grammars are and how they should be identified */ + private static final String[] grammarFiles = new String[]{"tosca/tosca_simple_yaml_1_0.grammar", + "tosca/tosca_simple_yaml_1_1.grammar"}; + + private Pattern spacePattern = Pattern.compile("\\s"); + + private Pattern indexPattern = Pattern.compile("/\\p{Digit}+"); + + //this is getting silly .. 
+ private static Class[][] checkHookArgTypes = + new Class[][]{ + new Class[]{Map.class, CheckContext.class}, + new Class[]{List.class, CheckContext.class}}; + + private static Class[] validationHookArgTypes = + new Class[]{Object.class, Rule.class, Validator.ValidationContext.class}; + + public Checker() throws CheckerException { + loadGrammars(); + loadAnnotations(); + } + + public static void main(String[] theArgs) { + if (theArgs.length == 0) { + errLogger.log(LogLevel.ERROR, Checker.class.getName(), "checker resource_to_validate [processor]*"); + return; + } + + try { + Catalog cat = Checker.check(new File(theArgs[0])); + + for (Target t : cat.targets()) { + errLogger.log(LogLevel.ERROR, Checker.class.getName(), "{}\n{}\n{}", t.getLocation(), cat.importString(t), t.getReport()); + } + + for (Target t : cat.sortedTargets()) { + errLogger.log(LogLevel.ERROR, Checker.class.getName(), t.toString()); + } + + } catch (Exception x) { + errLogger.log(LogLevel.ERROR, Checker.class.getName(),"Exception {}", x); + } + } + + private void loadGrammars() throws CheckerException { + + for (String grammarFile : grammarFiles) { + Target grammarTarget = this.locator.resolve(grammarFile); + if (grammarTarget == null) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Failed to locate grammar {}", grammarFile); + continue; + } + + parseTarget(grammarTarget); + if (grammarTarget.getReport().hasErrors()) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid grammar {}: {}", grammarFile, grammarTarget.getReport().toString()); + continue; + } + + List versions = null; + try { + versions = (List) + ((Map) + ((Map) + ((Map) grammarTarget.getTarget()) + .get("mapping")) + .get("tosca_definitions_version")) + .get("enum"); + } catch (Exception x) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid grammar {}: cannot locate tosca_definitions_versions. Exception{}", grammarFile, x); + } + if (versions == null || versions.isEmpty()) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid grammar {}: no tosca_definitions_versions specified", grammarFile); + continue; + } + + for (Object version : versions) { + this.grammars.put(version.toString(), grammarTarget); + } + } + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Loaded grammars: {}", this.grammars); + } + + private void loadAnnotations() { + Reflections reflections = new Reflections( + new ConfigurationBuilder() + .forPackages("org.onap.sdc.dcae") + .filterInputsBy(new FilterBuilder() + .include(".*\\.class") + ) + .setScanners(new TypeAnnotationsScanner(), + new SubTypesScanner(), + new MethodAnnotationsScanner()) + .setExpandSuperTypes(false) + ); + + Map handlers = new HashMap<>(); + + Set checkHandlers = reflections.getMethodsAnnotatedWith(Checks.class); + for (Method checkHandler : checkHandlers) { + checks.put(checkHandler.getAnnotation(Checks.class).path(), + checkHandler, + handlers.computeIfAbsent(checkHandler.getDeclaringClass(), + type -> { + try { + return (getClass() == type) ? this + : type.newInstance(); + } catch (Exception x) { + throw new RuntimeException(x); + } + })); + } + + Set catalogHandlers = reflections.getMethodsAnnotatedWith(Catalogs.class); + for (Method catalogHandler : catalogHandlers) { + catalogs.put(catalogHandler.getAnnotation(Catalogs.class).path(), + catalogHandler, + handlers.computeIfAbsent(catalogHandler.getDeclaringClass(), + type -> { + try { + return (getClass() == type) ? 
this + : type.newInstance(); + } catch (Exception x) { + throw new RuntimeException(x); + } + })); + } + } + + + public void setTargetLocator(TargetLocator theLocator) { + this.locator = theLocator; + } + + public Collection targets() { + if (this.catalog == null) { + throw new IllegalStateException("targets are only available after check"); + } + + return this.catalog.targets(); + } + + public Catalog catalog() { + return this.catalog; + } + + public void process(Processor theProcessor) { + + theProcessor.process(this.catalog); + } + + /* a facility for handling all files in a target directory .. */ + public static Catalog check(File theSource) + throws CheckerException { + + Catalog catalog = new Catalog(commonsCatalog()); + Checker checker = new Checker(); + try { + if (theSource.isDirectory()) { + for (File f : theSource.listFiles()) { + if (f.isFile()) { + checker.check(new Target(theSource.getCanonicalPath(), f.toURI().normalize()), catalog); + } + } + } else { + checker.check(new Target(theSource.getCanonicalPath(), theSource.toURI().normalize()), catalog); + } + } catch (IOException iox) { + throw new CheckerException("Failed to initialize target", iox); + } + + return catalog; + } + + public void check(String theSource) + throws CheckerException { + check(theSource, buildCatalog()); + } + + public void check(String theSource, Catalog theCatalog) + throws CheckerException { + Target tgt = + this.locator.resolve(theSource); + if (null == tgt) { + throw new CheckerException("Unable to locate the target " + theSource); + } + + check(tgt, theCatalog); + } + + public void check(Target theTarget) throws CheckerException { + check(theTarget, buildCatalog()); + } + + public void check(Target theTarget, Catalog theCatalog) throws CheckerException { + + this.catalog = theCatalog; + this.locator.addSearchPath(theTarget.getLocation()); + + if (this.catalog.addTarget(theTarget, null)) { + List targets = parseTarget(theTarget); + if (theTarget.getReport().hasErrors()) { + return; + } + for (Target targetItr : targets) { + this.catalog.addTarget(targetItr, null); + if (!validateTarget(targetItr).getReport().hasErrors()) { + checkTarget(targetItr); + } + } + } + } + + public void validate(Target theTarget) throws CheckerException { + validate(theTarget, buildCatalog()); + } + + public void validate(Target theTarget, Catalog theCatalog) throws CheckerException { + this.catalog = theCatalog; + this.locator.addSearchPath(theTarget.getLocation()); + + if (this.catalog.addTarget(theTarget, null)) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "@validateTarget"); + if (!validateTarget(theTarget).getReport().hasErrors()) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "@checkTarget"); + checkTarget(theTarget); + } + } + } + + private List parseTarget(final Target theTarget) + throws CheckerException { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "parseTarget {}", theTarget); + + Reader source = null; + try { + source = theTarget.open(); + } catch (IOException iox) { + throw new CheckerException("Failed to open target " + theTarget, iox); + } + + + ArrayList yamlRoots = new ArrayList<>(); + try { + Yaml yaml = new Yaml(); + for (Object yamlRoot : yaml.loadAll(source)) { + yamlRoots.add(yamlRoot); + } + + + } catch (Exception x) { + theTarget.report(x); + return Collections.emptyList(); + } finally { + try { + source.close(); + } catch (IOException iox) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "Exception {}", iox); + 
} + } + + ArrayList targets = new ArrayList(yamlRoots.size()); + if (yamlRoots.size() == 1) { + //he target turned out to be a bare document + theTarget.setTarget(yamlRoots.get(0)); + targets.add(theTarget); + } else { + //the target turned out to be a stream containing multiple documents + for (int i = 0; i < yamlRoots.size(); i++) { +/* +!!We're changing the target below, i.e. we're changing the target implementation hence caching implementation will suffer!! +*/ + Target newTarget = new Target(theTarget.getName(), + fragmentTargetURI(theTarget.getLocation(), String.valueOf(i))); + newTarget.setTarget(yamlRoots.get(i)); + targets.add(newTarget); + } + } + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), " exiting parseTarget {}", theTarget); + return targets; + } + + private URI fragmentTargetURI(URI theRoot, String theFragment) { + try { + return new URI(theRoot.getScheme(), + theRoot.getSchemeSpecificPart(), + theFragment); + } catch (URISyntaxException urisx) { + throw new RuntimeException(urisx); + } + } + + private Target validateTarget(Target theTarget) + throws CheckerException { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "entering validateTarget {}", theTarget); + + String version = (String) + ((Map) theTarget.getTarget()) + .get("tosca_definitions_version"); + if (version == null) { + throw new CheckerException("Target " + theTarget + " does not specify a tosca_definitions_version"); + } + + Target grammar = this.grammars.get(version); + if (grammar == null) { + throw new CheckerException("Target " + theTarget + " specifies unknown tosca_definitions_version " + version); + } + + TOSCAValidator validator = null; + try { + validator = new TOSCAValidator(theTarget, grammar.getTarget()); + } catch (SchemaException sx) { + throw new CheckerException("Grammar error at: " + sx.getPath(), sx); + } + + theTarget.getReport().addAll( + validator.validate(theTarget.getTarget())); + + if (!theTarget.getReport().hasErrors()) { + applyCanonicals(theTarget.getTarget(), validator.canonicals); + } + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), " exiting validateTarget {}", theTarget); + return theTarget; + } + + private Target checkTarget(Target theTarget) { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "entering checkTarget {}", theTarget); + + CheckContext ctx = new CheckContext(theTarget); + //start at the top + checkServiceTemplateDefinition( + (Map) theTarget.getTarget(), ctx); + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "exiting checkTarget {}", theTarget); + return theTarget; + } + + public void checkProperties( + Map theDefinitions, CheckContext theContext) { + theContext.enter(PROPERTIES); + try { + if (!checkDefinition(PROPERTIES, theDefinitions, theContext)) { + return; + } + + for (Iterator> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkPropertyDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkPropertyDefinition( + String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName); + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + //check the type + if (!checkDataType(theDefinition, theContext)) { + return; + } + //check default value is compatible with type + Object defaultValue = theDefinition.get(DEFAULT); + if (defaultValue != 
null) { + checkDataValuation(defaultValue, theDefinition, theContext); + } + + theContext.exit(); + } + + private void checkAttributes( + Map theDefinitions, CheckContext theContext) { + theContext.enter(ATTRIBUTES); + try { + if (!checkDefinition(ATTRIBUTES, theDefinitions, theContext)) { + return; + } + + for (Iterator> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkAttributeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkAttributeDefinition( + String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + if (!checkDataType(theDefinition, theContext)) { + return; + } + } finally { + theContext.exit(); + } + } + + /* top level rule, we collected the whole information set. + * this is where checking starts + */ + private void checkServiceTemplateDefinition( + Map theDef, CheckContext theContext) { + theContext.enter(""); + + if (theDef == null) { + theContext.addError("Empty template", null); + return; + } + +//!!! imports need to be processed first now that catalogging takes place at check time!! + + //first catalog whatever it is there to be cataloged so that the checks can perform cross-checking + for (Iterator> ri = theDef.entrySet().iterator(); + ri.hasNext(); ) { + Map.Entry e = ri.next(); + catalogs(e.getKey(), e.getValue(), theContext); + } + + for (Iterator> ri = theDef.entrySet().iterator(); + ri.hasNext(); ) { + Map.Entry e = ri.next(); + checks(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + @Catalogs(path = "/data_types") + protected void catalog_data_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter(DATA_TYPES); + try { + catalogTypes(Construct.Data, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + @Checks(path = "/data_types") + protected void check_data_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter(DATA_TYPES); + + try { + if (!checkDefinition(DATA_TYPES, theDefinitions, theContext)) { + return; + } + + for (Iterator> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkDataTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkDataTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Data); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Data, theName, theDefinition, + Facet.properties, theContext); + } + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/capability_types") + protected void catalog_capability_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter(CAPABILITY_TYPES); + try { + catalogTypes(Construct.Capability, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/capability_types") + protected void check_capability_types( + Map theTypes, CheckContext theContext) { + theContext.enter(CAPABILITY_TYPES); + try { + if (!checkDefinition(CAPABILITY_TYPES, theTypes, theContext)) { + return; + } + + for (Iterator> i = theTypes.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + 
checkCapabilityTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkCapabilityTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Capability); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Capability, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey(ATTRIBUTES)) { + checkAttributes( + (Map) theDefinition.get(ATTRIBUTES), theContext); + checkTypeConstructFacet(Construct.Capability, theName, theDefinition, + Facet.attributes, theContext); + } + + //valid_source_types: see capability_type_definition + //unclear: how is the valid_source_types list definition eveolving across + //the type hierarchy: additive, overwriting, ?? + if (theDefinition.containsKey(VALID_SOURCE_TYPES)) { + checkTypeReference(Construct.Node, theContext, + ((List) theDefinition.get(VALID_SOURCE_TYPES)).toArray(EMPTY_STRING_ARRAY)); + } + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/relationship_types") + protected void catalog_relationship_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter(RELATIONSHIP_TYPES); + try { + catalogTypes(Construct.Relationship, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/relationship_types") + protected void check_relationship_types( + Map theDefinition, CheckContext theContext) { + theContext.enter(RELATIONSHIP_TYPES); + try { + if (!checkDefinition(RELATIONSHIP_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkRelationshipTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkRelationshipTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Relationship); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey(ATTRIBUTES)) { + checkProperties( + (Map) theDefinition.get(ATTRIBUTES), theContext); + checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, + Facet.attributes, theContext); + } + + Map interfaces = (Map) theDefinition.get(INTERFACES); + if (interfaces != null) { + theContext.enter(INTERFACES); + for (Iterator> i = + interfaces.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_type_interface_definition( + e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + if (theDefinition.containsKey(VALID_TARGET_TYPES)) { + checkTypeReference(Construct.Capability, theContext, + ((List) theDefinition.get(VALID_TARGET_TYPES)).toArray(EMPTY_STRING_ARRAY)); + } + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/artifact_types") + protected void catalog_artifact_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter(ARTIFACT_TYPES); + try { + catalogTypes(Construct.Artifact, theDefinitions, theContext); + } 
finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/artifact_types") + protected void check_artifact_types( + Map theDefinition, CheckContext theContext) { + theContext.enter(ARTIFACT_TYPES); + try { + if (!checkDefinition(ARTIFACT_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkArtifactTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkArtifactTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Artifact); + try { + checkDefinition(theName, theDefinition, theContext); + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/interface_types") + protected void catalog_interface_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter(INTERFACE_TYPES); + try { + catalogTypes(Construct.Interface, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + @Checks(path = "/interface_types") + protected void check_interface_types( + Map theDefinition, CheckContext theContext) { + theContext.enter(INTERFACE_TYPES); + try { + if (!checkDefinition(INTERFACE_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkInterfaceTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkInterfaceTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Interface); + try { + checkDefinition(theName, theDefinition, theContext); + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/node_types") + protected void catalog_node_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter(NODE_TYPES); + try { + catalogTypes(Construct.Node, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/node_types") + protected void check_node_types( + Map theDefinition, CheckContext theContext) { + theContext.enter(NODE_TYPES); + try { + if (!checkDefinition(NODE_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkNodeTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkNodeTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Node); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Node, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey(ATTRIBUTES)) { + checkProperties( + (Map) theDefinition.get(ATTRIBUTES), theContext); + checkTypeConstructFacet(Construct.Node, theName, theDefinition, + Facet.attributes, theContext); + } + + //requirements + if (theDefinition.containsKey(REQUIREMENTS)) { + check_requirements( + (List) theDefinition.get(REQUIREMENTS), theContext); + } + + //capabilities + if (theDefinition.containsKey(CAPABILITIES)) { + check_capabilities( + (Map) theDefinition.get(CAPABILITIES), theContext); + } + + //interfaces: + Map interfaces = 
+ (Map) theDefinition.get(INTERFACES); + checkMapTypeInterfaceDefinition(theContext, interfaces); + } finally { + theContext.exit(); + } + } + + private void checkMapTypeInterfaceDefinition(CheckContext theContext, Map interfaces) { + if (interfaces != null) { + try { + theContext.enter(INTERFACES); + for (Iterator> i = + interfaces.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_type_interface_definition( + e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + } + + @Catalogs(path = "/group_types") + protected void catalog_group_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter(GROUP_TYPES); + try { + catalogTypes(Construct.Group, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + @Checks(path = "/group_types") + protected void check_group_types( + Map theDefinition, CheckContext theContext) { + theContext.enter(GROUP_TYPES); + try { + if (!checkDefinition(GROUP_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkGroupTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkGroupTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Group); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Group, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey(TARGETS_CONSTANT)) { + checkTypeReference(Construct.Node, theContext, + ((List) theDefinition.get(TARGETS_CONSTANT)).toArray(EMPTY_STRING_ARRAY)); + } + + //interfaces + Map interfaces = + (Map) theDefinition.get(INTERFACES); + checkMapTypeInterfaceDefinition(theContext, interfaces); + + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/policy_types") + protected void catalog_policy_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter(POLICY_TYPES); + try { + catalogTypes(Construct.Policy, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/policy_types") + protected void check_policy_types( + Map theDefinition, CheckContext theContext) { + theContext.enter(POLICY_TYPES); + try { + if (!checkDefinition(POLICY_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkPolicyTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkPolicyTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Policy); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Policy, theName, theDefinition, + Facet.properties, theContext); + } + + //the targets can be known node types or group types + List targets = (List) theDefinition.get(TARGETS_CONSTANT); + if ((targets != null) && (checkDefinition(TARGETS_CONSTANT, targets, theContext))) { + for (String targetItr : targets) { + if 
(!(this.catalog.hasType(Construct.Node, targetItr) || + this.catalog.hasType(Construct.Group, targetItr))) { + theContext.addError("The 'targets' entry must contain a reference to a node type or group type, '" + targetItr + IS_NONE_OF_THOSE, null); + } + } + } + } finally { + theContext.exit(); + } + } + + //checking of actual constructs (capability, ..) + + /* First, interface types do not have a hierarchical organization (no + * 'derived_from' in an interface type definition). + * So, when interfaces (with a certain type) are defined in a node + * or relationship type (and they can define new? operations), what + * is there to check: + * Can operations here re-define their declaration from the interface + * type spec?? From A.5.11.3 we are to understand that override is + * the default interpretation .. but they talk about sub-classing so it + * is probably intended as a reference to the node or relationship type + * hierarchy and not the interface type (no hierarchy there). + * Or is this a case of augmentation where new operations can be added?? + */ + private void check_type_interface_definition( + String theName, Map theDef, CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + + if (!checkType(Construct.Interface, theDef, theContext)) { + return; + } + + if (theDef.containsKey(INPUTS)) { + check_inputs((Map) theDef.get(INPUTS), theContext); + } + } finally { + theContext.exit(); + } + } + + private void check_capabilities(Map theDefinition, + CheckContext theContext) { + theContext.enter(CAPABILITIES); + try { + if (!checkDefinition(CAPABILITIES, theDefinition, theContext)) { + return; + } + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkCapabilityDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + /* A capability definition appears within the context of a node type */ + private void checkCapabilityDefinition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName, Construct.Capability); + + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + + //check capability type + if (!checkType(Construct.Capability, theDef, theContext)) { + return; + } + + //check properties + if (!checkFacetAugmentation( + Construct.Capability, theDef, Facet.properties, theContext)) { + return; + } + + //check attributes + if (!checkFacetAugmentation( + Construct.Capability, theDef, Facet.attributes, theContext)) { + return; + } + + //valid_source_types: should point to valid template nodes + if (theDef.containsKey(VALID_SOURCE_TYPES)) { + checkTypeReference(Construct.Node, theContext, + ((List) theDef.get(VALID_SOURCE_TYPES)).toArray(EMPTY_STRING_ARRAY)); + //per A.6.1.4 there is an additional check to be performed here: + //"Any Node Type (names) provided as values for the valid_source_types keyname SHALL be type-compatible (i.e., derived from the same parent Node Type) with any Node Types defined using the same keyname in the parent Capability Type." 
+ } + //occurences: were verified in range_definition + + } finally { + theContext.exit(); + } + } + + private void check_requirements(List theDefinition, + CheckContext theContext) { + theContext.enter(REQUIREMENTS); + try { + if (!checkDefinition(REQUIREMENTS, theDefinition, theContext)) { + return; + } + + for (Iterator i = theDefinition.iterator(); i.hasNext(); ) { + Map e = i.next(); + Iterator> ei = + (Iterator>) e.entrySet().iterator(); + Map.Entry eie = ei.next(); + checkRequirementDefinition(eie.getKey(), eie.getValue(), theContext); + assert !ei.hasNext(); + } + } finally { + theContext.exit(); + } + } + + private void checkRequirementDefinition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName, Construct.Requirement); + + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + //check capability type + String capabilityType = (String) theDef.get(CAPABILITY); + if (null != capabilityType) { + checkTypeReference(Construct.Capability, theContext, capabilityType); + } + + //check node type + String nodeType = (String) theDef.get("node"); + if (null != nodeType) { + checkTypeReference(Construct.Node, theContext, nodeType); + } + + //check relationship type + Map relationshipSpec = (Map) theDef.get("relationship"); + String relationshipType = null; + if (null != relationshipSpec) { + relationshipType = (String) relationshipSpec.get("type"); + if (relationshipType != null) { //should always be the case + checkTypeReference(Construct.Relationship, theContext, relationshipType); + } + + Map interfaces = (Map) + relationshipSpec.get(INTERFACES); + if (interfaces != null) { + //augmentation (additional properties or operations) of the interfaces + //defined by the above relationship types + + //check that the interface types are known + for (Map interfaceDef : interfaces.values()) { + checkType(Construct.Interface, interfaceDef, theContext); + } + } + } + + //cross checks + + //the capability definition might come from the capability type or from the capability definition + //within the node type. We might have more than one as a node might specify multiple capabilities of the + //same type. + //the goal here is to cross check the compatibility of the valid_source_types specification in the + //target capability definition (if that definition contains a valid_source_types entry). + List capabilityDefs = new LinkedList<>(); + //nodeType exposes capabilityType + if (nodeType != null) { + Map capabilities = + findTypeFacetByType(Construct.Node, nodeType, + Facet.capabilities, capabilityType); + if (capabilities.isEmpty()) { + theContext.addError("The node type " + nodeType + " does not appear to expose a capability of a type compatible with " + capabilityType, null); + } else { + for (Map.Entry capability : capabilities.entrySet()) { + //this is the capability as it was defined in the node type + Map capabilityDef = capability.getValue(); + //if it defines a valid_source_types then we're working with it, + //otherwise we're working with the capability type it points to. + //The spec does not make it clear if the valid_source_types in a capability definition augments or + //overwrites the one from the capabilityType (it just says they must be compatible). 
+ if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) { + capabilityDefs.add(capabilityDef); + } else { + capabilityDef = + catalog.getTypeDefinition(Construct.Capability, (String) capabilityDef.get("type")); + if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) { + capabilityDefs.add(capabilityDef); + } else { + //!!if there is a capability that does not have a valid_source_type than there is no reason to + //make any further verification (as there is a valid node_type/capability target for this requirement) + capabilityDefs.clear(); + break; + } + } + } + } + } else { + Map capabilityDef = catalog.getTypeDefinition(Construct.Capability, capabilityType); + if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) { + capabilityDefs.add(capabilityDef); + } + } + + //check that the node type enclosing this requirement definition + //is in the list of valid_source_types + if (!capabilityDefs.isEmpty()) { + String enclosingNodeType = + theContext.enclosingConstruct(Construct.Node); + assert enclosingNodeType != null; + + if (!capabilityDefs.stream().anyMatch( + (Map capabilityDef) -> { + List valid_source_types = + (List) capabilityDef.get(VALID_SOURCE_TYPES); + return valid_source_types.stream().anyMatch( + (String source_type) -> catalog.isDerivedFrom( + Construct.Node, enclosingNodeType, source_type)); + })) { + theContext.addError("Node type: " + enclosingNodeType + " not compatible with any of the valid_source_types provided in the definition of compatible capabilities", null); + } + } + + //if we have a relationship type, check if it has a valid_target_types + //if it does, make sure that the capability type is compatible with one + //of them + if (relationshipType != null) { //should always be the case + Map relationshipTypeDef = catalog.getTypeDefinition( + Construct.Relationship, relationshipType); + if (relationshipTypeDef != null) { + List valid_target_types = + (List) relationshipTypeDef.get(VALID_TARGET_TYPES); + if (valid_target_types != null) { + boolean found = false; + for (String target_type : valid_target_types) { + if (catalog.isDerivedFrom( + Construct.Capability, capabilityType, target_type)) { + found = true; + break; + } + } + if (!found) { + theContext.addError("Capability type: " + capabilityType + " not compatible with any of the valid_target_types " + valid_target_types + " provided in the definition of relationship type " + relationshipType, null); + } + } + } + } + + //relationship declares the capabilityType in its valid_target_type set + //in A.6.9 'Relationship Type' the spec does not indicate how inheritance + //is to be applied to the valid_target_type spec: cumulative, overwrites, + //so we treat it as an overwrite. 
+ } finally { + theContext.exit(); + } + } + + //topology_template_definition and sub-rules + /* */ + @Checks(path = "/topology_template") + protected void check_topology_template( + Map theDef, CheckContext theContext) { + + theContext.enter("topology_template"); + + for (Iterator> ri = theDef.entrySet().iterator(); + ri.hasNext(); ) { + Map.Entry e = ri.next(); + checks(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + /* + * Once the syntax of the imports section is validated parse/validate/catalog * all the imported template information + */ + @Checks(path = "/imports") + protected void check_imports(List theImports, CheckContext theContext) { + theContext.enter("imports"); + + for (ListIterator li = theImports.listIterator(); li.hasNext(); ) { + Object importEntry = li.next(); + Object importFile = ((Map) mapEntry(importEntry).getValue()).get("file"); + Target tgt = null; + try { + tgt = catalog.getTarget((URI) importFile); + } catch (ClassCastException ccx) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Import is {}. Exception {}", importFile, ccx); + } + + if (tgt == null || tgt.getReport().hasErrors()) { + //import failed parsing or validation, we skip it + continue; + } + + //import should have been fully processed by now ??? + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "Processing import {}.", tgt); + checkTarget(tgt); + + } + theContext.exit(); + } + + /* */ + @Checks(path = "/topology_template/substitution_mappings") + protected void check_substitution_mappings(Map theSub, + CheckContext theContext) { + theContext.enter("substitution_mappings"); + try { + //type is mandatory + String type = (String) theSub.get("node_type"); + if (!checkTypeReference(Construct.Node, theContext, type)) { + theContext.addError("Unknown node type: " + type + "", null); + return; //not much to go on with + } + + Map capabilities = (Map) theSub.get(CAPABILITIES); + if (null != capabilities) { + for (Map.Entry ce : capabilities.entrySet()) { + //the key must be a capability of the type + if (null == findTypeFacetByName(Construct.Node, type, + Facet.capabilities, ce.getKey())) { + theContext.addError("Unknown node type capability: " + ce.getKey() + ", type " + type, null); + } + //the value is a 2 element list: first is a local node, + //second is the name of one of its capabilities + List targetList = ce.getValue(); + if (targetList.size() != 2) { + theContext.addError("Invalid capability mapping: " + target + ", expecting 2 elements", null); + continue; + } + + String targetNode = (String) targetList.get(0); + String targetCapability = (String) targetList.get(1); + + Map targetNodeDef = (Map) + this.catalog.getTemplate(theContext.target(), Construct.Node, targetNode); + if (null == targetNodeDef) { + theContext.addError("Invalid capability mapping node template: " + targetNode, null); + continue; + } + + String targetNodeType = (String) targetNodeDef.get("type"); + if (null == findTypeFacetByName(Construct.Node, targetNodeType, + Facet.capabilities, targetCapability)) { + theContext.addError("Invalid capability mapping capability: " + targetCapability + ". 
No such capability found for node template " + targetNode + ", of type " + targetNodeType, null); + } + } + } + + Map requirements = (Map) theSub.get(REQUIREMENTS); + if (null != requirements) { + for (Map.Entry re : requirements.entrySet()) { + //the key must be a requirement of the type + if (null == findNodeTypeRequirementByName(type, re.getKey())) { + theContext.addError("Unknown node type requirement: " + re.getKey() + ", type " + type, null); + } + + List targetList = re.getValue(); + if (targetList.size() != 2) { + theContext.addError("Invalid requirement mapping: " + targetList + ", expecting 2 elements", null); + continue; + } + + String targetNode = (String) targetList.get(0); + String targetRequirement = (String) targetList.get(1); + + Map targetNodeDef = (Map) + this.catalog.getTemplate(theContext.target(), Construct.Node, targetNode); + if (null == targetNodeDef) { + theContext.addError("Invalid requirement mapping node template: " + targetNode, null); + continue; + } + + String targetNodeType = (String) targetNodeDef.get("type"); + if (null == findNodeTypeRequirementByName(targetNodeType, targetRequirement)) { + theContext.addError("Invalid requirement mapping requirement: " + targetRequirement + ". No such requirement found for node template " + targetNode + ", of type " + targetNodeType, null); + } + } + } + } finally { + theContext.exit(); + } + } + + + /* */ + @Checks(path = "/topology_template/inputs") + protected void check_inputs(Map theInputs, + CheckContext theContext) { + theContext.enter(INPUTS); + + try { + if (!checkDefinition(INPUTS, theInputs, theContext)) { + return; + } + + for (Iterator> i = theInputs.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkInputDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkInputDefinition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + // + if (!checkDataType(theDef, theContext)) { + return; + } + //check default value + Object defaultValue = theDef.get(DEFAULT); + if (defaultValue != null) { + checkDataValuation(defaultValue, theDef, theContext); + } + } finally { + theContext.exit(); + } + } + + @Checks(path = "topology_template/outputs") + protected void check_outputs(Map theOutputs, + CheckContext theContext) { + theContext.enter("outputs"); + + try { + if (!checkDefinition("outputs", theOutputs, theContext)) { + return; + } + + for (Iterator> i = theOutputs.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkOutputDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkOutputDefinition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName); + try { + checkDefinition(theName, theDef, theContext); + //check the expression + } finally { + theContext.exit(); + } + } + + @Checks(path = "/topology_template/groups") + protected void check_groups(Map theGroups, + CheckContext theContext) { + theContext.enter("groups"); + + try { + if (!checkDefinition("groups", theGroups, theContext)) { + return; + } + + for (Iterator> i = theGroups.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkGroupDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkGroupDefinition(String theName, + Map theDef, + CheckContext theContext) { + 
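+    //a group definition is checked for a known group type, for properties within that type's
+    //scope and, when 'targets' is present, for references to node templates whose types are
+    //compatible with the group type's own 'targets' list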
theContext.enter(theName); + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + + if (!checkType(Construct.Group, theDef, theContext)) { + return; + } + + if (!checkFacet( + Construct.Group, theDef, Facet.properties, theContext)) { + return; + } + + if (theDef.containsKey(TARGETS_CONSTANT)) { + + List targetsTypes = (List) + this.catalog.getTypeDefinition(Construct.Group, + (String) theDef.get("type")) + .get(TARGETS_CONSTANT); + + List targets = (List) theDef.get(TARGETS_CONSTANT); + for (String targetItr : targets) { + if (!this.catalog.hasTemplate(theContext.target(), Construct.Node, targetItr)) { + theContext.addError("The 'targets' entry must contain a reference to a node template, '" + targetItr + "' is not one", null); + } else { + if (targetsTypes != null) { + String targetType = (String) + this.catalog.getTemplate(theContext.target(), Construct.Node, targetItr).get("type"); + + boolean found = false; + for (String type : targetsTypes) { + found = this.catalog + .isDerivedFrom(Construct.Node, targetType, type); + if (found) { + break; + } + } + + if (!found) { + theContext.addError("The 'targets' entry '" + targetItr + "' is not type compatible with any of types specified in policy type targets", null); + } + } + } + } + } + } finally { + theContext.exit(); + } + } + + @Checks(path = "/topology_template/policies") + protected void check_policies(List> thePolicies, + CheckContext theContext) { + theContext.enter("policies"); + + try { + if (!checkDefinition("policies", thePolicies, theContext)) { + return; + } + + for (Map policy : thePolicies) { + assert policy.size() == 1; + Map.Entry e = policy.entrySet().iterator().next(); + checkPolicyDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkPolicyDefinition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + + if (!checkType(Construct.Policy, theDef, theContext)) { + return; + } + + if (!checkFacet( + Construct.Policy, theDef, Facet.properties, theContext)) { + return; + } + + //targets: must point to node or group templates (that are of a type + //specified in the policy type definition, if targets were specified + //there). 
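+      //illustrative only (not part of this commit): a policy pointing at templates, e.g.
+      //  policies:
+      //    - my_scaling_policy:
+      //        type: tosca.policies.Scaling
+      //        targets: [ my_server, my_server_group ]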
+ if (theDef.containsKey(TARGETS_CONSTANT)) { + List targetsTypes = (List) + this.catalog.getTypeDefinition(Construct.Policy, + (String) theDef.get("type")) + .get(TARGETS_CONSTANT); + + List targets = (List) theDef.get(TARGETS_CONSTANT); + for (String targetItr : targets) { + Construct targetConstruct = null; + + if (this.catalog.hasTemplate(theContext.target(), Construct.Group, targetItr)) { + targetConstruct = Construct.Group; + } else if (this.catalog.hasTemplate(theContext.target(), Construct.Node, targetItr)) { + targetConstruct = Construct.Node; + } else { + theContext.addError("The 'targets' entry must contain a reference to a node template or group template, '" + target + IS_NONE_OF_THOSE, null); + } + + if (targetConstruct != null && + targetsTypes != null) { + //get the target type and make sure is compatible with the types + //indicated in the type spec + String targetType = (String) + this.catalog.getTemplate(theContext.target(), targetConstruct, targetItr).get("type"); + + boolean found = false; + for (String type : targetsTypes) { + found = this.catalog + .isDerivedFrom(targetConstruct, targetType, type); + if (found) { + break; + } + } + + if (!found) { + theContext.addError("The 'targets' " + targetConstruct + " entry '" + targetItr + "' is not type compatible with any of types specified in policy type targets", null); + } + } + } + } + + } finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/topology_template/node_templates") + protected void check_node_templates(Map theTemplates, + CheckContext theContext) { + theContext.enter("node_templates"); + try { + if (!checkDefinition("node_templates", theTemplates, theContext)) { + return; + } + + for (Iterator> i = theTemplates.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkNodeTemplateDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + /* */ + private void checkNodeTemplateDefinition(String theName, + Map theNode, + CheckContext theContext) { + theContext.enter(theName, Construct.Node); + + try { + if (!checkDefinition(theName, theNode, theContext)) { + return; + } + + if (!checkType(Construct.Node, theNode, theContext)) { + return; + } + + //copy + String copy = (String) theNode.get("copy"); + if (copy != null) { + if (!checkTemplateReference(Construct.Node, theContext, copy)) { + theContext.addError("The 'copy' reference " + copy + " does not point to a known node template", null); + } else { + //the 'copy' node specification should be used to provide 'defaults' + //for this specification + } + } + + /* check that we operate on properties and attributes within the scope of + the specified node type */ + if (!checkFacet( + Construct.Node, /*theName,*/theNode, Facet.properties, theContext)) { + return; + } + + if (!checkFacet( + Construct.Node, /*theName,*/theNode, Facet.attributes, theContext)) { + return; + } + + //requirement assignment seq + if (theNode.containsKey(REQUIREMENTS)) { + checkRequirementsAssignmentDefinition( + (List) theNode.get(REQUIREMENTS), theContext); + } + + //capability assignment map: subject to augmentation + if (theNode.containsKey(CAPABILITIES)) { + checkCapabilitiesAssignmentDefinition( + (Map) theNode.get(CAPABILITIES), theContext); + } + + //interfaces + if (theNode.containsKey(INTERFACES)) { + checkTemplateInterfacesDefinition( + (Map) theNode.get(INTERFACES), theContext); + } + + //artifacts: artifacts do not have different definition forms/syntax + //depending on the context (type or template) but they 
are still subject + //to 'augmentation' + if (theNode.containsKey(ARTIFACTS)) { + check_template_artifacts_definition( + (Map) theNode.get(ARTIFACTS), theContext); + } + + /* node_filter: the context to which the node filter is applied is very + * wide here as opposed to the node filter specification in a requirement + * assignment which has a more strict context (target node/capability are + * specified). + * We could check that there are nodes in this template having the + * properties/capabilities specified in this filter, i.e. the filter has + * a chance to succeed. + */ + } finally { + theContext.exit(); + } + } + + @Checks(path = "/topology_template/relationship_templates") + protected void check_relationship_templates(Map theTemplates, + CheckContext theContext) { + theContext.enter("relationship_templates"); + + for (Iterator> i = theTemplates.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + checkRelationshipTemplateDefinition(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + private void checkRelationshipTemplateDefinition( + String theName, + Map theRelationship, + CheckContext theContext) { + theContext.enter(theName, Construct.Relationship); + try { + if (!checkDefinition(theName, theRelationship, theContext)) { + return; + } + + if (!checkType(Construct.Relationship, theRelationship, theContext)) { + return; + } + + /* check that we operate on properties and attributes within the scope of + the specified relationship type */ + if (!checkFacet(Construct.Relationship, theRelationship, + Facet.properties, theContext)) { + return; + } + + if (!checkFacet(Construct.Relationship, theRelationship, + Facet.attributes, theContext)) { + return; + } + + /* interface definitions + note: augmentation is allowed here so not clear what to check .. + maybe report augmentations if so configured .. 
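+         (such an augmentation would be, for example, an input value supplied for one of the
+         operations declared by the relationship type's interfaces)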
*/ + + } finally { + theContext.exit(); + } + } + + //requirements and capabilities assignment appear in a node templates + private void checkRequirementsAssignmentDefinition( + List theRequirements, CheckContext theContext) { + theContext.enter(REQUIREMENTS); + try { + if (!checkDefinition(REQUIREMENTS, theRequirements, theContext)) { + return; + } + + //the node type for the node template enclosing these requirements + String nodeType = (String) catalog.getTemplate( + theContext.target(), + Construct.Node, + theContext.enclosingConstruct(Construct.Node)) + .get("type"); + + for (Iterator ri = theRequirements.iterator(); ri.hasNext(); ) { + Map requirement = (Map) ri.next(); + + Iterator> rai = requirement.entrySet().iterator(); + + Map.Entry requirementEntry = rai.next(); + assert !rai.hasNext(); + + String requirementName = requirementEntry.getKey(); + Map requirementDef = findNodeTypeRequirementByName( + nodeType, requirementName); + + if (requirementDef == null) { + theContext.addError("No requirement " + requirementName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null); + continue; + } + + checkRequirementAssignmentDefinition( + requirementName, requirementEntry.getValue(), requirementDef, theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkRequirementAssignmentDefinition( + String theRequirementName, + Map theAssignment, + Map theDefinition, + CheckContext theContext) { + theContext//.enter("requirement_assignment") + .enter(theRequirementName, Construct.Requirement); + + //grab the node type definition to verify compatibility + + try { + //node assignment + boolean targetNodeIsTemplate = false; + String targetNode = (String) theAssignment.get("node"); + if (targetNode == null) { + targetNode = (String) theDefinition.get("node"); + //targetNodeIsTemplate stays false, targetNode must be a type + } else { + //the value must be a node template or a node type + targetNodeIsTemplate = isTemplateReference( + Construct.Node, theContext, targetNode); + if ((!targetNodeIsTemplate) && (!isTypeReference(Construct.Node, targetNode))){ + theContext.addError("The 'node' entry must contain a reference to a node template or node type, '" + targetNode + IS_NONE_OF_THOSE, null); + return; + } + + //additional checks + String targetNodeDef = (String) theDefinition.get("node"); + if (targetNodeDef != null && targetNode != null) { + if (targetNodeIsTemplate) { + //if the target is node template, it must be compatible with the + //node type specification in the requirement defintion + String targetNodeType = (String) + catalog.getTemplate(theContext.target(), Construct.Node, targetNode).get("type"); + if (!catalog.isDerivedFrom( + Construct.Node, targetNodeType, targetNodeDef)) { + theContext.addError("The required target node type '" + targetNodeType + "' of target node " + targetNode + " is not compatible with the target node type found in the requirement definition: " + targetNodeDef, null); + return; + } + } else { + //if the target is a node type it must be compatible (= or derived + //from) with the node type specification in the requirement definition + if (!catalog.isDerivedFrom( + Construct.Node, targetNode, targetNodeDef)) { + theContext.addError("The required target node type '" + targetNode + "' is not compatible with the target node type found in the requirement definition: " + targetNodeDef, null); + return; + } + } + } + } + + String targetNodeType = targetNodeIsTemplate ? 
+ (String) catalog.getTemplate(theContext.target(), Construct.Node, targetNode).get("type") : + targetNode; + + //capability assignment + boolean targetCapabilityIsType = false; + String targetCapability = (String) theAssignment.get(CAPABILITY); + if (targetCapability == null) { + targetCapability = (String) theDefinition.get(CAPABILITY); + //in a requirement definition the target capability can only be a + //capability type (and not a capability name within some target node + //type) + targetCapabilityIsType = true; + } else { + targetCapabilityIsType = isTypeReference(Construct.Capability, targetCapability); + + //check compatibility with the target compatibility type specified + //in the requirement definition, if any + String targetCapabilityDef = (String) theDefinition.get(CAPABILITY); + if (targetCapabilityDef != null && targetCapability != null) { + if (targetCapabilityIsType) { + if (!catalog.isDerivedFrom( + Construct.Capability, targetCapability, targetCapabilityDef)) { + theContext.addError("The required target capability type '" + targetCapability + "' is not compatible with the target capability type found in the requirement definition: " + targetCapabilityDef, null); + return; + } + } else { + //the capability is from a target node. Find its definition and + //check that its type is compatible with the capability type + //from the requirement definition + + //check target capability compatibility with target node + if (targetNode == null) { + theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', which was not specified", null); + return; + } + if (!targetNodeIsTemplate) { + theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', but there you specified a node type", null); + return; + } + //check that the targetNode (its type) indeed has the + //targetCapability + + Map targetNodeCapabilityDef = + findTypeFacetByName( + Construct.Node, targetNodeType, + Facet.capabilities, targetCapability); + if (targetNodeCapabilityDef == null) { + theContext.addError("No capability '" + targetCapability + "' was specified in the node " + targetNode + " of type " + targetNodeType, null); + return; + } + + String targetNodeCapabilityType = (String) targetNodeCapabilityDef.get("type"); + + if (!catalog.isDerivedFrom(Construct.Capability, + targetNodeCapabilityType, + targetCapabilityDef)) { + theContext.addError("The required target capability type '" + targetCapabilityDef + "' is not compatible with the target capability type found in the target node type capability definition : " + targetNodeCapabilityType + ", targetNode " + targetNode + ", capability name " + targetCapability, null); + return; + } + } + } + } + + //relationship assignment + Map targetRelationship = (Map) theAssignment.get("relationship"); + if (targetRelationship != null) { + //this has to be compatible with the relationship with the same name + //from the node type + //check the type + } + + //node_filter; used jxpath to simplify the navigation somewhat + //this is too cryptic + JXPathContext jxPath = JXPathContext.newContext(theAssignment); + jxPath.setLenient(true); + + List propertiesFilter = + (List) jxPath.getValue("/node_filter/properties"); + if (propertiesFilter != null) { + for (Map propertyFilter : propertiesFilter) { + if (targetNode != null) { + //if we have a target node or node template then it 
must have + //have these properties + for (Object propertyName : propertyFilter.keySet()) { + if (null == findTypeFacetByName(Construct.Node, + targetNodeType, + Facet.properties, + propertyName.toString())) { + theContext.addError("The node_filter property " + propertyName + " is invalid: requirement target node " + targetNode + " does not have such a property", null); + } + } + } + } + } + + List capabilitiesFilter = + (List) jxPath.getValue("node_filter/capabilities"); + if (capabilitiesFilter != null) { + for (Map capabilityFilterDef : capabilitiesFilter) { + assert capabilityFilterDef.size() == 1; + Map.Entry capabilityFilterEntry = + (Map.Entry) capabilityFilterDef.entrySet().iterator().next(); + String targetFilterCapability = capabilityFilterEntry.getKey(); + Map targetFilterCapabilityDef = null; + + //if we have a targetNode capabilityName must be a capability of + //that node (type); or it can be simply capability type (but the node + //must have a capability of that type) + + String targetFilterCapabilityType = null; + if (targetNode != null) { + targetFilterCapabilityDef = + findTypeFacetByName(Construct.Node, targetNodeType, + Facet.capabilities, targetFilterCapability); + if (targetFilterCapabilityDef != null) { + targetFilterCapabilityType = + (String) targetFilterCapabilityDef/*.values().iterator().next()*/.get("type"); + } else { + Map targetFilterCapabilities = + findTypeFacetByType(Construct.Node, targetNodeType, + Facet.capabilities, targetFilterCapability); + + if (!targetFilterCapabilities.isEmpty()) { + if (targetFilterCapabilities.size() > 1) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "checkRequirementAssignmentDefinition: filter check, target node type '{}' has more than one capability of type '{}', not supported", targetNodeType, targetFilterCapability); + } + //pick the first entry, it represents a capability of the required type + Map.Entry capabilityEntry = targetFilterCapabilities.entrySet().iterator().next(); + targetFilterCapabilityDef = Collections.singletonMap(capabilityEntry.getKey(), + capabilityEntry.getValue()); + targetFilterCapabilityType = targetFilterCapability; + } + } + } else { + //no node (type) specified, it can be a straight capability type + targetFilterCapabilityDef = catalog.getTypeDefinition( + Construct.Capability, targetFilterCapability); + //here comes the odd part: it can still be a just a name in which + //case we should look at the requirement definition, see which + //capability (type) it indicates + assert targetCapabilityIsType; //cannot be otherwise, we'd need a node + targetFilterCapabilityDef = catalog.getTypeDefinition( + Construct.Capability, targetCapability); + targetFilterCapabilityType = targetCapability; + } + + if (targetFilterCapabilityDef == null) { + theContext.addError("Capability (name or type) " + targetFilterCapability + " is invalid: not a known capability (type) " + + ((targetNodeType != null) ? 
(" of node type" + targetNodeType) : ""), null); + continue; + } + + for (Map propertyFilter : + (List) jxPath.getValue("/node_filter/capabilities/" + targetFilterCapability + "/properties")) { + //check that the properties are in the scope of the + //capability definition + for (Object propertyName : propertyFilter.keySet()) { + if (null == findTypeFacetByName(Construct.Capability, + targetCapability, + Facet.properties, + propertyName.toString())) { + theContext.addError("The capability filter " + targetFilterCapability + " property " + propertyName + " is invalid: target capability " + targetFilterCapabilityType + " does not have such a property", null); + } + } + } + } + } + + } finally { + theContext//.exit() + .exit(); + } + } + + private void checkCapabilitiesAssignmentDefinition( + Map theCapabilities, CheckContext theContext) { + theContext.enter(CAPABILITIES); + try { + if (!checkDefinition(CAPABILITIES, theCapabilities, theContext)) { + return; + } + + //the node type for the node template enclosing these requirements + String nodeType = (String) catalog.getTemplate( + theContext.target(), + Construct.Node, + theContext.enclosingConstruct(Construct.Node)) + .get("type"); + + for (Iterator> ci = + theCapabilities.entrySet().iterator(); + ci.hasNext(); ) { + + Map.Entry ce = ci.next(); + + String capabilityName = ce.getKey(); + Map capabilityDef = findTypeFacetByName(Construct.Node, nodeType, + Facet.capabilities, capabilityName); + if (capabilityDef == null) { + theContext.addError("No capability " + capabilityName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null); + continue; + } + + checkCapabilityAssignmentDefinition( + capabilityName, ce.getValue(), capabilityDef, theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkCapabilityAssignmentDefinition( + String theCapabilityName, + Map theAssignment, + Map theDefinition, + CheckContext theContext) { + + theContext.enter(theCapabilityName, Construct.Capability); + try { + String capabilityType = (String) theDefinition.get("type"); + //list of property and attributes assignments + checkFacet(Construct.Capability, theAssignment, capabilityType, + Facet.properties, theContext); + checkFacet(Construct.Capability, theAssignment, capabilityType, + Facet.attributes, theContext); + } finally { + theContext.exit(); + } + } + + private void checkTemplateInterfacesDefinition( + Map theInterfaces, + CheckContext theContext) { + theContext.enter(INTERFACES); + try { + if (!checkDefinition(INTERFACES, theInterfaces, theContext)) { + return; + } + + //the node type for the node template enclosing these requirements + String nodeType = (String) catalog.getTemplate( + theContext.target(), + Construct.Node, + theContext.enclosingConstruct(Construct.Node)) + .get("type"); + + for (Iterator> ii = + theInterfaces.entrySet().iterator(); + ii.hasNext(); ) { + + Map.Entry ie = ii.next(); + + String interfaceName = ie.getKey(); + Map interfaceDef = findTypeFacetByName(Construct.Node, nodeType, + Facet.interfaces, interfaceName); + + if (interfaceDef == null) { + /* this is subject to augmentation: this could be a warning but not an error */ + theContext.addError("No interface " + interfaceName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null); + continue; + } + + checkTemplateInterfaceDefinition( + interfaceName, ie.getValue(), interfaceDef, theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkTemplateInterfaceDefinition( + String theInterfaceName, + Map theAssignment, + Map theDefinition, + 
CheckContext theContext) { + + theContext.enter(theInterfaceName, Construct.Interface); + try { + //check the assignment of the common inputs + checkFacet(Construct.Interface, + theAssignment, + (String) theDefinition.get("type"), + Facet.inputs, + theContext); + } finally { + theContext.exit(); + } + } + + + @Checks(path = "/topology_template/artifacts") + protected void check_template_artifacts_definition( + Map theDefinition, + CheckContext theContext) { + theContext.enter(ARTIFACTS); + theContext.exit(); + } + + //generic checking actions, not related to validation rules + + /* will check the validity of the type specification for any construct containing a 'type' entry */ + private boolean checkType(Construct theCategory, Map theSpec, CheckContext theContext) { + String type = (String) theSpec.get("type"); + if (type == null) { + theContext.addError("Missing type specification", null); + return false; + } + + if (!catalog.hasType(theCategory, type)) { + theContext.addError(UNKNOWN + theCategory + " type: " + type, null); + return false; + } + + return true; + } + + /* the type can be: + * a known type: predefined or user-defined + * a collection (list or map) and then check that the entry_schema points to one of the first two cases (is that it?) + */ + private boolean checkDataType(Map theSpec, CheckContext theContext) { + + if (!checkType(Construct.Data, theSpec, theContext)) { + return false; + } + + String type = (String) theSpec.get("type"); + if (/*isCollectionType(type)*/ + "list".equals(type) || "map".equals(type)) { + Map entrySchema = (Map) theSpec.get("entry_schema"); + if (entrySchema == null) { + //maybe issue a warning ?? or is 'string' the default?? + return true; + } + + if (!catalog.hasType(Construct.Data, (String) entrySchema.get("type"))) { + theContext.addError("Unknown entry_schema type: " + entrySchema, null); + return false; + } + } + return true; + } + + /* Check that a particular facet (properties, attributes) of a construct type + * (node type, capability type, etc) is correctly (consistenly) defined + * across a type hierarchy + */ + private boolean checkTypeConstructFacet(Construct theConstruct, + String theTypeName, + Map theTypeSpec, + Facet theFacet, + CheckContext theContext) { + Map defs = + (Map) theTypeSpec.get(theFacet.name()); + if (null == defs) { + return true; + } + + boolean res = true; + + //given that the type was cataloged there will be at least one entry + Iterator> i = + catalog.hierarchy(theConstruct, theTypeName); + if (!i.hasNext()) { + theContext.addError( + "The type " + theTypeName + " needs to be cataloged before attempting 'checkTypeConstruct'", null); + return false; + } + i.next(); //skip self + while (i.hasNext()) { + Map.Entry e = i.next(); + Map superDefs = (Map) e.getValue() + .get(theFacet.name()); + if (null == superDefs) { + continue; + } + //this computes entries that appear on both collections but with different values, i.e. 
the re-defined properties + Map> diff = Maps.difference(defs, superDefs).entriesDiffering(); + + for (Iterator>> di = diff.entrySet().iterator(); di.hasNext(); ) { + Map.Entry> de = di.next(); + MapDifference.ValueDifference dediff = de.getValue(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} type {}: {} has been re-defined between the {} types {} and {}", theConstruct, theFacet, de.getKey(), theConstruct, e.getKey(), theTypeName); + //for now we just check that the type is consistenly re-declared + if (!this.catalog.isDerivedFrom(theFacet.construct(), + (String) dediff.leftValue().get("type"), + (String) dediff.rightValue().get("type"))) { + theContext.addError( + theConstruct + TYPE + theFacet + ", redefiniton changed its type: " + de.getKey() + " has been re-defined between the " + theConstruct + " types " + e.getKey() + " and " + theTypeName + " in an incompatible manner", null); + res = false; + } + } + } + + return res; + } + + /* + * Checks the validity of a certain facet of a construct + * (properties of a node) across a type hierarchy. + * For now the check is limited to a verifying that a a facet was declared + * somewhere in the construct type hierarchy (a node template property has + * been declared in the node type hierarchy). + * + * 2 versions with the more generic allowing the specification of the type + * to be done explicitly. + */ + private boolean checkFacet(Construct theConstruct, + Map theSpec, + Facet theFacet, + CheckContext theContext) { + return checkFacet(theConstruct, theSpec, null, theFacet, theContext); + } + + /** + * We walk the hierarchy and verify the assignment of a property with respect to its definition. + * We also collect the names of those properties defined as required but for which no assignment was provided. + */ + private boolean checkFacet(Construct theConstruct, + Map theSpec, + String theSpecType, + Facet theFacet, + CheckContext theContext) { + + Map defs = (Map) theSpec.get(theFacet.name()); + if (null == defs) { + return true; + } + defs = Maps.newHashMap(defs); // + + boolean res = true; + if (theSpecType == null) { + theSpecType = (String) theSpec.get("type"); + } + if (theSpecType == null) { + theContext.addError("No specification type available", null); + return false; + } + + Map missed = new HashMap<>(); //keeps track of the missing required properties, the value is + //false if a default was found along the hierarchy + Iterator> i = + catalog.hierarchy(theConstruct, theSpecType); + while (i.hasNext() && !defs.isEmpty()) { + Map.Entry type = i.next(); + + Map typeDefs = (Map) type.getValue() + .get(theFacet.name()); + if (null == typeDefs) { + continue; + } + + MapDifference diff = Maps.difference(defs, typeDefs); + + //this are the ones this type and the spec have in common (same key, + //different values) + Map> facetDefs = + diff.entriesDiffering(); + //TODO: this assumes the definition of the facet is not cumulative, i.e. + //subtypes 'add' something to the definition provided by the super-types + //it considers the most specialized definition stands on its own + for (MapDifference.ValueDifference valdef : facetDefs.values()) { + checkDataValuation(valdef.leftValue(), valdef.rightValue(), theContext); + } + + //remove from properties all those that appear in this type: unfortunately this returns an unmodifiable map .. 
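+      //hence the fresh HashMap copy below, so 'defs' can keep shrinking across iterations of
+      //the hierarchy walk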
+ defs = Maps.newHashMap(diff.entriesOnlyOnLeft()); + } + + if (!defs.isEmpty()) { + theContext.addError(UNKNOWN + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") were used: " + defs, null); + res = false; + } + + if (!missed.isEmpty()) { + List missedNames = + missed.entrySet() + .stream() + .filter(e -> e.getValue().byteValue() == (byte) 1) + .map(e -> e.getKey()) + .collect(Collectors.toList()); + if (!missedNames.isEmpty()) { + theContext.addError(theConstruct + " " + theFacet + " missing required values for: " + missedNames, null); + res = false; + } + } + + return res; + } + + /* Augmentation occurs in cases such as the declaration of capabilities within a node type. + * In such cases the construct facets (the capabilitity's properties) can redefine (augment) the + * specification found in the construct type. + */ + private boolean checkFacetAugmentation(Construct theConstruct, + Map theSpec, + Facet theFacet, + CheckContext theContext) { + return checkFacetAugmentation(theConstruct, theSpec, null, theFacet, theContext); + } + + private boolean checkFacetAugmentation(Construct theConstruct, + Map theSpec, + String theSpecType, + Facet theFacet, + CheckContext theContext) { + + Map augs = (Map) theSpec.get(theFacet.name()); + if (null == augs) { + return true; + } + + boolean res = true; + if (theSpecType == null) { + theSpecType = (String) theSpec.get("type"); + } + if (theSpecType == null) { + theContext.addError("No specification type available", null); + return false; + } + + for (Iterator> ai = augs.entrySet().iterator(); ai.hasNext(); ) { + Map.Entry ae = ai.next(); + + //make sure it was declared by the type + Map facetDef = catalog.getFacetDefinition(theConstruct, theSpecType, theFacet, ae.getKey()); + if (facetDef == null) { + theContext.addError(UNKNOWN + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") were used: " + ae.getKey(), null); + res = false; + continue; + } + + //check the compatibility of the augmentation: only the type cannot be changed + //can the type be changed in a compatible manner ?? 
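+      //for now we insist on an exact match of the declared 'type' entry, see the check below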
+ if (!facetDef.get("type").equals(ae.getValue().get("type"))) { + theContext.addError(theConstruct + " " + theFacet + " " + ae.getKey() + " has a different type than its definition: " + ae.getValue().get("type") + " instead of " + facetDef.get("type"), null); + res = false; + continue; + } + + //check any valuation (here just defaults) + Object defaultValue = ae.getValue().get(DEFAULT); + if (defaultValue != null) { + checkDataValuation(defaultValue, ae.getValue(), theContext); + } + } + + return res; + } + + private boolean catalogTypes(Construct theConstruct, Map theTypes, CheckContext theContext) { + + boolean res = true; + for (Map.Entry typeEntry : theTypes.entrySet()) { + res &= catalogType(theConstruct, typeEntry.getKey(), typeEntry.getValue(), theContext); + } + + return res; + } + + private boolean catalogType(Construct theConstruct, + String theName, + Map theDef, + CheckContext theContext) { + + if (!catalog.addType(theConstruct, theName, theDef)) { + theContext.addError(theConstruct + TYPE + theName + " re-declaration", null); + return false; + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} type {} has been cataloged", theConstruct, theName); + + String parentType = (String) theDef.get("derived_from"); + if (parentType != null && !catalog.hasType(theConstruct, parentType)) { + theContext.addError( + theConstruct + TYPE + theName + " indicates a supertype that has not (yet) been declared: " + parentType, null); + return false; + } + return true; + } + + private boolean checkTypeReference(Construct theConstruct, + CheckContext theContext, + String... theTypeNames) { + boolean res = true; + for (String typeName : theTypeNames) { + if (!isTypeReference(theConstruct, typeName)) { + theContext.addError("Reference to " + theConstruct + " type '" + typeName + "' points to unknown type", null); + res = false; + } + } + return res; + } + + private boolean isTypeReference(Construct theConstruct, + String theTypeName) { + return this.catalog.hasType(theConstruct, theTypeName); + } + + /* node or relationship templates */ + private boolean checkTemplateReference(Construct theConstruct, + CheckContext theContext, + String... theTemplateNames) { + boolean res = true; + for (String templateName : theTemplateNames) { + if (!isTemplateReference(theConstruct, theContext, templateName)) { + theContext.addError("Reference to " + theConstruct + " template '" + templateName + "' points to unknown template", null); + res = false; + } + } + return res; + } + + private boolean isTemplateReference(Construct theConstruct, + CheckContext theContext, + String theTemplateName) { + return this.catalog.hasTemplate(theContext.target(), theConstruct, theTemplateName); + } + + /* + * For inputs/properties/attributes/(parameters). It is the caller's + * responsability to provide the value (from a 'default', inlined, ..) + * + * @param theDef the definition of the given construct/facet as it appears in + * its enclosing type definition. 
+ * @param + */ + private boolean checkDataValuation(Object theExpr, + Map theDef, + CheckContext theContext) { + //first check if the expression is a function, if not handle it as a value assignment + Data.Function f = Data.function(theExpr); + if (f != null) { + return f.evaluator() + .eval(theExpr, theDef, theContext); + } else { + Data.Type type = Data.typeByName((String) theDef.get("type")); + if (type != null) { + Data.Evaluator evaluator; + + evaluator = type.evaluator(); + if (evaluator == null) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No value evaluator available for type {}", type); + } else { + if ((theExpr != null) && (!evaluator.eval(theExpr, theDef, theContext))) { + return false; + } + } + + + evaluator = type.constraintsEvaluator(); + if (evaluator == null) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No constraints evaluator available for type {}", type); + } else { + if (theExpr != null) { + if (!evaluator.eval(theExpr, theDef, theContext)) { + return false; + } + } else { + //should have a null value validatorT + } + } + + return true; + } else { + theContext.addError("Expression " + theExpr + " of " + theDef + " could not be evaluated", null); + return false; + } + } + } + + /** + * Given the type of a certain construct (node type for example), look up + * in one of its facets (properties, capabilities, ..) for one of the given + * facet type (if looking in property, one of the given data type). + * + * @return a map of all facets of the given type, will be empty to signal + * none found + *

+ * Should we look for a facet construct of a compatible type: any type derived + * from the given facet's construct type?? + */ + private Map + findTypeFacetByType(Construct theTypeConstruct, + String theTypeName, + Facet theFacet, + String theFacetType) { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType {}, {}: {} {}", theTypeName, theTypeConstruct, theFacetType, theFacet); + Map res = new HashMap<>(); + Iterator> i = + catalog.hierarchy(theTypeConstruct, theTypeName); + while (i.hasNext()) { + Map.Entry typeSpec = i.next(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, Checking {} type {}", theTypeConstruct, typeSpec.getKey()); + Map typeFacet = + (Map) typeSpec.getValue().get(theFacet.name()); + if (typeFacet == null) { + continue; + } + Iterator> fi = typeFacet.entrySet().iterator(); + while (fi.hasNext()) { + Map.Entry facet = fi.next(); + String facetType = (String) facet.getValue().get("type"); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, Checking {} type {}", facet.getKey(), facetType); + + //here is the question: do we look for an exact match or .. + //now we check that the type has a capability of a type compatible + //(equal or derived from) the given capability type. + if (catalog.isDerivedFrom( + theFacet.construct(), facetType, theFacetType)) { + res.putIfAbsent(facet.getKey(), facet.getValue()); + } + } + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, found {}", res); + + return res; + } + + private Map + findTypeFacetByName(Construct theTypeConstruct, + String theTypeName, + Facet theFacet, + String theFacetName) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByName {} {}", theTypeConstruct, theTypeName); + Iterator> i = + catalog.hierarchy(theTypeConstruct, theTypeName); + while (i.hasNext()) { + Map.Entry typeSpec = i.next(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByName, Checking {} type {}", theTypeConstruct, typeSpec.getKey()); + Map typeFacet = + (Map) typeSpec.getValue().get(theFacet.name()); + if (typeFacet == null) { + continue; + } + Map facet = typeFacet.get(theFacetName); + if (facet != null) { + return facet; + } + } + return null; + } + + /* Requirements are the odd ball as they are structured as a sequence .. */ + private Map findNodeTypeRequirementByName( + String theNodeType, String theRequirementName) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findNodeTypeRequirementByName {}/{}", theNodeType, theRequirementName); + Iterator> i = + catalog.hierarchy(Construct.Node, theNodeType); + while (i.hasNext()) { + Map.Entry nodeType = i.next(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findNodeTypeRequirementByName, Checking node type {}", nodeType.getKey()); + List> nodeTypeRequirements = + (List>) nodeType.getValue().get(REQUIREMENTS); + if (nodeTypeRequirements == null) { + continue; + } + + for (Map requirement : nodeTypeRequirements) { + Map requirementDef = requirement.get(theRequirementName); + if (requirementDef != null) { + return requirementDef; + } + } + } + return null; + } + + /* + * Additional generics checks to be performed on any definition: construct, + * construct types, etc .. 
+ */ + public boolean checkDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + if (theDefinition == null) { + theContext.addError("Missing definition for " + theName, null); + return false; + } + + if (theDefinition.isEmpty()) { + theContext.addError("Empty definition for " + theName, null); + return false; + } + + return true; + } + + private boolean checkDefinition(String theName, + List theDefinition, + CheckContext theContext) { + if (theDefinition == null) { + theContext.addError("Missing definition for " + theName, null); + return false; + } + + if (theDefinition.isEmpty()) { + theContext.addError("Empty definition for " + theName, null); + return false; + } + + return true; + } + + /* plenty of one entry maps around */ + private Map.Entry mapEntry(Object theMap) { + return (Map.Entry) ((Map) theMap).entrySet().iterator().next(); + } + + /** + * Given that we remembered the canonical forms that were needed during + * validation to replace the short forms we can apply them to the target + * yaml. + * We take advantage here of the fact that the context path maintained + * during validation is compatible with (j)xpath, with the exception of + * sequence/array indentation .. + */ + + private String patchIndexes(CharSequence thePath) { + Matcher m = indexPattern.matcher(thePath); + StringBuffer path = new StringBuffer(); + while (m.find()) { + String index = m.group(); + index = "[" + (Integer.valueOf(index.substring(1)).intValue() + 1) + "]"; + m.appendReplacement(path, Matcher.quoteReplacement(index)); + } + m.appendTail(path); + return path.toString(); + } + + private String patchWhitespaces(String thePath) { + String[] elems = thePath.split("/"); + StringBuffer path = new StringBuffer(); + for (int i = 0; i < elems.length; i++) { + if (spacePattern.matcher(elems[i]).find()) { + path.append("[@name='") + .append(elems[i]) + .append("']"); + } else { + path.append("/") + .append(elems[i]); + } + } + return path.toString(); + } + + private void applyCanonicals(Object theTarget, + Map theCanonicals) { + if (theCanonicals.isEmpty()) { + return; + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "applying canonicals: {}", theCanonicals); + applyCanonicals(theTarget, theCanonicals, "/", false); + } + + /* + * applies canonicals selectively + */ + private void applyCanonicals(Object theTarget, + Map theCanonicals, + String thePrefix, + boolean doRemove) { + + JXPathContext jxPath = JXPathContext.newContext(theTarget); + for (Iterator> ces = + theCanonicals.entrySet().iterator(); + ces.hasNext(); ) { + Map.Entry ce = ces.next(); + //should we check prefix before or after normalization ?? + String path = ce.getKey(); + if (path.startsWith(thePrefix)) { + path = patchWhitespaces( + patchIndexes(path)); + try { + jxPath.setValue(path, ce.getValue()); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Applied canonical form (prefix '{}') at: {}", thePrefix, path); + + if (doRemove) { + ces.remove(); + } + } catch (JXPathException jxpx) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Failed to apply canonical to {} {}", theTarget, jxpx); + } + } + } + } + + /* + * commons are built-in and supposed to be bulletproof so any error in here + * goes out loud. + */ + private static Catalog commonsCatalog() { + + synchronized (Catalog.class) { + + if (commonsCatalogInstance != null) { + return commonsCatalogInstance; + } + + //if other templates are going to be part of the common type system + //add them to this list. order is relevant. 
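+      //(for example, "tosca/tosca-network-types.yaml", bundled with this checker, could be
+      //appended here if it were to become part of the built-in type system)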
+ final String[] commons = new String[]{ + "tosca/tosca-common-types.yaml"}; + + Checker commonsChecker; + try { + commonsChecker = new Checker(); + + for (String common : commons) { + commonsChecker.check(common, buildCatalog(false)); + Report commonsReport = commonsChecker.targets().iterator().next().getReport(); + + if (commonsReport.hasErrors()) { + throw new RuntimeException("Failed to process commons:\n" + + commonsReport); + } + } + } catch (CheckerException cx) { + throw new RuntimeException("Failed to process commons", cx); + } + commonsCatalogInstance = commonsChecker.catalog; + return commonsCatalogInstance; + } + } + + public static Catalog buildCatalog() { + return buildCatalog(true); + } + + private static Catalog buildCatalog(boolean doCommons) { + + Catalog catalog = new Catalog(doCommons ? commonsCatalog() : null); + if (!doCommons) { + //add core TOSCA types + for (Data.CoreType type : Data.CoreType.class.getEnumConstants()) { + catalog.addType(Construct.Data, type.toString(), Collections.emptyMap()); + } + } + return catalog; + } + + private boolean invokeHook(String theHookName, + Class[] theArgTypes, + Object... theArgs) { + + Invokable hookHandler = null; + try { + Method m = Checker.class.getDeclaredMethod( + theHookName, theArgTypes); + m.setAccessible(true); + hookHandler = Invokable.from(m); + } catch (NoSuchMethodException nsmx) { + //that's ok, not every rule has to have a handler + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "That's ok, not every rule has to have a handler. Method name =", theHookName); + } + + if (hookHandler != null) { + try { + hookHandler.invoke(this, theArgs); + } catch (InvocationTargetException | IllegalAccessException itx) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invocation failed for hook handler {} {}", theHookName, itx); + } catch (Exception x) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Hook handler failed {} {}", theHookName, x); + } + } + + return hookHandler != null; + } + + private void validationHook(String theTiming, + Object theTarget, + Rule theRule, + Validator.ValidationContext theContext) { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "looking up validation handler for {}, {} {}", theRule.getName(), theTiming, theContext.getPath()); + if (!invokeHook(theRule.getName() + "_" + theTiming + "_validation_handler", + validationHookArgTypes, + theTarget, theRule, theContext)) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "no validation handler for {}", theRule.getName() + "_" + theTiming); + } + } + + private void checks(String theName, + Object theTarget, + CheckContext theContext) { + Map handlers = checks.row(/*theName*/theContext.getPath(theName)); + if (handlers != null) { + for (Map.Entry handler : handlers.entrySet()) { + try { + handler.getKey().invoke(handler.getValue(), new Object[]{theTarget, theContext}); + } catch (Exception x) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Check {} with {} failed {}", theName, handler.getKey(), x); + } + } + } else { + boolean hasHook = false; + for (Class[] argTypes : checkHookArgTypes) { + hasHook |= invokeHook("check_" + theName, + argTypes, + theTarget, theContext); + //shouldn't we stop as soon as hasHook is true?? 
+ } + + if (!hasHook) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "no check handler for {}", theName); + } + } + } + + private void catalogs(String theName, + Object theTarget, + CheckContext theContext) { + + Map handlers = catalogs.row(/*theName*/theContext.getPath(theName)); + if (handlers != null) { + for (Map.Entry handler : handlers.entrySet()) { + try { + handler.getKey().invoke(handler.getValue(), new Object[]{theTarget, theContext}); + } catch (Exception x) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Cataloging {} with {} failed {}", theName, handler.getKey(), x); + } + } + } + } + + private class TOSCAValidator extends Validator { + + //what were validating + private Target target; + + /* Some of the TOSCA entries accept a 'short form/notation' instead of the canonical map representation. + * kwalify cannot easily express these alternatives and as such we handle them here. In the pre-validation phase we detect the presence of a short notation +and compute the canonical form and validate it. In the post-validation phase we +substitute the canonical form for the short form so that checking does not have to deal with it. + */ + + private Map canonicals = new TreeMap<>(); + + TOSCAValidator(Target theTarget, Object theSchema) { + super(theSchema); + this.target = theTarget; + } + + public Target getTarget() { + return this.target; + } + + /* hook method called by Validator#validate() + */ + @Override + protected boolean preValidationHook(Object value, Rule rule, ValidationContext context) { + + validationHook("pre", value, rule, context); + //short form handling + String hint = rule.getShort(); + if (value != null && + hint != null) { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Attempting canonical at {}, rule {}", context.getPath(), rule.getName()); + + Object canonical = null; + //if the canonical form requires a collection + if (Types.isCollectionType(rule.getType())) { + //and the actual value isn't one + if (!(value instanceof Map || value instanceof List)) { + //used to use singleton map/list here (was good for catching errors) + //but there is the possibility if short forms within short forms so + //the created canonicals need to accomodate other values. + if (Types.isMapType(rule.getType())) { + canonical = new HashMap(); + ((Map) canonical).put(hint, value); + } else { + //the hint is irrelevant here but we should impose a value when the target is a list + canonical = new LinkedList(); + ((List) canonical).add(value); + } + } else { + //we can accomodate: + // map to list of map transformation + if (!Types.isMapType(rule.getType()) /* a seq */ && + value instanceof Map) { + canonical = new LinkedList(); + ((List) canonical).add(value); + } else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Grammar for rule {} (at {}) would require unsupported short form transformation: {} to {}", rule.getName(), context.getPath(), value.getClass(), rule.getType()); + return false; + } + } + + int errc = context.errorCount(); + validateRule(canonical, rule, context); + if (errc != context.errorCount()) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Short notation for {} through {} at {} failed validation", rule.getName(), hint, context.getPath()); + } else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Short notation for {} through {} at {} passed validation. 
Canonical form is {}", rule.getName(), hint, context.getPath(), canonical); + //replace the short notation with the canonicall one so we don't + //have to deal it again during checking + this.canonicals.put(context.getPath(), canonical); + return true; + } + } else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Grammar for rule {} (at {}) would require unsupported short form transformation: {} to {}", rule.getName(), context.getPath(), value.getClass(), rule.getType()); + } + } + + //perform default validation process + return false; + } + + /* + * Only gets invoked once the value was succesfully verified against the syntax indicated by the given rule. + */ + @Override + protected void postValidationHook(Object value, + Rule rule, + ValidationContext context) { + validationHook("post", value, rule, context); + } + + } + + /** + * Maintains state across the checking process. + */ + public class CheckContext { + + private Target target; + private ArrayList elems = new ArrayList<>(10); + private ArrayList constructs = new ArrayList<>(10); + + CheckContext(Target theTarget) { + this.target = theTarget; + } + + public CheckContext enter(String theName) { + return enter(theName, null); + } + + public CheckContext enter(String theName, Construct theConstruct) { + this.elems.add(theName); + this.constructs.add(theConstruct); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering check {} {}", theName, getPath()); + return this; + } + + public CheckContext exit() { + String path = getPath(); + String name = this.elems.remove(this.elems.size() - 1); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "exiting check {} {}", name, path); + this.constructs.remove(this.constructs.size() - 1); + return this; + } + + public String getPath() { + return buildPath(null); + } + + String getPath(String theNextElem) { + return buildPath(theNextElem); + } + + String buildPath(String theElem) { + StringBuilder sb = new StringBuilder(); + for (String e : this.elems) { + sb.append(e) + .append("/"); + } + if (theElem != null) { + sb.append(theElem) + .append("/"); + } + + return sb.substring(0, sb.length() - 1); + } + + public String enclosingConstruct(Construct theConstruct) { + for (int i = this.constructs.size() - 1; i > 0; i--) { + Construct c = this.constructs.get(i); + if (c != null && c.equals(theConstruct)) { + return this.elems.get(i); + } + } + return null; + } + + public CheckContext addError(String theMessage, Throwable theCause) { + this.target.report(new TargetError("", getPath(), theMessage, theCause)); + return this; + } + + public Checker checker() { + return Checker.this; + } + + public Catalog catalog() { + return Checker.this.catalog; + } + + public Target target() { + return this.target; + } + + public String toString() { + return "CheckContext(" + this.target.getLocation() + "," + getPath() + ")"; + } + } + + // -------------------------------------------------------------------------------------------------- // + + private String errorReport(List theErrors) { + StringBuilder sb = new StringBuilder(theErrors.size() + " errors"); + for (Throwable x : theErrors) { + sb.append("\n"); + if (x instanceof ValidationException) { + ValidationException vx = (ValidationException) x; + // .apend("at ") + // .append(error.getLineNumber()) + // .append(" : ") + sb.append("[").append(vx.getPath()).append("] "); + } else if (x instanceof TargetError) { + TargetError tx = (TargetError) x; + sb.append("[").append(tx.getLocation()).append("] "); + } + 
sb.append(x.getMessage()); + if (x.getCause() != null) { + sb.append("\n\tCaused by:\n").append(x.getCause()); + } + } + sb.append("\n"); + return sb.toString(); + } + + protected void range_definition_post_validation_handler(Object theValue, Rule theRule, + Validator.ValidationContext theContext) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "entering range_definition {}", + theContext.getPath()); + + assert theRule.getType().equals("seq"); + List bounds = (List) theValue; + + if (bounds.size() != 2) { + theContext.addError("Too many values in bounds specification", theRule, theValue, null); + return; + } + + try { + Double.parseDouble(bounds.get(0).toString()); + } catch (NumberFormatException nfe) { + theContext.addError("Lower bound not a number", theRule, theValue, null); + } + + try { + Double.parseDouble(bounds.get(1).toString()); + } catch (NumberFormatException nfe) { + if (!"UNBOUNDED".equals(bounds.get(1).toString())) { + theContext.addError("Upper bound not a number or 'UNBOUNDED'", theRule, theValue, null); + } + } + + } + + /* + * early processing (validation time) of the imports allows us to catalog + * their types before those declared in the main document. + */ + protected void imports_post_validation_handler(Object theValue, Rule theRule, + Validator.ValidationContext theContext) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering imports {}", theContext.getPath()); + assert theRule.getType().equals("seq"); + + Target tgt = ((TOSCAValidator) theContext.getValidator()).getTarget(); + + applyCanonicals(tgt.getTarget(), ((TOSCAValidator) theContext.getValidator()).canonicals, "/imports", true); + + for (ListIterator li = ((List) theValue).listIterator(); li.hasNext();) { + + Map.Entry importEntry = mapEntry(li.next()); + + Map def = (Map) importEntry.getValue(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Processing import {}", def); + + String tfile = (String) def.get("file"); + Target tgti = this.locator.resolve(tfile); + if (tgti == null) { + theContext.addError("Failure to resolve import '" + def + "', imported from " + tgt, theRule, null, + null); + continue; + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Import {} located at {}", def, + tgti.getLocation()); + + if (this.catalog.addTarget(tgti, tgt)) { + // we've never seen this import (location) before + try { + + List tgtis = parseTarget(tgti); + if (tgtis.isEmpty()) + continue; + + if (tgtis.size() > 1) { + theContext.addError( + "Import '" + tgti + "', imported from " + tgt + ", contains multiple yaml documents", + theRule, null, null); + continue; + } + + tgti = tgtis.get(0); + + // tgti = parseTarget(tgti); + if (tgt.getReport().hasErrors()) { + theContext.addError("Failure parsing import '" + tgti + "',imported from " + tgt, theRule, null, + null); + continue; + } + + validateTarget(tgti); + if (tgt.getReport().hasErrors()) { + theContext.addError("Failure validating import '" + tgti + "',imported from " + tgt, theRule, + null, null); + continue; + } + } catch (CheckerException cx) { + theContext.addError("Failure validating import '" + tgti + "',imported from " + tgt, theRule, cx, + null); + } + } + + // replace with the actual location (also because this is what they + // get + // index by .. 
bad, this exposed catalog inner workings) + + def.put("file", tgti.getLocation()); + } + } + + protected void node_templates_post_validation_handler(Object theValue, Rule theRule, + Validator.ValidationContext theContext) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering node_templates_post_validation_handler {}", + theContext.getPath()); + assert theRule.getType().equals("map"); + Map nodeTemplates = (Map) theValue; + for (Iterator> i = nodeTemplates.entrySet().iterator(); i.hasNext();) { + Map.Entry node = i.next(); + try { + catalog.addTemplate(((TOSCAValidator) theContext.getValidator()).getTarget(), Construct.Node, + node.getKey(), node.getValue()); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Node template {} has been cataloged", + node.getKey()); + } catch (CatalogException cx) { + theContext.addError(cx.toString(), theRule, node, null); + } + } + } + + protected void inputs_post_validation_handler(Object theValue, Rule theRule, + Validator.ValidationContext theContext) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering inputs_post_validation_handler {}", + theContext.getPath()); + assert theRule.getType().equals("map"); + + // we'll repeat this test during checking but because we index inputs + // early + // we need it here too + if (theValue == null) { + return; + } + + Map inputs = (Map) theValue; + for (Iterator> i = inputs.entrySet().iterator(); i.hasNext();) { + Map.Entry input = i.next(); + try { + catalog.addTemplate(((TOSCAValidator) theContext.getValidator()).getTarget(), Construct.Data, + input.getKey(), input.getValue()); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Input {} has been cataloged", + input.getKey()); + } catch (CatalogException cx) { + theContext.addError(cx.toString(), theRule, input, null); + } + } + } + + private void process(String theProcessorSpec) throws CheckerException { + + String[] spec = theProcessorSpec.split(" "); + if (spec.length == 0) + throw new IllegalArgumentException("Incomplete processor specification"); + + Class processorClass = null; + try { + processorClass = Class.forName(spec[0]); + } catch (ClassNotFoundException cnfx) { + throw new CheckerException("Cannot find processor implementation", cnfx); + } + + Processor proc = null; + try { + proc = (Processor) ConstructorUtils.invokeConstructor(processorClass, + Arrays.copyOfRange(spec, 1, spec.length)); + } catch (Exception x) { + throw new CheckerException("Cannot instantiate processor", x); + } + + process(proc); + } + + protected void check_artifact_definition(String theName, Map theDef, CheckContext theContext) { + theContext.enter(theName, Construct.Artifact); + + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + // check artifact type + if (!checkType(Construct.Artifact, theDef, theContext)) + return; + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_policy_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Policy); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Policy, theName, theDefinition, Facet.properties, theContext); + } + + // the targets can be known node types or group types + List targets = (List) theDefinition.get("targets"); + if (targets != null) { + if 
(checkDefinition("targets", targets, theContext)) { + for (String target : targets) { + if (!(this.catalog.hasType(Construct.Node, target) + || this.catalog.hasType(Construct.Group, target))) { + theContext.addError( + "The 'targets' entry must contain a reference to a node type or group type, '" + + target + "' is none of those", + null); + } + } + } + } + + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_group_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Group); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Group, theName, theDefinition, Facet.properties, theContext); + } + + if (theDefinition.containsKey("targets")) { + checkTypeReference(Construct.Node, theContext, + ((List) theDefinition.get("targets")).toArray(EMPTY_STRING_ARRAY)); + } + + // interfaces + Map interfaces = (Map) theDefinition.get("interfaces"); + if (interfaces != null) { + try { + theContext.enter("interfaces"); + for (Iterator> i = interfaces.entrySet().iterator(); i.hasNext();) { + Map.Entry e = i.next(); + check_type_interface_definition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_node_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Node); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Node, theName, theDefinition, Facet.properties, theContext); + } + + if (theDefinition.containsKey("attributes")) { + check_properties((Map) theDefinition.get("attributes"), theContext); + checkTypeConstructFacet(Construct.Node, theName, theDefinition, Facet.attributes, theContext); + } + + // requirements + if (theDefinition.containsKey("requirements")) { + check_requirements((List) theDefinition.get("requirements"), theContext); + } + + // capabilities + if (theDefinition.containsKey("capabilities")) { + check_capabilities((Map) theDefinition.get("capabilities"), theContext); + } + + // interfaces: + Map interfaces = (Map) theDefinition.get("interfaces"); + if (interfaces != null) { + try { + theContext.enter("interfaces"); + for (Iterator> i = interfaces.entrySet().iterator(); i.hasNext();) { + Map.Entry e = i.next(); + check_type_interface_definition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + // artifacts + + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_interface_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Interface); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + // not much else here: a list of operation_definitions, each with + // its + // implementation and inputs + + // check that common inputs are re-defined in a compatible manner + + // check that the interface operations are overwritten in a + // compatible manner + // for (Iterator> i = theDefinition.entrySet() + + } finally { + theContext.exit(); + } + } + + /* */ + protected void 
check_artifact_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Artifact); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_relationship_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Relationship); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, Facet.properties, theContext); + } + + if (theDefinition.containsKey("attributes")) { + check_properties((Map) theDefinition.get("attributes"), theContext); + checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, Facet.attributes, theContext); + } + + Map interfaces = (Map) theDefinition.get("interfaces"); + if (interfaces != null) { + theContext.enter("interfaces"); + for (Iterator> i = interfaces.entrySet().iterator(); i.hasNext();) { + Map.Entry e = i.next(); + check_type_interface_definition(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + if (theDefinition.containsKey("valid_target_types")) { + checkTypeReference(Construct.Capability, theContext, + ((List) theDefinition.get("valid_target_types")).toArray(EMPTY_STRING_ARRAY)); + } + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_capability_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Capability); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Capability, theName, theDefinition, Facet.properties, theContext); + } + + if (theDefinition.containsKey("attributes")) { + check_attributes((Map) theDefinition.get("attributes"), theContext); + checkTypeConstructFacet(Construct.Capability, theName, theDefinition, Facet.attributes, theContext); + } + + // valid_source_types: see capability_type_definition + // unclear: how is the valid_source_types list definition eveolving + // across + // the type hierarchy: additive, overwriting, ?? + if (theDefinition.containsKey("valid_source_types")) { + checkTypeReference(Construct.Node, theContext, + ((List) theDefinition.get("valid_source_types")).toArray(EMPTY_STRING_ARRAY)); + } + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_data_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Data); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Data, theName, theDefinition, Facet.properties, theContext); + } + } finally { + theContext.exit(); + } + } + + /* + * top level rule, we collected the whole information set. this is where + * checking starts + */ + protected void check_service_template_definition(Map theDef, CheckContext theContext) { + theContext.enter(""); + + if (theDef == null) { + theContext.addError("Empty template", null); + return; + } + + // !!! 
imports need to be processed first now that catalogging takes + // place at check time!! + + // first catalog whatever it is there to be cataloged so that the checks + // can perform cross-checking + for (Iterator> ri = theDef.entrySet().iterator(); ri.hasNext();) { + Map.Entry e = ri.next(); + catalogs(e.getKey(), e.getValue(), theContext); + } + + for (Iterator> ri = theDef.entrySet().iterator(); ri.hasNext();) { + Map.Entry e = ri.next(); + checks(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + protected void check_attribute_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + if (!checkDataType(theDefinition, theContext)) { + return; + } + } finally { + theContext.exit(); + } + } + + public void check_attributes(Map theDefinitions, CheckContext theContext) { + theContext.enter("attributes"); + try { + if (!checkDefinition("attributes", theDefinitions, theContext)) + return; + + for (Iterator> i = theDefinitions.entrySet().iterator(); i.hasNext();) { + Map.Entry e = i.next(); + check_attribute_definition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + protected void check_property_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName); + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + // check the type + if (!checkDataType(theDefinition, theContext)) { + return; + } + // check default value is compatible with type + Object defaultValue = theDefinition.get("default"); + if (defaultValue != null) { + checkDataValuation(defaultValue, theDefinition, theContext); + } + + theContext.exit(); + } + + public void check_properties(Map theDefinitions, CheckContext theContext) { + theContext.enter("properties"); + try { + if (!checkDefinition("properties", theDefinitions, theContext)) + return; + + for (Iterator> i = theDefinitions.entrySet().iterator(); i.hasNext();) { + Map.Entry e = i.next(); + check_property_definition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + +} + diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CheckerException.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CheckerException.java new file mode 100644 index 0000000..1963c28 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CheckerException.java @@ -0,0 +1,18 @@ +package org.onap.sdc.dcae.checker; + + +/** + * A checker exception represents an error that stops the checker from + * completing its task. 
+ */ +public class CheckerException extends Exception { + + public CheckerException(String theMsg, Throwable theCause) { + super(theMsg, theCause); + } + + public CheckerException(String theMsg) { + super(theMsg); + } + +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java new file mode 100644 index 0000000..295a1f2 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java @@ -0,0 +1,144 @@ +package org.onap.sdc.dcae.checker; + +import java.io.InputStream; +import java.io.IOException; + +import java.net.URL; +import java.net.URI; +import java.net.URISyntaxException; + +import java.nio.file.Paths; + +import java.util.Set; +import java.util.LinkedHashSet; + +import com.google.common.collect.Iterables; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; + + +public class CommonLocator implements TargetLocator { + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private Set searchPaths = new LinkedHashSet(); + + /* will create a locator with 2 default search paths: the file directory + * from where the app was and the jar from which this checker (actually this + * class) was loaded */ + public CommonLocator() { + addSearchPath( + Paths.get(".").toAbsolutePath().normalize().toUri()); + } + + public CommonLocator(String... theSearchPaths) { + for (String path: theSearchPaths) { + addSearchPath(path); + } + } + + public boolean addSearchPath(URI theURI) { + + if (!theURI.isAbsolute()) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Search paths must be absolute uris: {}", theURI); + return false; + } + + return searchPaths.add(theURI); + } + + public boolean addSearchPath(String thePath) { + URI suri = null; + try { + suri = new URI(thePath); + } + catch(URISyntaxException urisx) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid search path: {} {}", thePath, urisx); + return false; + } + + return addSearchPath(suri); + } + + public Iterable searchPaths() { + return Iterables.unmodifiableIterable(this.searchPaths); + } + + /** + * Takes the given path and first URI resolves it and then attempts to open + * it (a way of verifying its existence) against each search path and stops + * at the first succesful test. + */ + public Target resolve(String theName) { + URI puri = null; + InputStream pis = null; + + //try classpath + URL purl = getClass().getClassLoader().getResource(theName); + if (purl != null) { + try { + return new Target(theName, purl.toURI()); + } + catch (URISyntaxException urisx) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "The file {} wasn't found {}", theName, urisx); + } + } + + //try absolute + try { + puri = new URI(theName); + if (puri.isAbsolute()) { + try { + pis = puri.toURL().openStream(); + } + catch (IOException iox) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "The path {} is an absolute uri but it cannot be opened {}", theName, iox); + return null; + } + } + } + catch(URISyntaxException urisx) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "TargetResolver failed attempting {} {}", puri, urisx); + //keep it silent but what are the chances .. 
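+            // a name that is not a syntactically valid URI simply falls through to the search-path resolution below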
+ } + + //try relative to the search paths + for (URI suri: searchPaths) { + try { + puri = suri.resolve(theName); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "TargetResolver trying {}", puri); + pis = puri.toURL().openStream(); + return new Target(theName, puri.normalize()); + } + catch (Exception x) { + debugLogger.log(LogLevel.ERROR, this.getClass().getName(), "TargetResolver failed attempting {} {}", puri, x); + continue; + } + finally { + if (pis!= null) { + try { + pis.close(); + } + catch (IOException iox) { + } + } + } + } + + return null; + } + + public String toString() { + return "CommonLocator(" + this.searchPaths + ")"; + } + + + public static void main(String[] theArgs) { + TargetLocator tl = new CommonLocator(); + tl.addSearchPath(java.nio.file.Paths.get("").toUri()); + tl.addSearchPath("file:///"); + debugLogger.log(LogLevel.DEBUG, CommonLocator.class.getName(), tl.resolve(theArgs[0]).toString()); + } +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java new file mode 100644 index 0000000..b05cff9 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java @@ -0,0 +1,22 @@ +package org.onap.sdc.dcae.checker; + +/* + * What exactly is allowed to go in here is a subject of meditation :) I would have said 'elements with a type' but + * that will no cover Requirement and Workflow, or topology template top elements but won't cover others .. + * + * Properties/Attributes/Inputs/Outputs are just Data constructs under a particular name. + */ +public enum Construct { + Data, + Requirement, + Capability, + Relationship, + Artifact, + Interface, + Node, + Group, + Policy, + Workflow +} + + diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java new file mode 100644 index 0000000..70552bb --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java @@ -0,0 +1,895 @@ +package org.onap.sdc.dcae.checker; + +import java.util.Collection; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.HashMap; +import java.util.Iterator; +import java.util.EnumSet; + +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; + +import com.google.common.collect.Table; +import com.google.common.collect.HashBasedTable; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; + +/* + * String -- 'primitive tosca type' converters, used in verifying valuations + */ +public class Data { + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private Data() { + } + + /* + */ + @FunctionalInterface + public static interface Evaluator { + + public boolean eval(Object theExpr, Map theDef, Checker.CheckContext theCtx); + } + + + /* data type processing */ + + private static Map typesByName = new HashMap(); + static { + //CoreType.String.toString(); + //CoreFunction.concat.toString(); + //Constraint.equal.toString(); + } + + + public static Data.Type typeByName(String theName) { + return typesByName.getOrDefault(theName, userType); + } +/* + public static Evaluator getTypeEvaluator(Type theType) { + } +*/ + + /* Needs a better name ?? 
RValue?? + * This is not an rvalue (C def) per se but the construct who's instances + * yield rvalues. It is a construct that yields data, not the data (yield) + * itself. + */ + public static interface Type { + + public String name(); + + public Evaluator evaluator(); + + public Evaluator constraintsEvaluator(); + } + + /* generic placeholder + */ + private static Type userType = new Type() { + + public String name() { + return null; + } + + public Evaluator evaluator() { + return Data::evalUser; + } + + public Evaluator constraintsEvaluator() { + return Data::evalUserConstraints; + } + }; + + + public static enum CoreType implements Type { + + String("string", + (expr,def,ctx) -> expr != null && expr instanceof String, + Data::evalScalarConstraints), + Integer("integer", + (expr,def,ctx) -> Data.valueOf(ctx, expr, Integer.class), + Data::evalScalarConstraints), + Float("float", + (expr,def,ctx) -> Data.valueOf(ctx, expr, Double.class, Integer.class), + Data::evalScalarConstraints), + Boolean("boolean", + (expr,def,ctx) -> Data.valueOf(ctx, expr, Boolean.class), + Data::evalScalarConstraints), + Null("null", + (expr,def,ctx) -> expr.equals("null"), + null), + Timestamp("timestamp", + (expr,def,ctx) -> timestampRegex.matcher(expr.toString()).matches(), + null), + List("list", Data::evalList, Data::evalListConstraints), + Map("map", Data::evalMap, Data::evalMapConstraints), + Version("version", + (expr,def,ctx) -> versionRegex.matcher(expr.toString()).matches(), + null), + /* use a scanner and check that the upper bound is indeed greater than + * the lower bound */ + Range("range", + (expr,def,ctx) -> { return rangeRegex.matcher(expr.toString()).matches();}, + null ), + Size("scalar-unit.size", + (expr,def,ctx) -> sizeRegex.matcher(expr.toString()).matches(), + null), + Time("scalar-unit.time", + (expr,def,ctx) -> timeRegex.matcher(expr.toString()).matches(), + null), + Frequency("scalar-unit.frequency", + (expr,def,ctx) -> frequencyRegex.matcher(expr.toString()).matches(), + null); + + + private String toscaName; + private Evaluator valueEvaluator, + constraintsEvaluator; + + private CoreType(String theName, Evaluator theValueEvaluator, Evaluator theConstraintsEvaluator) { + this.toscaName = theName; + this.valueEvaluator = theValueEvaluator; + this.constraintsEvaluator = theConstraintsEvaluator; + + if (typesByName == null) + throw new RuntimeException("No type index available!"); + + typesByName.put(this.toscaName, this); + } + + public String toString() { + return this.toscaName; + } + + public Evaluator evaluator() { + return this.valueEvaluator; + } + + public Evaluator constraintsEvaluator() { + return this.constraintsEvaluator; + } + } + + private static Pattern timestampRegex = null, + versionRegex = null, + rangeRegex = null, + sizeRegex = null, + timeRegex = null, + frequencyRegex = null; + + static { + try { + timestampRegex = Pattern.compile( + "\\p{Digit}+"); //?? where to find the definition + + //.[.[.[-> constraints = + (List>)theDef.get("constraints"); + if (constraints == null) { + return true; + } + + //check value against constraints + boolean res = true; + for (Map constraintDef: constraints) { + Map.Entry constraintEntry = + constraintDef.entrySet().iterator().next(); + Data.Constraint constraint = constraintByName(constraintEntry.getKey()); + +// the def passed here includes all constraints, not necessary! 
we can pass +// simple constraintEntry.getValue() + Evaluator constraintEvaluator = getTypeConstraintEvaluator(type, constraint); + if (constraintEvaluator == null) { + debugLogger.log(LogLevel.DEBUG, Data.class.getName(), "No constant evaluator available for {}/{}", type, constraint); + continue; + } + + if (!constraintEvaluator.eval(theVal, theDef, theCtx)) { + theCtx.addError("Value " + theVal + " failed constraint " + constraintEntry, null); + res = false; + } + } + return res; + } + + /* + * It assumes the specification is complete, i.e. it contains a valid + * entry_schema section. + * TODO: check constraints, i.e. entrySchema.get("constraints") + */ + public static boolean evalList(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { + try { + return evalCollection((List)theVal, theDef, theCtx); + } + catch (ClassCastException ccx) { + theCtx.addError("Value " + theVal + " not a list", null); + return false; + } + } + + public static boolean evalMap(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { + try { + return evalCollection(((Map)theVal).values(), theDef, theCtx); + } + catch (ClassCastException ccx) { + theCtx.addError("Value " + theVal + " not a map", null); + return false; + } + } + + + /** + * The elements of a collection can be of a core type or user defined type. + */ + private static boolean evalCollection(Collection theVals, + Map theDef, + Checker.CheckContext theCtx) { + Data.Type entryType = null; + Map entryTypeDef = (Map)theDef.get("entry_schema"); + if (null != entryTypeDef) + entryType = typeByName((String)entryTypeDef.get("type")); + + boolean res = true; + for (Object val: theVals) { + //check if the value is not a function call + Data.Function f = Data.function(val); + if (f != null && + f.evaluator().eval(val, entryTypeDef, theCtx)) { + res = false; + } + else if (entryType != null && + !entryType.evaluator().eval(val, entryTypeDef, theCtx)) { + res= false; + //the error should hav been reported by the particular evaluator + //theCtx.addError("Value " + val + " failed evaluation", null); + } + } + return res; + } + + public static boolean evalListConstraints(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { + return evalCollectionConstraints((List)theVal, theDef, theCtx); + } + + public static boolean evalMapConstraints(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { + return evalCollectionConstraints(((Map)theVal).values(), theDef, theCtx); + } + + private static boolean evalCollectionConstraints(Collection theVals, + Map theDef, + Checker.CheckContext theCtx) { + //should check overall constraints + + if (theVals == null) + return true; + + Map entryTypeDef = (Map)theDef.get("entry_schema"); + if (null == entryTypeDef) + return true; + + String entryTypeName = (String)entryTypeDef.get("type"); + Data.Type entryType = typeByName(entryTypeName); + + boolean res = true; + for (Object val: theVals) { + Evaluator entryEvaluator = entryType.constraintsEvaluator(); + if (entryEvaluator != null && + !entryEvaluator.eval(val, entryTypeDef, theCtx)) { + res= false; + //the constraints evaluator should have already added an error, but it also adds some context + //theCtx.addError("Value " + val + " failed evaluation", null); + } + } + return res; + } + + /* + * All required properties across the hierarchical defintion must be present + * TODO: The expr cannot contain any entry not specified in the type definition + */ + public static boolean evalUser(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { + + 
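+    // Illustration (hypothetical type and values, not taken from any real template): for a data type
+    //   org.example.Endpoint:
+    //     properties:
+    //       host: { type: string }
+    //       port: { type: integer }
+    // a value such as { host: "10.0.0.1", port: 8080 } is walked property by property and each entry
+    // that is present gets evaluated against the evaluator of its declared property type.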
boolean res = true; + Map val = (Map)theVal; + //must be done with respect to the super-type(s) definition + Iterator props = theCtx.catalog() + .facets(Construct.Data, + Facet.properties, + (String)theDef.get("type")); + while (props.hasNext()) { + Map.Entry propEntry = props.next(); + Map propDef = (Map)propEntry.getValue(); + Object propVal = val.get(propEntry.getKey()); + + if (propVal != null) { + Data.Type propType = typeByName((String)propDef.get("type")); + + if (!propType.evaluator().eval(propVal, propDef, theCtx)) { + res= false; + //the constraints evaluator should have already added an error + //theCtx.addError("Property " + propEntry.getKey() + " failed evaluation for " + propVal, null); + } + } + } + return res; + } + + public static boolean evalUserConstraints(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { + boolean res = true; + Map val = (Map)theVal; + Iterator props = theCtx.catalog() + .facets(Construct.Data, + Facet.properties, + (String)theDef.get("type")); + while (props.hasNext()) { + Map.Entry propEntry = props.next(); + Map propDef = (Map)propEntry.getValue(); + Object propVal = val.get(propEntry.getKey()); + + if (propVal != null) { + Data.Type propType = typeByName((String)propDef.get("type")); + + if (propType.constraintsEvaluator() != null && + !propType.constraintsEvaluator().eval(propVal, propDef, theCtx)) { + res= false; + //the constraints evaluator should have already added an error + //theCtx.addError("Property " + propEntry.getKey() + " failed evaluation for " + propVal, null); + } + } + else { + if (Boolean.TRUE == (Boolean)propDef.getOrDefault("required", Boolean.FALSE) && + !propDef.containsKey("default")) { + theCtx.addError("Property " + propEntry.getKey() + " failed 'required' constraint; definition is " + propDef, null); + res = false; + } + } + } + return res; + } + + private static boolean valueOf(Checker.CheckContext theCtx, + Object theExpr, + Class ... theTypes) { + for (Class type: theTypes) { + if (type.isAssignableFrom(theExpr.getClass())) { + return true; + } + } + + theCtx.addError("Expression " + theExpr + " as " + theExpr.getClass().getName() + " is not compatible with any of required types: " + Arrays.toString(theTypes), null); + return false; + } + +/* + private static boolean valueOf(Class theTarget, + String theExpr, + Checker.CheckContext theCtx) { + try { + theTarget.getMethod("valueOf", new Class[] {String.class}) + .invoke(null, theExpr); + return true; + } + catch (InvocationTargetException itx) { + theCtx.addError("Failed to parse " + theExpr + " as a " + theTarget.getName(), itx.getCause()); + return false; + } + catch (Exception x) { + theCtx.addError("Failed to valueOf " + theExpr + " as a " + theTarget.getName(), x); + return false; + } + } +*/ + + /* + * Function e(valuation) + * ? + * note to self : is there a more efficient way of retrieving a map's + * single entry? (without knowing the key) + * + * ! Function evaluators have to handle null definition (i.e. perform argument checking) so that + * we can use them in the context of collections with without entry_schemas + */ + + //just as Type but is it worth expressing this 'commonality'?? + + public static interface Function { + + public String name(); + + public Evaluator evaluator(); + } + + /* + * This is a heuristic induced from the tosca specification .. 
it answers the + * question of wether the given expression is a function + */ + public static Function function(Object theExpr) { + if (theExpr instanceof Map && + ((Map)theExpr).size() == 1) { + try { + return Enum.valueOf(CoreFunction.class, functionName(theExpr)); + } + catch (IllegalArgumentException iax) { + //no such function but we cannot really record an error as we only guessed the expression as being a function .. + debugLogger.log(LogLevel.DEBUG, Data.class.getName(), "Failed attempt to interpret {} as a function call", theExpr); + } + } + + return null; + } + + /* + */ + public static String functionName(Object theExpr) { + return (String) + ((Map.Entry) + ((Map)theExpr).entrySet().iterator().next()) + .getKey(); + } + + /* + */ + public static Data.Function functionByName(String theName) { + return Enum.valueOf(CoreFunction.class, theName); + } + + /* + */ + public static enum CoreFunction implements Function { + + concat(Data::evalConcat), + token(Data::evalToken), + get_input(Data::evalGetInput), + get_property(Data::evalGetProperty), + get_attribute(Data::evalGetAttribute), + get_operation_output((expr,def,ctx) -> true), + get_nodes_of_type(Data::evalGetNodesOfType), + get_artifact((expr,def,ctx) -> true); + + private Evaluator evaluator; + + private CoreFunction(Evaluator theEval) { + this.evaluator = theEval; + } + + public Evaluator evaluator() { + return this.evaluator; + } + } + + private static boolean evalConcat( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + return true; + } + + private static boolean evalToken( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + return true; + } + + private static boolean evalGetInput( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + Map val = (Map)theVal; + Map.Entry entry = (Map.Entry)val.entrySet().iterator().next(); + + if (!(entry.getValue() instanceof String)) { + theCtx.addError("get_input: argument must be a String" ,null); + return false; + } + + //check that an input with the given name exists and has a compatible type + Map inputDef = theCtx.catalog() + .getTemplate(theCtx.target(), Construct.Data, (String)entry.getValue()); + if (inputDef == null) { + theCtx.addError("get_input: no such input " + entry.getValue(), null); + return false; + } + + if (theDef == null) + return true; + + //the output must be type compatible with the input + String targetType = (String)theDef.get("type"); + if (targetType != null) { + String inputType = (String)inputDef.get("type"); + + if (!theCtx.catalog() + .isDerivedFrom(Construct.Data, inputType, targetType)) { + theCtx.addError("get_input: input type " + inputType + " is incompatible with the target type " + targetType, null); + return false; + } + } + + return true; + } + + /* + * Who's the smarty that decided to define optional arguments in between + * required ones ?! 
+ * (factors the evaluation of get_attribute and get_property) + */ + private static boolean evalGetData( + Object theVal, Map theDef, + EnumSet theFacets, Checker.CheckContext theCtx) { + + Map val = (Map)theVal; + Map.Entry entry = (Map.Entry)val.entrySet().iterator().next(); + + if (!(entry.getValue() instanceof List)) { + theCtx.addError("get_property: argument must be a List" ,null); + return false; + } + + List args = (List)entry.getValue(); + if (args.size() < 2) { + theCtx.addError("'get_property' has at least 2 arguments", null); + return false; + } + + //the first argument is a node or relationship template + String tmpl = (String)args.get(0); + Construct tmplConstruct = null; + Map tmplSpec = null; + + if ("SELF".equals(tmpl)) { + tmpl = theCtx.enclosingConstruct(Construct.Node); + if (tmpl == null) { + tmpl = theCtx.enclosingConstruct(Construct.Relationship); + if (tmpl == null) { + theCtx.addError("'get_property' invalid SELF reference: no node or relationship template in scope at " + theCtx.getPath(), null); + return false; + } + else { + tmplConstruct = Construct.Relationship; + } + } + else { + tmplConstruct = Construct.Node; + } + tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), tmplConstruct, tmpl); + } + else if ("SOURCE".equals("tmpl")) { + //we are in the scope of a relationship template and this is the source node template. + tmpl = theCtx.enclosingConstruct(Construct.Relationship); + if (tmpl == null) { + theCtx.addError("'get_property' invalid SOURCE reference: no relationship template in scope at " + theCtx.getPath(), null); + return false; + } + + return true; + } + else if ("TARGET".equals("tmpl")) { + //we are in the scope of a relationship template and this is the target node template. + tmpl = theCtx.enclosingConstruct(Construct.Relationship); + if (tmpl == null) { + theCtx.addError("'get_property' invalid TARGET reference: no relationship template in scope at " + theCtx.getPath(), null); + return false; + } + + return true; + } + else if ("HOST".equals("tmpl")) { + tmpl = theCtx.enclosingConstruct(Construct.Node); + if (tmpl == null) { + theCtx.addError("'get_property' invalid HOST reference: no node template in scope at " + theCtx.getPath(), null); + return false; + } + + return true; + } + else { + //try node template first + tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), Construct.Node, tmpl); + if (tmplSpec == null) { + //try relationship + tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), Construct.Relationship, tmpl); + if (tmplSpec == null) { + theCtx.addError("'get_data' invalid template reference '" + tmpl + "': no node or relationship template with this name", null); + return false; + } + else { + tmplConstruct = Construct.Relationship; + } + } + else { + tmplConstruct = Construct.Node; + } + } + + int facetNameIndex = 1; + Construct facetConstruct = tmplConstruct; //who's construct the facet is supposed to belong to + Map facetConstructSpec = null; + String facetConstructType = null; + + if (tmplConstruct.equals(Construct.Node) && + args.size() > 2) { + //the second arg might be a capability or requirement name. If it is a + //capability than the third argument becomes a property of the + //coresponding capability type. If it is a requirement than the + //requirement definition indicates a capability who's type has a + //property with the name indicated in the third argument .. 
+ // + //while the spec does not make it explicit this can only take place + //if the first argument turned out to be a node template (as relationship + //templates/types do not have capabilities/requirements + String secondArg = (String)args.get(1); + if ((facetConstructSpec = theCtx.catalog().getFacetDefinition( + tmplConstruct, + (String)tmplSpec.get("type"), + Facet.capabilities, + secondArg)) != null) { + facetNameIndex = 2; + facetConstruct = Construct.Capability; + facetConstructType = (String)facetConstructSpec.get("type"); + } + else if ((facetConstructSpec = theCtx.catalog().getRequirementDefinition( + tmplConstruct, + (String)tmplSpec.get("type"), + secondArg)) != null) { + facetNameIndex = 2; + facetConstruct = Construct.Capability; + + //find the specof the capability this requirement points to + //TODO: check, can the capability reference be anything else but a capability tyep? + facetConstructType = (String)facetConstructSpec.get("capability"); + } + } + else { + //we'll attempt to handle it as a property of the node template + facetConstruct = Construct.Node; + facetConstructSpec = tmplSpec; + facetConstructType = (String)facetConstructSpec.get("type"); + } + + //validate the facet name + Map facetSpec = null; + { + String facetName = (String)args.get(facetNameIndex); + for (Facet facet: theFacets) { + facetSpec = theCtx.catalog() + .getFacetDefinition( + facetConstruct, + facetConstructType, + facet, + facetName); + if (facetSpec != null) + break; + } + + if (facetSpec == null) { +//TODO: not the greatest message if the call strated with a requirement .. + theCtx.addError("'get_data' invalid reference, '" + facetConstruct + "' " + facetConstructType + " has no " + theFacets + " with name " + facetName, null); + return false; + } + } + + //the rest of the arguments have to resolve to a field of the property's + //data type; the propertySpec contains the type specification + for (int i = facetNameIndex + 1; i < args.size(); i++) { + } + + return true; + } + + /**/ + private static boolean evalGetProperty( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + return evalGetData(theVal, theDef, EnumSet.of(Facet.properties), theCtx); + } + + /* + * get_property and get_attribute are identical, just operating on different + * facets, with one exception: there is an intrinsec attribute for every + * declared property. 
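+   * For illustration (the attribute name is hypothetical): { get_attribute: [ SELF, service_ip ] }
+   * is accepted if 'service_ip' is declared as either an attribute or a property of the template in
+   * scope, while get_property only searches the properties facet.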
+ */ + private static boolean evalGetAttribute( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + return evalGetData(theVal, theDef, EnumSet.of(Facet.attributes, Facet.properties), theCtx); + } + + private static boolean evalGetNodesOfType( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + + Map val = (Map)theVal; + Map.Entry entry = (Map.Entry)val.entrySet().iterator().next(); + + if (!(entry.getValue() instanceof String)) { + theCtx.addError("get_nodes_of_type: argument must be a String", null); + return false; + } + + String arg = (String)entry.getValue(); + + if (null == theCtx.catalog().getTypeDefinition(Construct.Node, arg)) { + theCtx.addError("get_nodes_of_type: no such node type " + arg, null); + return false; + } + else { + return true; + } + } + + /* */ + public static Constraint constraintByName(String theName) { + return Enum.valueOf(Constraint.class, theName); + } + + /* */ + public static Constraint constraint(Object theExpr) { + if (theExpr instanceof Map && + ((Map)theExpr).size() == 1) { + return constraintByName(constraintName(theExpr)); + } + + return null; + } + + /* */ + public static String constraintName(Object theExpr) { + return (String) + ((Map.Entry) + ((Map)theExpr).entrySet().iterator().next()) + .getKey(); + } + + private static Object getConstraintValue(Map theDef, + Constraint theConstraint) { + List constraints = (List)theDef.get("constraints"); + if (null == constraints) + return null; + + for(Map constraint: constraints) { + Object val = constraint.get(theConstraint.toString()); + if (val != null) + return val; + } + return null; + } + + public static enum Constraint { + equal, + greater_than, + greater_or_equal, + less_than, + less_or_equal, + in_range, + valid_values, + length, + min_length, + max_length, + pattern; + } + + + /* hold the constraint evaluators for pairs of type/constraint. + * If a pair is not present than the given constraint does not apply + * to the type. 
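+   * E.g. the (String, pattern) pair is registered below while (Boolean, pattern) is not;
+   * getTypeConstraintEvaluator falls back to an always-true evaluator for unregistered pairs.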
+ */ + private static Table typeConstraintEvaluator =null; + + public static Evaluator + getTypeConstraintEvaluator(Type theType, Constraint theConstraint) { + if (typeConstraintEvaluator == null) { + typeConstraintEvaluator = HashBasedTable.create(); + + typeConstraintEvaluator.put(CoreType.String, Constraint.equal, + (val,def,ctx) -> val.equals(getConstraintValue(def,Constraint.equal))); + typeConstraintEvaluator.put(CoreType.String, Constraint.valid_values, + (val,def,ctx) -> { + return ((List)getConstraintValue(def,Constraint.valid_values)).contains(val); + }); + typeConstraintEvaluator.put(CoreType.String, Constraint.length, + (val,def,ctx) -> ((String)val).length() == ((Number)getConstraintValue(def,Constraint.length)).intValue()); + typeConstraintEvaluator.put(CoreType.String, Constraint.min_length, + (val,def,ctx) -> ((String)val).length() >= ((Number)getConstraintValue(def,Constraint.min_length)).intValue()); + typeConstraintEvaluator.put(CoreType.String, Constraint.max_length, + (val,def,ctx) -> ((String)val).length() <= ((Number)getConstraintValue(def,Constraint.max_length)).intValue()); + typeConstraintEvaluator.put(CoreType.String, Constraint.pattern, + (val,def,ctx) -> Pattern.compile((String)getConstraintValue(def,Constraint.pattern)) + .matcher((String)val) + .matches()); + + typeConstraintEvaluator.put(CoreType.Integer, Constraint.equal, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.equal)) == 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.greater_than, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.greater_than)) > 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.greater_or_equal, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.greater_or_equal)) >= 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.less_than, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.less_than)) < 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.less_or_equal, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.less_or_equal)) <= 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.in_range, + (val,def,ctx) -> { List range = (List)getConstraintValue(def, Constraint.in_range); + return ((Integer)val).compareTo(range.get(0)) >= 0 && + ((Integer)val).compareTo(range.get(1)) <= 0; + }); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.valid_values, + (val,def,ctx) -> ((List)getConstraintValue(def, Constraint.valid_values)).contains((Integer)val)); + +//yaml parser represents yaml floats as java Double and we are even more tolerant as many double values +//get represented as ints and the parser will substitute an Integer + typeConstraintEvaluator.put(CoreType.Float, Constraint.equal, + (val,def,ctx) -> ((Number)val).doubleValue() == ((Number)getConstraintValue(def,Constraint.equal)).doubleValue()); + typeConstraintEvaluator.put(CoreType.Float, Constraint.greater_than, + (val,def,ctx) -> ((Number)val).doubleValue() > ((Number)getConstraintValue(def,Constraint.greater_than)).doubleValue()); + typeConstraintEvaluator.put(CoreType.Float, Constraint.greater_or_equal, + (val,def,ctx) -> ((Number)val).doubleValue() >= ((Number)getConstraintValue(def,Constraint.greater_or_equal)).doubleValue()); + typeConstraintEvaluator.put(CoreType.Float, Constraint.less_than, + (val,def,ctx) -> ((Number)val).doubleValue() < 
((Number)getConstraintValue(def,Constraint.less_than)).doubleValue()); + typeConstraintEvaluator.put(CoreType.Float, Constraint.less_or_equal, + (val,def,ctx) -> ((Number)val).doubleValue() <= ((Number)getConstraintValue(def,Constraint.less_or_equal)).doubleValue()); + typeConstraintEvaluator.put(CoreType.Float, Constraint.in_range, + (val,def,ctx) -> { List range = (List)getConstraintValue(def, Constraint.in_range); + return ((Number)val).doubleValue() >= range.get(0).doubleValue() && + ((Number)val).doubleValue() <= range.get(1).doubleValue(); + }); + typeConstraintEvaluator.put(CoreType.Float, Constraint.valid_values, + (val,def,ctx) -> ((List)getConstraintValue(def, Constraint.valid_values)).contains((Number)val)); + } + + Evaluator eval = typeConstraintEvaluator.get(theType, theConstraint); + + return eval == null ? (expr,def,ctx) -> true + : eval; + } + + + private static boolean stringValidValues(String theVal, + List theValidValues, + Checker.CheckContext theCtx) { + if (!theValidValues.contains(theVal)) { + theCtx.addError("not a valid value: " + theVal + " not part of " + theValidValues, null); + return false; + } + + return true; + } + + public static final void main(String[] theArgs) { + Data.CoreType dt = Enum.valueOf(Data.CoreType.class, theArgs[0]); + debugLogger.log(LogLevel.DEBUG, Data.class.getName(), "{} > {}", theArgs[1], dt.evaluator().eval(theArgs[1], null, null)); + } +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Facet.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Facet.java new file mode 100644 index 0000000..3dfd140 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Facet.java @@ -0,0 +1,37 @@ +package org.onap.sdc.dcae.checker; + +/* + * Oddballs: + * - requirements (a requirement does not have a type - i.e. is not based + * on a Construct) and can target a node, a capability or both .. When present + * as a facet of another Construct it is also the only one represented as a + * sequence so it will need special handling anyway. + */ +public enum Facet { + + inputs(Construct.Data), + outputs(Construct.Data), + properties(Construct.Data), + attributes(Construct.Data), + capabilities(Construct.Capability), + //requirements(Construct.Capability),//?? + artifacts(Construct.Artifact), + interfaces(Construct.Interface); + /* + Node + Relationship + they can be considered as facets of the topology template ... 
+ */ + + private Construct construct; + + private Facet(Construct theConstruct) { + this.construct = theConstruct; + } + + public Construct construct() { + return this.construct; + } +} + + diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/JSP.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/JSP.java new file mode 100644 index 0000000..797b4e2 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/JSP.java @@ -0,0 +1,624 @@ +package org.onap.sdc.dcae.checker; + +import java.io.IOException; +import java.io.File; + +import java.net.URI; + +import java.util.Set; +import java.util.Map; +import java.util.List; +import java.util.Arrays; +import java.util.Iterator; +import java.util.Collection; +import java.util.Collections; +import java.util.stream.Collectors; +import java.util.function.Consumer; +import java.util.function.BiFunction; + +import javax.script.Compilable; +import javax.script.CompiledScript; +import javax.script.Bindings; +import javax.script.ScriptContext; +import javax.script.SimpleScriptContext; +import javax.script.ScriptEngine; +import javax.script.ScriptEngineManager; +import javax.script.ScriptException; + +import jdk.nashorn.api.scripting.JSObject; +import jdk.nashorn.api.scripting.AbstractJSObject; + +import org.apache.commons.jxpath.JXPathContext; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; + + +/** + * Java Script Processor + * Each script is represented by a Target and the JSP processor maintains a collection of Targets, i.e. scripts. + * A collection of targets can be used with only one JSP processor at a time (as the processor stores processor specific * compiled versions within the target). + */ +public class JSP implements Processor { + + private ScriptEngine engine; + private Collection targets; + + public JSP(String[] theScripts) { + this(Arrays.stream(theScripts) + .map(s -> new Target(s, new File(s).toURI())) + .collect(Collectors.toList())); + } + + public JSP(File[] theScripts) { + this(Arrays.stream(theScripts) + .map(s -> new Target(s.getName(), s.toURI())) + .collect(Collectors.toList())); + } + + public JSP(URI[] theScripts) { + this(Arrays.stream(theScripts) + .map(s -> new Target(s.toString(), s)) + .collect(Collectors.toList())); + } + + /** + * The given collection is allowed to change while used by the JSP engine but access to it needs to be synchronized. + * The engine uses the target field of each Target to store a compiled version of each script. An external reset of + * this field (maybe in order to indicate some change in the Target) will caue a re-compilation of the Target. 
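+   * A minimal usage sketch (the script name and inputs are hypothetical):
+   *   Report report = new JSP(new String[] {"rules/basic.js"})
+   *                       .process(theCatalog)
+   *                       .with("target", theTarget)
+   *                       .process()
+   *                       .run();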
+ */ + public JSP(Collection theTargets) { + this.targets = theTargets; + ScriptEngineManager engineManager = new ScriptEngineManager(); + this.engine = engineManager.getEngineByName("nashorn"); + } + + public Collection targets() { + return this.targets; + } + + /* pre-compiles all known targets + */ + protected void compile() throws ProcessorException { + synchronized (this.targets) { + for (Target t: this.targets) + compile(t); + } + } + + protected CompiledScript compile(Target theTarget) throws ProcessorException { + + CompiledScript cs = null; + + synchronized(theTarget) { + try { + cs = (CompiledScript)theTarget.getTarget(); + } + catch(ClassCastException ccx) { + throw new ProcessorException(theTarget, "Unexpected target content"); + } + + if (cs == null) { + try { + cs = ((Compilable)this.engine).compile(theTarget.open()); + theTarget.setTarget(cs); + } + catch (IOException iox) { + throw new ProcessorException(theTarget, "Failed to read script", iox); + } + catch (ScriptException sx) { + throw new ProcessorException(theTarget, "Failed to compile script", sx); + } + } + } + + return cs; + } + + public ContextBuilder process(Catalog theCatalog) { + return new ContextBuilder( + this.engine.createBindings()) + //new DelegateBindings(this.engine.getBindings(ScriptContext.ENGINE_SCOPE))) + .with("catalog", new JSCatalog(theCatalog)); + } + + /** + */ + public class ContextBuilder implements ProcessBuilder { + + private ScriptContext context; + + protected ContextBuilder(Bindings theBindings) { + this.context = new SimpleScriptContext(); + this.context.setBindings(theBindings, Process.PROCESS_SCOPE /*ScriptContext.ENGINE_SCOPE*/); + } + + public ContextBuilder withPreprocessing(BiFunction thePreprocessing) { + this.context.setAttribute("preprocessor", thePreprocessing, Process.PROCESS_SCOPE); + return this; + } + + public ContextBuilder withPostprocessing(BiFunction thePostprocessing) { + this.context.setAttribute("postprocessor", thePostprocessing, Process.PROCESS_SCOPE); + return this; + } + + public ContextBuilder with(String theName, Object theValue) { + this.context.getBindings(Process.PROCESS_SCOPE).put(theName, theValue); + return this; + } + + public ContextBuilder withOpt(String theName, Object theValue) { + if (theValue != null) + this.context.getBindings(Process.PROCESS_SCOPE).put(theName, theValue); + return this; + } + + public JSProcess process() { + return new JSProcess(this.context); + } + + } + + /** + */ + public class JSProcess implements Process { + + private Report report = new Report(); + private Iterator scripts; + private JScriptInfo scriptInfo = new JScriptInfo(); + private Target script; //script currently being evaluated + private boolean stopped = false; + private ScriptContext context; + + private JSProcess(ScriptContext theContext) { + + this.context = theContext; + this.context.getBindings(Process.PROCESS_SCOPE) + .put("stop", new Consumer() { + public void accept(String theMsg) { + JSProcess.this.stopped = true; + //log the message?? 
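+              // stopping only prevents the remaining scripts from running; errors already reported are kept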
+ } + }); + this.context.getBindings(Process.PROCESS_SCOPE) + .put("report", new Consumer() { + public void accept(String theMsg) { + JSProcess.this.report.add(new ProcessorException(script, theMsg)); + } + }); + this.context.getBindings(Process.PROCESS_SCOPE) + .put("reportOnce", new Consumer() { + public void accept(String theMsg) { + JSProcess.this.report.addOnce(new ProcessorException(script, theMsg)); + } + }); + this.scripts = JSP.this.targets.iterator(); + } + + protected String infoName(Target theTarget) { + String name = theTarget.getName(); + return name.substring(0, name.indexOf(".")) + "_info"; + } + + public JSP processor() { + return JSP.this; + } + + public boolean hasNext() { + return !this.stopped && this.scripts.hasNext(); + } + + protected Target next() { + if (hasNext()) + return this.script = this.scripts.next(); + else + throw new RuntimeException("Process is completed"); + } + + protected boolean runProcessor(String theName) throws ProcessorException { + BiFunction proc = (BiFunction) + this.context.getAttribute(theName, Process.PROCESS_SCOPE); + if (proc != null) { + try { + return proc.apply(this.script, this.context).booleanValue(); + } + catch (Exception x) { + throw new ProcessorException(this.script, theName + "failed", x); + } + } + + return true; + } + + public Process runNext() throws ProcessorException { + Target target = next(); + synchronized(target) { + String name = infoName(target); + try { + if (runProcessor("preprocessor")) { + compile(target).eval(this.context); + runProcessor("postprocessor"); + } + } + catch (ScriptException sx) { + throw new ProcessorException(target, "Failed to execute validation script", sx); + } + } + + return this; + } + + public Process runNextSilently() { + try { + return runNext(); + } + catch (ProcessorException px) { + this.report.add(px); + } + return this; + } + + public Report run() { + while (hasNext()) + runNextSilently(); + return this.report; + } + + public void stop() { + this.stopped = true; + } + + public Report report() { + return this.report; + } + } + + private static class JScriptInfo implements TargetInfo { + + private JSObject info; + + protected JScriptInfo() { + } + + protected JScriptInfo setInfo(JSObject theInfo) { + this.info = theInfo; + return this; + } + + public Set entryNames() { + return this.info == null ? Collections.EMPTY_SET : this.info.keySet(); + } + + public boolean hasEntry(String theName) { + return this.info == null ? false : this.info.hasMember(theName); + } + + public Object getEntry(String theName) { + return this.info == null ? null : + this.info.hasMember(theName) ? this.info.getMember(theName) : null; + } + } + + + /* Exposes the catalog information in a more Java Script friendly manner. 
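+   * Scripts reach this wrapper through the 'catalog' binding installed by JSP.process(), e.g.
+   * (hypothetical check): catalog.isDerivedFrom('Node', someTypeName, 'tosca.nodes.Root').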
+ */ + public static class JSCatalog { + + private Catalog catalog; + + private JSCatalog(Catalog theCatalog) { + this.catalog = theCatalog; + } + + /** */ + public JSTarget[] targets() { + return + this.catalog.targets() + .stream() + .map(t -> { return new JSTarget(t); }) + .toArray(size -> new JSTarget[size]); //or toArray(JSNode[]::new) + } + + public JSTarget[] topTargets() { + return + this.catalog.topTargets() + .stream() + .map(t -> { return new JSTarget(t); }) + .toArray(size -> new JSTarget[size]); //or toArray(JSNode[]::new) + } + + /** */ + public String[] types(String theConstruct) { + Set names = + this.catalog.getConstructTypes(Enum.valueOf(Construct.class,theConstruct)).keySet(); + return names.toArray(new String[names.size()]); + } + + /** */ + public boolean isDerivedFrom(String theConstruct, String theType, String theSuperType) { + return this.catalog.isDerivedFrom(Enum.valueOf(Construct.class,theConstruct), theType, theSuperType); + } + + /** */ + public JSObject facetDefinition(String theConstruct, String theType, String theFacet, String theName) { + return new JSElement(theName, + this.catalog.getFacetDefinition( + Enum.valueOf(Construct.class, theConstruct), theType, + Enum.valueOf(Facet.class, theFacet), theName)); + } + + + /** */ +/* + public JSElement[] targetNodes(Target theTarget) { + return + this.catalog.getTargetTemplates(theTarget, Construct.Node) + .entrySet() + .stream() + .map(e -> { return new JSElement(e.getKey(),e.getValue()); }) + .toArray(size -> new JSElement[size]); //or toArray(JSNode[]::new) + } +*/ + + public class JSTarget { + + private Target tgt; + private JXPathContext jxPath; + + private JSTarget(Target theTarget) { + this.tgt = theTarget; + this.jxPath = JXPathContext.newContext(this.tgt.getTarget()); + this.jxPath.setLenient(true); + } + + public String getName() { return this.tgt.getName(); } + + public JSElement resolve(String thePath) { + Object res = jxPath.getValue(thePath); + if (res instanceof Map) { + return new JSElement(thePath, (Map)res); + } + //?? + return null; + } + + public JSElement[] getInputs() { + + Map inputs = (Map)jxPath.getValue("/topology_template/inputs"); + return (inputs == null) ? + new JSElement[0] + : inputs.entrySet() + .stream() + .map(e -> { return new JSElement(e.getKey(),e.getValue()); }) + .toArray(size -> new JSElement[size]); + } + +// public JSElement[] getOutputs() { +// } + + public JSElement getMetadata() { + return new JSElement("metadata", (Map)jxPath.getValue("/metadata")); + } + + public JSElement[] getNodes() { + return + JSCatalog.this.catalog.getTargetTemplates(this.tgt, Construct.Node) + .entrySet() + .stream() + .map(e -> { return new JSElement(e.getKey(),e.getValue()); }) + .toArray(size -> new JSElement[size]); //or toArray(JSElement[]::new) + } + +// public JSElement[] getPolicies() { +// } + + } + + + /* + */ + public class JSElement extends AbstractJSObject { + + + private String name; + private Map def; + + private JSElement(String theName, Object theDef) { + this.name = theName; + this.def = theDef == null ? Collections.emptyMap() + : (theDef instanceof Map) ? 
(Map)theDef + : Collections.singletonMap("value",theDef); + } + + public String getName() { return this.name; } + + public boolean hasMember(String theMember) { + return this.def.containsKey(theMember); + } + + public Object getMember(final String theMember) { + Object val = this.def.get(theMember); + if (val != null) { + if (val instanceof Map) { + return new JSElement(theMember, val); + /* + return ((Map)obj).entrySet() + .stream() + .map((Map.Entry e) -> { return new JSElement(e.getKey(),e.getValue()); }) + .toArray(size -> new JSElement[size]); + */ + } + + if (val instanceof List) { + //a property value can be a list of: primitive types or maps (for a user defined type) + //requirements are exposed as a list of maps + List lval = (List)val; + if (lval.get(0) instanceof Map) { + return lval + .stream() + .map((e) -> new JSElement(theMember, e)) + .toArray(size -> new JSElement[size]); + + /* + return val + .stream() + .map((e) -> { + Map.Entry re = ((Map)e).entrySet().iterator().next(); + return new JSElement(re.getKey(), re.getValue()); + }) + .toArray(size -> new JSElement[size]); + */ + } + } + + return val; + } + else { + if ("name".equals(theMember)) + return this.name; + if ("toString".equals(theMember)) + return _toString; + if ("hasOwnProperty".equals(theMember)) + return _hasOwnProperty; + return super.getMember(theMember); + } + } + /* TODO: we do not expose 'name' in here */ + public Set keySet() { + return this.def.keySet(); + } + + } + + + static final JSObject _toString = + new TracerJSObject("_toString") { + public Object call(Object thiz, Object... args) { + return ((JSElement)thiz).def.toString(); + } + + public boolean isFunction() { return true; } + }; + + static final JSObject _hasOwnProperty = + new TracerJSObject("_hasOwnProperty") { + public Object call(Object thiz, Object... args) { + return ((JSElement)thiz).def.containsKey(args[0]); + } + + public boolean isFunction() { return true; } + }; + + }//JSCatalog + + + + private static class TracerJSObject extends AbstractJSObject { + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private String mark; + + TracerJSObject(String theMark) { + this.mark = theMark; + } + + public Object call(Object thiz, Object... args) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:call", this.mark); + return super.call(thiz, args); + } + + public Object newObject(Object... 
args) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:newObject", this.mark); + return super.newObject(args); + } + + public Object eval(String s) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:eval", this.mark); + return super.eval(s); + } + + public Object getMember(String name) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:getMember", this.mark); + return super.getMember(name); + } + + public Object getSlot(int index) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:getSlot", this.mark); + return super.getSlot(index); + } + + public boolean hasMember(String name) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:hasMember", this.mark); + return super.hasMember(name); + } + + public boolean hasSlot(int slot) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:hasSlot", this.mark); + return super.hasSlot(slot); + } + + public void removeMember(String name) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:removeMember", this.mark); + super.removeMember(name); + } + + public void setMember(String name, Object value) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:setMember", this.mark); + super.setMember(name,value); + } + + public void setSlot(int index, Object value) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:setSlot", this.mark); + super.setSlot(index,value); + } + + public Set keySet() { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:JSObject:keySet", this.mark); + return super.keySet(); + } + + public Collection values() { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:values", this.mark); + return super.values(); + } + + public boolean isInstance(Object instance) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isInstance", this.mark); + return super.isInstance(instance); + } + + public boolean isInstanceOf(Object clazz) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isInstanceOf", this.mark); + return super.isInstance(clazz); + } + + public String getClassName() { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:getClassName", this.mark); + return super.getClassName(); + } + + public boolean isFunction() { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isFunction", this.mark); + return super.isFunction(); + } + + public boolean isStrictFunction() { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isStrictFunction", this.mark); + return super.isStrictFunction(); + } + + public boolean isArray() { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isArray", this.mark); + return super.isArray(); + } + + public Object getDefaultValue(Class hint) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:getDefaultValue({})", this.mark, hint); + return super.getDefaultValue(hint); + } + } + +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Process.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Process.java new file mode 100644 index 0000000..0f529af --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Process.java @@ -0,0 +1,29 @@ +package org.onap.sdc.dcae.checker; + +/** + * + */ +public interface Process { + + public static final int PROCESS_SCOPE = 100; + + /** + * the processor running this process + */ + public T processor(); + + /* */ + public boolean hasNext(); + + /* */ + public 
Process runNext() throws ProcessorException; + + /* execute all steps to completion + */ + public Report run(); + + /* execution report + */ + public Report report(); + +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessBuilder.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessBuilder.java new file mode 100644 index 0000000..8295055 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessBuilder.java @@ -0,0 +1,24 @@ +package org.onap.sdc.dcae.checker; + + +/** + * Just in case you might want to do something with a template (set) once it was checked + */ +public interface ProcessBuilder { + + /* */ + public ProcessBuilder with(String theName, Object theValue); + + /* */ + public ProcessBuilder withOpt(String theName, Object theValue); + + /* */ + public Process process(); + + /* */ + default public Report run() { + return process() + .run(); + } + +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Processor.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Processor.java new file mode 100644 index 0000000..7f29d23 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Processor.java @@ -0,0 +1,11 @@ +package org.onap.sdc.dcae.checker; + + +/** + * Just in case you might want to do something with a template (set) once it was checked + */ +public interface Processor> { + + /* */ + public ProcessBuilder process(Catalog theCatalog); +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessorException.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessorException.java new file mode 100644 index 0000000..d4c5571 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessorException.java @@ -0,0 +1,28 @@ +package org.onap.sdc.dcae.checker; + + +/** + */ +public class ProcessorException extends CheckerException { + + private Target target; + + public ProcessorException(Target theTarget, String theMsg, Throwable theCause) { + super(theMsg, theCause); + this.target = theTarget; + } + + public ProcessorException(Target theTarget, String theMsg) { + super(theMsg); + this.target = theTarget; + } + + public Target getTarget() { + return this.target; + } + + @Override + public String getMessage() { + return this.target + ":" + super.getMessage() + (getCause() == null ? 
"" : ("(" + getCause() + ")")); + } +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Report.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Report.java new file mode 100644 index 0000000..0f1b7c3 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Report.java @@ -0,0 +1,102 @@ +package org.onap.sdc.dcae.checker; + +import java.io.IOException; + +import java.util.LinkedList; +import java.util.Collections; + +import org.yaml.snakeyaml.error.MarkedYAMLException; +import kwalify.ValidationException; + +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; + +/** + * Represents a collection of errors that occured during one of the stages + * of the checker: yaml parsing, yaml validation (tosca syntax), tosca checking + */ +/* + * This needs some re-thinking: while it is useful to have all original errors introducing + * the custom json conversion (just to help the service) is not great either. + * I was torn between this approach or creating a custom deserializer and object mapper (which + * would have kept all the customized serialization in the service but then the error analysis + * would be duplicated there too ..). + */ +@JsonSerialize(contentUsing=org.onap.sdc.dcae.checker.Report.ReportEntrySerializer.class) +public class Report extends LinkedList { + + public Report() { + } + + public Report(T[] theErrors) { + Collections.addAll(this, theErrors); + } + + public boolean hasErrors() { + return !this.isEmpty(); + } + + public boolean addOnce(T theError) { + for (T e: this) { + if (e.getMessage().equals(theError.getMessage())) + return false; + } + return add(theError); + } + + public String toString() { + StringBuilder sb = new StringBuilder(this.size() + " errors"); + for (Throwable x: this) { + sb.append("\n") + .append("[") + .append(location(x)) + .append("] ") + .append(x.getMessage()); + if (x.getCause() != null) { + sb.append("\n\tCaused by:\n") + .append(x.getCause()); + } + } + sb.append("\n"); + return sb.toString(); + } + + private static String location(Throwable theError) { + if (theError instanceof MarkedYAMLException) { + MarkedYAMLException mx = (MarkedYAMLException)theError; + return "line " + mx.getProblemMark().getLine() + ", column " + mx.getProblemMark().getColumn(); + } + if (theError instanceof ValidationException) { + ValidationException vx = (ValidationException)theError; + return vx.getPath(); + } + if (theError instanceof TargetError) { + TargetError tx = (TargetError)theError; + return tx.getLocation(); + } + return "unknown"; + } + + + public static class ReportEntrySerializer extends StdSerializer { + + public ReportEntrySerializer() { + super(Throwable.class); + } + + @Override + public void serialize(Throwable theError, JsonGenerator theGenerator, SerializerProvider theProvider) + throws IOException, JsonProcessingException { + theGenerator.writeStartObject(); + theGenerator.writeStringField("location", location(theError)); + theGenerator.writeStringField("message", theError.getMessage()); + if (theError.getCause() != null) + theGenerator.writeStringField("cause", theError.getCause().toString()); + theGenerator.writeEndObject(); + } + } +} + diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Repository.java 
b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Repository.java new file mode 100644 index 0000000..9cb853b --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Repository.java @@ -0,0 +1,50 @@ +package org.onap.sdc.dcae.checker; + + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; + +import java.net.URI; +import java.net.URL; +import java.net.MalformedURLException; + +import java.util.Map; + +/** + * Represents a 'container' of (yaml) TOSCA documents + */ +public abstract class Repository { + + protected OnapLoggerError errLogger = OnapLoggerError.getInstance(); + protected OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private String name, + description; + protected URI rootURI; + protected Map credential; //TOSCA type tosca.datatype.Credential + + public Repository(String theName, URI theRoot) { + this.name = theName; + this.rootURI = theRoot; + } + + public String getName() { + return this.name; + } + + public URI getRoot() { + return this.rootURI; + } + + /** optional */ + public abstract Iterable targets(); + + /** */ + public abstract Target resolve(URI theURI); + + @Override + public String toString() { + return "Repository " + this.name + " at " + this.rootURI; + } +} + diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java new file mode 100644 index 0000000..b630564 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java @@ -0,0 +1,80 @@ +package org.onap.sdc.dcae.checker; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; + +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.io.BufferedReader; +import java.io.IOException; + +import java.net.URI; +import java.net.URL; +import java.net.MalformedURLException; + +/** + * Represents a yaml document to be parsed/validated/checked + */ +public class Target { + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + + private String name; //maintained mainly for logging + private URI location; + private Object target; //this is the parsed form of the target + + private Report report = new Report(); //collects the errors related to this target + + public Target(String theName, URI theLocation) { + this.name = theName; + this.location = theLocation; + } + + public String getName() { + return this.name; + } + + public URI getLocation() { + return this.location; + } + + public Report getReport() { + return this.report; + } + + public void report(Throwable theError) { + this.report.add(theError); + } + + public void report(String theErrMsg) { + this.report.add(new Exception(theErrMsg)); + } + + public void setTarget(Object theTarget) { + this.target = theTarget; + } + + public Object getTarget() { + return this.target; + } + + /* + * @return a reader for the source or null if failed + */ + public Reader open() throws IOException { + + return new BufferedReader( + new InputStreamReader( + this.location.toURL().openStream())); + } + + public String toString() { + //return String.format("Target %s (%.20s ...)", this.location, this.target == null ? 
"" : this.target.toString()); + return String.format("Target %s at %s", this.name, this.location); + + } +} + diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetError.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetError.java new file mode 100644 index 0000000..0764a56 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetError.java @@ -0,0 +1,43 @@ +package org.onap.sdc.dcae.checker; + + +/** + * A target error represents an error in target the resource being checked. + * We only represent it as a Throwable because the libraries that perform parsing and syntax validation + * represent their errors as such .. + */ +public class TargetError extends Throwable { + + /* + public static enum Level { + error, + warning + } + */ + + private String location; //we might need an more detailed representation + //here: it could be a YAML document jpath or + //document location (line). + private String target; + + public TargetError(String theTarget, String theLocation, String theMessage, Throwable theCause) { + super(theMessage, theCause); + this.target = theTarget; + this.location = theLocation; + } + + public TargetError(String theTarget, String theLocation, String theMessage) { + this(theTarget, theLocation, theMessage, null); + } + + public String getTarget() { + return this.target; + } + + public String getLocation() { + return this.location; + } + + +} + diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetInfo.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetInfo.java new file mode 100644 index 0000000..480b6a8 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetInfo.java @@ -0,0 +1,20 @@ +package org.onap.sdc.dcae.checker; + +import java.util.Set; + + +/** + * Exposes target properties. How they are obtained/calculated not of importance here. 
+ */ +public interface TargetInfo { + + /** */ + public Set entryNames(); + + /** */ + public boolean hasEntry(String theName); + + /** */ + public Object getEntry(String theName); + +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetLocator.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetLocator.java new file mode 100644 index 0000000..9b82f16 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetLocator.java @@ -0,0 +1,20 @@ +package org.onap.sdc.dcae.checker; + +import java.net.URI; + + +public interface TargetLocator { + + /** */ + public boolean addSearchPath(URI theURI); + + /** */ + public boolean addSearchPath(String thePath); + + /** */ + public Iterable searchPaths(); + + /** */ + public Target resolve(String theName); + +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Workflows.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Workflows.java new file mode 100644 index 0000000..88eb192 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Workflows.java @@ -0,0 +1,120 @@ +package org.onap.sdc.dcae.checker; + +import java.util.Map; + +import org.onap.sdc.dcae.checker.annotations.Checks; + +import java.util.List; +import java.util.Iterator; + +@Checks +public class Workflows { + + @Checks(path="/topology_template/workflows") + public void check_workflows(Map theDefinition, Checker.CheckContext theContext) { + + theContext.enter("workflows"); + + try { + if(!theContext.checker().checkDefinition("workflows", theDefinition, theContext)) + return; + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_workflow_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + + public void check_workflow_definition(String theName, Map theDef, Checker.CheckContext theContext) { + + theContext.enter("workflow", Construct.Workflow); + + if (theDef.containsKey("inputs")) { + theContext + .checker() + .checkProperties((Map)theDef.get("inputs"), theContext); + } + + if (theDef.containsKey("preconditions")) { + check_workflow_preconditions_definition((List)theDef.get("preconditions"), theContext); + } + + if (theDef.containsKey("steps")) { + check_workflow_steps_definition((Map)theDef.get("steps"), theContext); + } + + theContext.exit(); + } + + + public void check_workflow_steps_definition(Map theSteps, Checker.CheckContext theContext) { + + theContext.enter("steps"); + + try { + for (Iterator> i = theSteps.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_workflow_step_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + + } + + public void check_workflow_step_definition(String theName, Map theDef, Checker.CheckContext theContext) { + + theContext.enter(theName); + try { + //requireed entry, must be a node or group template + String target = (String)theDef.get("target"); + Construct targetConstruct = null; + + if (theContext.catalog().hasTemplate(theContext.target(), Construct.Group, target)) { + targetConstruct = Construct.Group; + } + else if (theContext.catalog().hasTemplate(theContext.target(), Construct.Node, target)) { + targetConstruct = Construct.Node; + } + else { + theContext.addError("The 'target' entry must contain a reference to a node template or group template, '" + target + "' is none of those", null); + } + + String 
targetRelationship = (String)theDef.get("target_relationship"); + if (targetConstruct.equals(Construct.Node)) { + if (targetRelationship != null) { + //must be a requirement of the target Node + } + } + + + } + finally { + theContext.exit(); + } + } + + public void check_workflow_preconditions_definition(List thePreconditions, Checker.CheckContext theContext) { + + theContext.enter("preconditions"); + + try { + for (Map precondition: thePreconditions) { + check_workflow_precondition_definition(precondition, theContext); + } + } + finally { + theContext.exit(); + } + } + + public void check_workflow_precondition_definition(Map theDef, Checker.CheckContext theContext) { + } + +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/.Validates.java.swp b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/.Validates.java.swp new file mode 100644 index 0000000..dae35da Binary files /dev/null and b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/.Validates.java.swp differ diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Catalogs.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Catalogs.java new file mode 100644 index 0000000..8dbe275 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Catalogs.java @@ -0,0 +1,14 @@ +package org.onap.sdc.dcae.checker.annotations; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.ElementType; +import java.lang.annotation.Target; + + +/** */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.METHOD}) +public @interface Catalogs { + String path() default "/"; +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Checks.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Checks.java new file mode 100644 index 0000000..96349d7 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Checks.java @@ -0,0 +1,19 @@ +package org.onap.sdc.dcae.checker.annotations; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.ElementType; +import java.lang.annotation.Target; + + +/** */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.TYPE, ElementType.METHOD}) +/* The iffy part: as a type annotaton we do not need a path or a version specification, + as a method annotation it is mandatory (cannot be the default) + We could forsee that a version indcation at type level would cover all check handler within the type + */ +public @interface Checks { + String path() default "/"; + String[] version() default { "1.0", "1.0.0", "1.1", "1.1.0" }; +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Validates.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Validates.java new file mode 100644 index 0000000..29e080d --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Validates.java @@ -0,0 +1,15 @@ +package org.onap.sdc.dcae.checker.annotations; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.ElementType; +import java.lang.annotation.Target; + + +/** */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.TYPE, ElementType.METHOD}) +public 
@interface Validates { + String rule() default "/"; + String[] timing() default { "post" }; +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/package-info.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/package-info.java new file mode 100644 index 0000000..da2c5ba --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/package-info.java @@ -0,0 +1,101 @@ +/** + * The checker provides an api/tool for the verification of TOSCA yaml files + * as specified in the OASIS specification found at: + * http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/TOSCA-Simple-Profile-YAML-v1.0.pdf + * + * It provides a three stage processing of a tosca yaml file: + * - yaml verification: is the document a valid yaml document as per yaml.org/spec. In particular we're using the snakeyaml library for parsing the yaml document to a nested structure of java objects. + * - tosca yaml grammar validation: is the document a valid tosca yaml + * document, as per the the TOSCA simple profile for yaml. We use a modified + * version of the kwalify library for this task. The grammar for TOSCA yaml + * is itself a yaml document (found in the package in + * resources/tosca-schema.yaml). There are certain limitations on how far + * this grammar can go. + * - consistency verification: we check the type hierarchies for all TOSCA + * constructs (data types, capability types, node types, etc), the definition + * of all facets of a construct (properties, attributes, etc) across the type + * hierachies, the conformity of construct templates (node templates, ..) with + * their types, data valuations(input assignements, constants, function calls). + * + * Each stage is blocking, i.e. a stage will be performed only if the previous + * one completed successfully. + * + * The verification is done across all the imported documents. The common TOSCA + * types are by default made available to all documents being processed (the + * specification is in resources/tosca-common-types.yaml). Networking related + * types can be made available by importing resources/tosca-network-types.yaml + * while the tosca nfv profile definitions are available at + * resources/tosca-nfv-types.yaml. + * + * Besides snakeyaml and kwalify this package also has dependencies on Google's + * guava library and Apache's jxpath. + * + * The three java interfaces exposed by the package are the Checker, Target + * and Report. A Target represents a document processed by the Checker. While + * the Checker starts with a top Target, through import statements it can end up + * processing a number of Targets. The results of processing a Target are made + * available through a Report which currently is nothing more that a list of + * recorded errors. + * + *
+ * {@code + * Checker checker = new Checker(); + * checker.check("tests/example.yaml"); + * + * for (Target t: checker.targets()) + * System.out.println(t.getLocation() + "\n" + t.getReport()); + * } + *
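+ *
+ * Checked targets can additionally be run through the javascript processor (JSP) shipped in
+ * this package. The snippet below is only an illustrative sketch: the 'scriptTargets'
+ * collection (Targets wrapping the .js rule files) and the 'catalog' instance are assumed to
+ * be obtained by the caller, they are not produced by the calls shown here.
+ *
+ * <pre>
+ * {@code
+ * JSP jsp = new JSP(scriptTargets);
+ * Report scriptReport = jsp.process(catalog)             // exposes the catalog to the scripts
+ *                          .with("strict", Boolean.TRUE) // 'strict' is just an example binding
+ *                          .process()
+ *                          .run();
+ * }
+ * </pre>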
+ * + * The errors are recorded as instances of Exception, mostly due to the fact + * snakeyaml and kwalify do report errors as exceptions. As such there are 3 + * basic types of errros to be expected in a report: YAMLException (from + * snakeyaml, related to parsing), ValidationException (from kwalify, tosca + * grammar validation), TargetException (from the checker itself). This might + * change as we're looking to unify the way errors are reported. A Report + * object has a user friendly toString function. + * + * A CheckerException thrown during the checking process is an indication of a + * malfunction in the checker itself. + * + * The checker handles targets as URIs. The resolution of a target consists in + * going from a string representing some path/uri to the absolute URI. URIs can + * be of any java recognizable schema: file, http, etc. A TargetResolver (not + * currently exposed through the API) attempts in order: + * - if the String is an absolute URI, keep it as such + * - if the String is a relative URI attempt to resolve it as relative to + * know search paths (pre-configured absolute URIs: current directory and the + * root of the main target's URI). The option of adding custom search paths will + * be added. + * - attempt to resolve as a classpath resource (a jar:file: URI) + * + * At this time there are no options for the checker (please provide + * requirements to be considered). + * + * + * + * Other: + * - the checker performs during tosca grammar validation a 'normalization' + * process as the tosca yaml profile allows for short forms in the + * specification of a number of its constructs (see spec). The checker changes + * the actual structure of the parsed document such that only normalized + * (complete) forms of specification are present before the checking phase. + * (the kwalify library was extended in order to be able to specify these + * short forms in the grammar itself and process/tolerate them at validation + * time). + * + * - the checker contains an internal catalog where the types and templates + * of different constructs are aggregated and indexed across all targets in + * order to facilitate the checking phase. Catalogs can be 'linked' and the + * resolution process delegated (the checker maintains a basic catalog with + * the core and common types and there is always a second catalog maintaining + * the information related to the current targets). + * The catalog is currently not exposed by the library. + * + * - imports processing: the import statements present in a target are first + * 'detected' during tosca yaml grammar validation phase. At that stage all + * imports are (recursively) parsed and validated (first 2 phases). Checking + * off all imports (recursively) is done during stage 3. + * + */ +package org.onap.sdc.dcae.checker; \ No newline at end of file diff --git a/dcaedt_validator/checker/src/main/resources/tosca/tosca-common-types.yaml b/dcaedt_validator/checker/src/main/resources/tosca/tosca-common-types.yaml new file mode 100644 index 0000000..c26c6e8 --- /dev/null +++ b/dcaedt_validator/checker/src/main/resources/tosca/tosca-common-types.yaml @@ -0,0 +1,665 @@ +tosca_definitions_version: tosca_simple_yaml_1_1_0 +description: > + TOSCA simple profile common types. To be included by default in all templates. 
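+#NOTE (added): the checker loads this document by default for every target it processes (see
+#the checker package documentation); tosca-network-types.yaml and tosca-nfv-types.yaml are
+#only pulled in through explicit imports.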
+ + +data_types: + +#see 5.3.1 + tosca.datatypes.Root: + description: The TOSCA root Data Type all other TOSCA base Data Types derive from + +#from 5.3.2 + tosca.datatypes.Credential: + derived_from: tosca.datatypes.Root + properties: + protocol: + type: string + required: false + token_type: + type: string + default: password + token: + type: string + keys: + type: map + required: false + entry_schema: string + user: + type: string + required: false + +#from 5.3.3 + tosca.datatypes.TimeInterval: + derived_from: tosca.datatypes.Root + properties: + start_time: + type: timestamp + required: true + end_time: + type: timestamp + required: true + +#from 5.3.4 + tosca.datatypes.network.NetworkInfo: + derived_from: tosca.datatypes.Root + properties: + network_name: + type: string + network_id: + type: string + addresses: + type: list + entry_schema: string + +#from 5.3.5 + tosca.datatypes.network.PortInfo: + derived_from: tosca.datatypes.Root + properties: + port_name: + type: string + port_id: + type: string + network_id: + type: string + mac_address: + type: string + addresses: + type: list + entry_schema: string + +#from 5.3.6 + tosca.datatypes.network.PortDef: + derived_from: integer + constraints: + - in_range: [ 1, 65535 ] + +#from 5.3.7 + tosca.datatypes.network.PortSpec: + derived_from: tosca.datatypes.Root + properties: + protocol: + type: string + required: true + default: tcp + constraints: + - valid_values: [ udp, tcp, igmp ] + target: +#I think the intent was (same for source): +#type: tosca.datatypes.network.PortDef + type: integer + entry_schema: tosca.datatypes.network.PortDef + target_range: + type: range + constraints: + - in_range: [ 1, 65535 ] + source: + type: integer + entry_schema: tosca.datatypes.network.PortDef + source_range: + type: range + constraints: + - in_range: [ 1, 65535 ] + +capability_types: + +#from 5.5.1 + tosca.capabilities.Root: + description: The TOSCA root Capability Type all other TOSCA base Capability Types derive from + +#from 5.5.2 + tosca.capabilities.Node: + derived_from: tosca.capabilities.Root + +#from 5.5.3 + tosca.capabilities.Compute: + derived_from: tosca.capabilities.Root + properties: + name: + type: string + required: false + num_cpus: + type: integer + required: false + constraints: + - greater_or_equal: 1 + cpu_frequency: + type: scalar-unit.frequency + required: false + constraints: + - greater_or_equal: 0.1 GHz + disk_size: + type: scalar-unit.size + required: false + constraints: + - greater_or_equal: 0 MB + mem_size: + type: scalar-unit.size + required: false + constraints: + - greater_or_equal: 0 MB + +#from 5.5.4 + tosca.capabilities.Network: + derived_from: tosca.capabilities.Root + properties: + name: + type: string + required: false + +#from 5.5.5 + tosca.capabilities.Storage: + derived_from: tosca.capabilities.Root + properties: + name: + type: string + required: false + +#from 5.5.6 + tosca.capabilities.compute.Container: + derived_from: tosca.capabilities.Compute + +#from 5.5.7 + tosca.capabilities.Endpoint: + derived_from: tosca.capabilities.Root + properties: + protocol: + type: string + default: tcp + port: + type: tosca.datatypes.network.PortDef + required: false + secure: + type: boolean + default: false + url_path: + type: string + required: false + port_name: + type: string + required: false + network_name: + type: string + required: false + default: PRIVATE + initiator: + type: string + default: source + constraints: + - valid_values: [ source, target, peer ] + ports: + type: map + required: false + constraints: + - 
min_length: 1 + entry_schema: tosca.datatypes.network.PortSpec + attributes: + ip_address: + type: string + +#from 5.5.8 + tosca.capabilities.Endpoint.Public: + derived_from: tosca.capabilities.Endpoint + properties: + # Change the default network_name to use the first public network found + network_name: + type: string + default: PUBLIC + floating: + description: > + indicates that the public address should be allocated from a pool of floating IPs that are associated with the network. + type: boolean + default: false + status: experimental + dns_name: + description: The optional name to register with DNS + type: string + required: false + status: experimental + +#from 5.5.9 + tosca.capabilities.Endpoint.Admin: + derived_from: tosca.capabilities.Endpoint + # Change Endpoint secure indicator to true from its default of false + properties: + secure: + type: boolean + default: true + constraints: + - equal: true + +#from 5.5.10 + tosca.capabilities.Endpoint.Database: + derived_from: tosca.capabilities.Endpoint + +#from 5.5.11 + tosca.capabilities.Attachment: + derived_from: tosca.capabilities.Root + +#from 5.5.12 + tosca.capabilities.OperatingSystem: + derived_from: tosca.capabilities.Root + properties: + architecture: + type: string + required: false + type: + type: string + required: false + distribution: + type: string + required: false + version: + type: version + required: false + +#from 5.5.13 + tosca.capabilities.Scalable: + derived_from: tosca.capabilities.Root + properties: + min_instances: + type: integer + default: 1 + max_instances: + type: integer + default: 1 + default_instances: + type: integer + +#from C.3.11 + tosca.capabilities.network.Bindable: + derived_from: tosca.capabilities.Node + + +relationship_types: + +#from 5.7.1 + tosca.relationships.Root: + description: The TOSCA root Relationship Type all other TOSCA base Relationship Types derive from + attributes: + tosca_id: + type: string + tosca_name: + type: string + interfaces: + Configure: + type: tosca.interfaces.relationship.Configure + +#from 5.7.2 + tosca.relationships.DependsOn: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Node ] + +#from 5.7.3 + tosca.relationships.HostedOn: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.compute.Container ] + +#from 5.7.4 + tosca.relationships.ConnectsTo: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Endpoint ] + properties: + credential: + type: tosca.datatypes.Credential + required: false + +#from 5.7.5 + tosca.relationships.AttachesTo: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Attachment ] + properties: + location: + type: string + constraints: + - min_length: 1 + device: + type: string + required: false + +#from 5.7.6 + tosca.relationships.RoutesTo: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.Endpoint ] + + +interface_types: + +#from 5.8.3 + tosca.interfaces.Root: +# derived_from: tosca.entity.Root + description: The TOSCA root Interface Type all other TOSCA base Interface Types derive from + +#from 5.8.4 + tosca.interfaces.node.lifecycle.Standard: + derived_from: tosca.interfaces.Root + create: + description: Standard lifecycle create operation. + configure: + description: Standard lifecycle configure operation. + start: + description: Standard lifecycle start operation. + stop: + description: Standard lifecycle stop operation. 
+ delete: + description: Standard lifecycle delete operation. + +#from 5.8.5 + tosca.interfaces.relationship.Configure: + derived_from: tosca.interfaces.Root + pre_configure_source: + description: Operation to pre-configure the source endpoint. + pre_configure_target: + description: Operation to pre-configure the target endpoint. + post_configure_source: + description: Operation to post-configure the source endpoint. + post_configure_target: + description: Operation to post-configure the target endpoint. + add_target: + description: Operation to notify the source node of a target node being added via a relationship. + add_source: + description: Operation to notify the target node of a source node which is now available via a relationship. + target_changed: + description: Operation to notify source some property or attribute of the target changed + remove_target: + description: Operation to remove a target node. + + +node_types: + +#from 5.9.1 + tosca.nodes.Root: + description: The TOSCA Node Type all other TOSCA base Node Types derive from + attributes: + tosca_id: + type: string + tosca_name: + type: string + state: + type: string + capabilities: + feature: + type: tosca.capabilities.Node + requirements: + - dependency: + capability: tosca.capabilities.Node + node: tosca.nodes.Root + relationship: tosca.relationships.DependsOn + occurrences: [ 0, UNBOUNDED ] + interfaces: + Standard: + type: tosca.interfaces.node.lifecycle.Standard + +#from 5.9.2 + tosca.nodes.Compute: + derived_from: tosca.nodes.Root + attributes: + private_address: + type: string + public_address: + type: string + networks: + type: map +#entry schema for attribute has a string value as per A.5.9 .. +#the standard document defines it as a map similar to the property definition .. 
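+#(added) the 'ports' attribute below follows the same pattern, with tosca.datatypes.network.PortInfo entries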
+ entry_schema: tosca.datatypes.network.NetworkInfo + ports: + type: map + entry_schema: tosca.datatypes.network.PortInfo + requirements: + - local_storage: + capability: tosca.capabilities.Attachment + node: tosca.nodes.BlockStorage + relationship: tosca.relationships.AttachesTo + occurrences: [0, UNBOUNDED] + capabilities: + host: + type: tosca.capabilities.compute.Container + valid_source_types: [tosca.nodes.SoftwareComponent] + endpoint: + type: tosca.capabilities.Endpoint.Admin + os: + type: tosca.capabilities.OperatingSystem + scalable: + type: tosca.capabilities.Scalable + binding: + type: tosca.capabilities.network.Bindable + +#from 5.9.3 + tosca.nodes.SoftwareComponent: + derived_from: tosca.nodes.Root + properties: + # domain-specific software component version + component_version: + type: version + required: false + admin_credential: + type: tosca.datatypes.Credential + required: false + requirements: + - host: + capability: tosca.capabilities.compute.Container + node: tosca.nodes.Compute + relationship: tosca.relationships.HostedOn + +#from 5.9.4 + tosca.nodes.WebServer: + derived_from: tosca.nodes.SoftwareComponent + capabilities: + # Private, layer 4 endpoints + data_endpoint: tosca.capabilities.Endpoint + admin_endpoint: tosca.capabilities.Endpoint.Admin + host: + type: tosca.capabilities.compute.Container + valid_source_types: [ tosca.nodes.WebApplication ] + +#from 5.9.5 + tosca.nodes.WebApplication: + derived_from: tosca.nodes.Root + properties: + context_root: + type: string + capabilities: + app_endpoint: + type: tosca.capabilities.Endpoint + requirements: + - host: + capability: tosca.capabilities.compute.Container + node: tosca.nodes.WebServer + relationship: tosca.relationships.HostedOn + +#from 5.9.6 + tosca.nodes.DBMS: + derived_from: tosca.nodes.SoftwareComponent + properties: + root_password: + type: string + required: false + description: the optional root password for the DBMS service + port: + type: integer + required: false + description: the port the DBMS service will listen to for data and requests + capabilities: + host: + type: tosca.capabilities.compute.Container + valid_source_types: [ tosca.nodes.Database ] + +#from 5.9.7 + tosca.nodes.Database: + derived_from: tosca.nodes.Root + properties: + name: + type: string + description: the logical name of the database + port: + type: integer + description: the port the underlying database service will listen to for data + user: + type: string + description: the optional user account name for DB administration + required: false + password: + type: string + description: the optional password for the DB user account + required: false + requirements: + - host: + capability: tosca.capabilities.compute.Container + node: tosca.nodes.DBMS + relationship: tosca.relationships.HostedOn + capabilities: + database_endpoint: + type: tosca.capabilities.Endpoint.Database + +#from 5.9.8 + tosca.nodes.ObjectStorage: + derived_from: tosca.nodes.Root + properties: + name: + type: string + size: + type: scalar-unit.size + constraints: + - greater_or_equal: 0 GB + maxsize: + type: scalar-unit.size + constraints: + - greater_or_equal: 0 GB + capabilities: + storage_endpoint: + type: tosca.capabilities.Endpoint + +#from 5.9.9 + tosca.nodes.BlockStorage: + derived_from: tosca.nodes.Root + properties: + size: + type: scalar-unit.size + constraints: + - greater_or_equal: 1 MB + volume_id: + type: string + required: false + snapshot_id: + type: string + required: false + capabilities: + attachment: + type: tosca.capabilities.Attachment 
+ +#from 5.9.10 + tosca.nodes.Container.Runtime: + derived_from: tosca.nodes.SoftwareComponent + capabilities: + host: + type: tosca.capabilities.compute.Container + scalable: + type: tosca.capabilities.Scalable + +#from 5.9.11 + tosca.nodes.Container.Application: + derived_from: tosca.nodes.Root + requirements: + - host: + capability: tosca.capabilities.compute.Container + # node: tosca.nodes.Container !invalid node reference! + relationship: tosca.relationships.HostedOn + +#from 5.9.12 + tosca.nodes.LoadBalancer: + derived_from: tosca.nodes.Root + properties: + # TBD + algorithm: + type: string + required: false + status: experimental + capabilities: + client: + type: tosca.capabilities.Endpoint.Public + occurrences: [0, UNBOUNDED] + description: the Floating (IP) clients on the public network can connect to + requirements: + - application: + capability: tosca.capabilities.Endpoint + relationship: tosca.relationships.RoutesTo + occurrences: [0, UNBOUNDED] +# correction by jora: requirement defintion does not allow for a description entry +# description: Connection to one or more load balanced applications + +artifact_types: + +#from 5.4.1 + tosca.artifacts.Root: + description: The TOSCA Artifact Type all other TOSCA Artifact Types derive from + +#from 5.4.2 + tosca.artifacts.File: + derived_from: tosca.artifacts.Root + +#from 5.4.3 + tosca.artifacts.Deployment: + derived_from: tosca.artifacts.Root + description: TOSCA base type for deployment artifacts + +#from 5.4.3.3 + tosca.artifacts.Deployment.Image: + derived_from: tosca.artifacts.Deployment + +#from 5.4.3.4 + tosca.artifacts.Deployment.Image.VM: + derived_from: tosca.artifacts.Deployment.Image + description: Virtual Machine (VM) Image + +#from 5.4.4 + tosca.artifacts.Implementation: + derived_from: tosca.artifacts.Root + description: TOSCA base type for implementation artifacts + +#from 5.4.4.3 + tosca.artifacts.Implementation.Bash: + derived_from: tosca.artifacts.Implementation + description: Script artifact for the Unix Bash shell + mime_type: application/x-sh + file_ext: [ sh ] + +#from 5.4.4.4 + tosca.artifacts.Implementation.Python: + derived_from: tosca.artifacts.Implementation + description: Artifact for the interpreted Python language + mime_type: application/x-python + file_ext: [ py ] + + +#from 5.9 +group_types: + + tosca.groups.Root: + description: The TOSCA Group Type all other TOSCA Group Types derive from + interfaces: + Standard: + type: tosca.interfaces.node.lifecycle.Standard + +#from 5.10 +policy_types: + + tosca.policies.Root: + description: The TOSCA Policy Type all other TOSCA Policy Types derive from + + tosca.policies.Placement: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to govern placement of TOSCA nodes or groups of nodes. + + tosca.policies.Scaling: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to govern scaling of TOSCA nodes or groups of nodes. + + tosca.policies.Update: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to govern update of TOSCA nodes or groups of nodes. + + tosca.policies.Performance: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to declare performance requirements for TOSCA nodes or groups of nodes. 
+ diff --git a/dcaedt_validator/checker/src/main/resources/tosca/tosca-examples-types.yaml b/dcaedt_validator/checker/src/main/resources/tosca/tosca-examples-types.yaml new file mode 100644 index 0000000..5eee538 --- /dev/null +++ b/dcaedt_validator/checker/src/main/resources/tosca/tosca-examples-types.yaml @@ -0,0 +1,117 @@ +tosca_definitions_version: tosca_simple_yaml_1_0_0 +description: > + Non-normative type definitions, as per section 8 of TOSCA simple profile. + + +artifact_types: + + tosca.artifacts.Deployment.Image.Container.Docker: + derived_from: tosca.artifacts.Deployment.Image + description: Docker Container Image + + tosca.artifacts.Deployment.Image.VM.ISO: + derived_from: tosca.artifacts.Deployment.Image.VM + description: Virtual Machine (VM) image in ISO disk format + mime_type: application/octet-stream + file_ext: [ iso ] + + tosca.artifacts.Deployment.Image.VM.QCOW2: + derived_from: tosca.artifacts.Deployment.Image.VM + description: Virtual Machine (VM) image in QCOW v2 standard disk format + mime_type: application/octet-stream + file_ext: [ qcow2 ] + + +capability_types: + + tosca.capabilities.Container.Docker: + derived_from: tosca.capabilities.Container + properties: + version: + type: list + required: false + entry_schema: version + publish_all: + type: boolean + default: false + required: false + publish_ports: + type: list + entry_schema: tosca.datatypes.network.PortSpec + required: false + expose_ports: + type: list + entry_schema: tosca.datatypes.network.PortSpec + required: false + volumes: + type: list + entry_schema: string + required: false + + +node_types: + + tosca.nodes.Database.MySQL: + derived_from: tosca.nodes.Database + requirements: + - host: + capability: tosca.capabilities.Container + node: tosca.nodes.DBMS.MySQL + + tosca.nodes.DBMS.MySQL: + derived_from: tosca.nodes.DBMS + properties: + port: + type: integer + description: reflect the default MySQL server port + default: 3306 + root_password: + type: string + # MySQL requires a root_password for configuration + # Override parent DBMS definition to make this property required + required: true + capabilities: + # Further constrain the ‘host’ capability to only allow MySQL databases + host: + type: tosca.capabilities.Container + valid_source_types: [ tosca.nodes.Database.MySQL ] + + tosca.nodes.WebServer.Apache: + derived_from: tosca.nodes.WebServer + + tosca.nodes.WebApplication.WordPress: + derived_from: tosca.nodes.WebApplication + properties: + admin_user: + type: string + admin_password: + type: string + db_host: + type: string + requirements: + - database_endpoint: + capability: tosca.capabilities.Endpoint.Database + node: tosca.nodes.Database + relationship: tosca.relationships.ConnectsTo + + tosca.nodes.WebServer.Nodejs: + derived_from: tosca.nodes.WebServer + properties: + # Property to supply the desired implementation in the Github repository + github_url: + required: no + type: string + description: location of the application on the github. 
+ default: https://github.com/mmm/testnode.git + interfaces: + Standard: + type: tosca.interfaces.node.lifecycle.Standard + inputs: + github_url: + type: string + + tosca.nodes.Container.Application.Docker: + derived_from: tosca.nodes.Container.Application + requirements: + - host: + capability: tosca.capabilities.Container.Docker \ No newline at end of file diff --git a/dcaedt_validator/checker/src/main/resources/tosca/tosca-network-types.yaml b/dcaedt_validator/checker/src/main/resources/tosca/tosca-network-types.yaml new file mode 100644 index 0000000..e4930e0 --- /dev/null +++ b/dcaedt_validator/checker/src/main/resources/tosca/tosca-network-types.yaml @@ -0,0 +1,103 @@ +tosca_definitions_version: tosca_simple_yaml_1_0_0 +description: > + TOSCA simple profile for networking. + +metadata: + template_name: tosca_simple_networking + template_version: "1.0" + +#imports: +# - tosca-common-types.yaml + +node_types: + + tosca.nodes.network.Network: + derived_from: tosca.nodes.Root + properties: + ip_version: + type: integer + required: false + default: 4 + constraints: + - valid_values: [ 4, 6 ] + cidr: + type: string + required: false + start_ip: + type: string + required: false + end_ip: + type: string + required: false + gateway_ip: + type: string + required: false + network_name: + type: string + required: false + network_id: + type: string + required: false + segmentation_id: + type: string + required: false + network_type: + type: string + required: false + physical_network: + type: string + required: false + capabilities: + link: + type: tosca.capabilities.network.Linkable + + tosca.nodes.network.Port: + derived_from: tosca.nodes.Root + properties: + ip_address: + type: string + required: false + order: + type: integer + required: true + default: 0 + constraints: + - greater_or_equal: 0 + is_default: + type: boolean + required: false + default: false + ip_range_start: + type: string + required: false + ip_range_end: + type: string + required: false + requirements: + - link: + capability: tosca.capabilities.network.Linkable + relationship: tosca.relationships.network.LinksTo + - binding: + capability: tosca.capabilities.network.Bindable + relationship: tosca.relationships.network.BindsTo + + +capability_types: + + tosca.capabilities.network.Linkable: + derived_from: tosca.capabilities.Node + + # also part of common types - used in Compute node type + tosca.capabilities.network.Bindable: + derived_from: tosca.capabilities.Node + +relationship_types: + + tosca.relationships.network.LinksTo: + derived_from: tosca.relationships.DependsOn + valid_target_types: [ tosca.capabilities.network.Linkable ] + + tosca.relationships.network.BindsTo: + derived_from: tosca.relationships.DependsOn + valid_target_types: [ tosca.capabilities.network.Bindable ] + diff --git a/dcaedt_validator/checker/src/main/resources/tosca/tosca-nfv-types.yaml b/dcaedt_validator/checker/src/main/resources/tosca/tosca-nfv-types.yaml new file mode 100644 index 0000000..fd52f6b --- /dev/null +++ b/dcaedt_validator/checker/src/main/resources/tosca/tosca-nfv-types.yaml @@ -0,0 +1,143 @@ +tosca_definitions_version: tosca_simple_profile_for_nfv_1_0_0 +description: > + TOSCA simple profile for Network Function Virtualization (NFV). 
+ +metadata: + template_name: http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd01/tosca-nfv-v1.0-csd01.pdf + template_version: "1.0" + +#imports: +# - tosca-common-types.yaml + +capability_types: + +#from 6.3 + tosca.capabilities.nfv.VirtualLinkable: + derived_from: tosca.capabilities.Root + +#from 7.2.1 + tosca.capabilities.nfv.VirtualBindable: + derived_from: tosca.capabilities.Root +# breaks the virtualbinding requirement in the node type nfv.CP +# valid_source_types: [ tosca.nodes.nfv.VDU ] + +#from 7.2.2 + tosca.capabilities.nfv.HA: + derived_from: tosca.capabilities.Root + valid_source_types: [ tosca.nodes.nfv.VDU ] + +#from 7.2.3 + tosca.capabilities.nfv.HA.ActiveActive: + derived_from: tosca.capabilities.nfv.HA + +#from 7.2.4 + tosca.capabilities.nfv.HA.ActivePassive: + derived_from: tosca.capabilities.nfv.HA + +#from 7.2.5 + tosca.capabilities.nfv.Metric: + derived_from: tosca.capabilities.Root + + +relationship_types: + +#from 6.4 + tosca.relationships.nfv.VirtualLinksTo: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ] + +#from 7.3.1 + tosca.relationships.nfv.VirtualBindsTo: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.nfv.VirtualBindable] + +#from 7.3.2 + tosca.relationships.nfv.HA: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.nfv.HA] + +#from 7.3.3 + tosca.relationships.nfv.Monitor: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.nfv.Metric] + + +node_types: + +#from 7.4.1 + tosca.nodes.nfv.VNF: + derived_from: tosca.nodes.Root + properties: + id: + type: string + description: ID of this VNF + vendor: + type: string + description: name of the vendor who generate this VNF + version: + type: version + description: version of the software for this VNF + requirements: + - virtualLink: + capability: tosca.capabilities.nfv.VirtualLinkable + +#from 7.4.2 + tosca.nodes.nfv.VDU: + derived_from: tosca.nodes.SoftwareComponent + capabilities: + high_availability: + type: tosca.capabilities.nfv.HA + Virtualbinding: + type: tosca.capabilities.nfv.VirtualBindable + monitoring_parameter: + type: tosca.capabilities.nfv.Metric + requirements: + - high_availability: + capability: tosca.capabilities.nfv.HA + relationship: tosca.relationships.nfv.HA + occurrences: [ 0, 1 ] + - host: + capability: tosca.capabilities.Container + node: tosca.nodes.Compute + relationship: tosca.relationships.HostedOn + +#from 7.4.3 + tosca.nodes.nfv.CP: + derived_from: tosca.nodes.Root + properties: + type: + type: string + required: false + requirements: + - virtualLink: + capability: tosca.capabilities.nfv.VirtualLinkable + - virtualbinding: + capability: tosca.capabilities.nfv.VirtualBindable + attributes: + IP_address: + type: string +#!attributes do not take required .. 
required: false + +#from 8.1 + tosca.nodes.nfv.VL: + derived_from: tosca.nodes.Root + properties: + vendor: + type: string + required: true + description: name of the vendor who generate this VL + capabilities: + virtual_linkable: + type: tosca.capabilities.nfv.VirtualLinkable + +#from 8.2 + tosca.nodes.nfv.VL.ELine: + derived_from: tosca.nodes.nfv.VL + +#from 8.3 + tosca.nodes.nfv.VL.ELAN: + derived_from: tosca.nodes.nfv.VL + +#from + tosca.nodes.nfv.VL.ETree: + derived_from: tosca.nodes.nfv.VL diff --git a/dcaedt_validator/checker/src/main/resources/tosca/tosca_simple_yaml_1_0.grammar b/dcaedt_validator/checker/src/main/resources/tosca/tosca_simple_yaml_1_0.grammar new file mode 100644 index 0000000..9653086 --- /dev/null +++ b/dcaedt_validator/checker/src/main/resources/tosca/tosca_simple_yaml_1_0.grammar @@ -0,0 +1,1262 @@ +_status_values: &status_values + enum: + - supported + - unsupported + - experimental + - deprecated + +#I do not know that the lists and maps qualify as 'primitive' .. +_primitive_types: &primitive_types + enum: [string,integer,float,boolean,timestamp,list,map,version,range,scalar-unit.size,scalar_unit.frequency,scalar_unit.time] + +#needs custom validation as we have to make sure there are 2 elements and allow for the +#UNBOUNDED keyword as second element +_range_definition: &range_definition + type: seq + name: range_definition + sequence: + - type: scalar + +#see A.5.2 +#this is where the need of verifying the size of a collection (sequence/map) came from +#this is specified as a sequence where each entry is a map with one entry?? +_constraints_sequence: &constraints_sequence + name: constraints_sequence + short: 0 + type: seq + sequence: + - type: map +# length: 1 + mapping: + equal: + desc: "Constrains a property or parameter to a value equal to the value declared." + type: any + required: no + greater_than: + desc: "Constrains a property or parameter to a value greater than the value declared" + type: scalar + required: no + greater_or_equal: + desc: "Constrains a property or parameter to a value greater than or equal to the value declared." + type: scalar + required: no + less_than: + desc: "Constrains a property or parameter to a value less than the value declared" + type: scalar + required: no + less_or_equal: + desc: "Constrains a property or parameter to a value less than or equal to the value declared." + type: scalar + required: no + in_range: + desc: "Constrains a property or parameter to a value in range of (inclusive) the two values declared. +" + type: seq +# length: 2 + sequence: + - type: scalar + required: no + valid_values: + desc: "Constrains a property or parameter to a value that is in the list of declared values" + type: seq + sequence: + - type: scalar + required: no + length: + desc: "Constrains the property or parameter to a value of a given length." + type: int + required: no + min_length: + desc: "Constrains the property or parameter to a value to a minimum length" + type: scalar + required: no + max_length: + desc: "Constrains the property or parameter to a value to a maximum length" + type: scalar + required: no + pattern: + desc: "Constrains the property or parameter to a value that is allowed by the provided regular expression." + type: str + required: no + +# section A.5.3 property_filter_definition +# it is a constraints sequence that gets attached to a property .. 
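# For orientation: this grammar appears to be written in a Kwalify-style schema
# notation (type/map/mapping/seq/sequence, required, enum, pattern), with keys
# such as 'short' that look like checker-specific extensions for the TOSCA
# short-form notations. The constraints_sequence rule above, for instance, is
# meant to accept TOSCA constraint clause lists such as the following
# hypothetical property snippet (values made up for illustration):
#
#  constraints:
#    - greater_or_equal: 0
#    - less_than: 1024
#    - valid_values: [ 80, 443, 8080 ]
#
# Each entry is a map naming a single constraint operator, which is why the
# rule is declared as a sequence of maps. The property_filter_definition below
# simply maps property names to such constraint clause lists.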
+_property_filter_definition: &property_filter_definition + name: property_filter_definition + type: map + mapping: + =: + *constraints_sequence + +#section A.5.4 node_filter_definition +_node_filter_definition: &node_filter_definition + type: map + name: node_filter_definition + mapping: + properties: + desc: "property names to constraints to be applied to those properties" + required: no + type: seq + sequence: + - *property_filter_definition +# - type: map +# mapping: +# =: +# *constraints_sequence + capabilities: + desc: "" + required: no + type: seq + sequence: + - type: map + name: node_filter_capabilities_sequence + desc: "the key is a capability name or type" + mapping: + =: + name: node_filter_capabilities_entry + type: map + mapping: + properties: + desc: "the capability properties and their constraints" + name: node_filter_capabilities_properties + type: seq + sequence: + - type: map + name: node_filter_capabilities_property + mapping: + =: *constraints_sequence + +#used in property and attribute definitions +_entry_schema_definition: &entry_schema_definition + desc: "The optional key that is used to declare the name of the Datatype definition for entries of set types such as the TOSCA list or map" + name: entry_schema_definition + required: no + type: map + short: type + mapping: + "type": + desc: "collection element type" + required: yes + type: str + description: + required: no + type: str + constraints: + *constraints_sequence + +# see section A.5.5 +_artifact_definition: &artifact_definition + type: map + name: artifact_definition + short: implementation # assumes type can be inferred .. + mapping: + "type": + desc: "The required artifact type for the artifact definition" + required: yes + type: str + description: + desc: "The optional description for the artifact definition" + required: no + type: str + implementation: + desc: "The optional URI string (relative or absolute) which can be used to locate the artifacts file. +" + required: no + type: str + repository: + desc: "The optional name of the repository definition which contains the location of the external repository that contains the artifact" + required: no + type: str + deploy_path: + desc: "The file path the associated file would be deployed into within the target nodes container." + required: no + type: str + +# see section A.5.6 +_repository_definition: &repository_definition + type: map + name: repository_definition + short: url + mapping: + description: + desc: "The optional description for the repository." 
+ required: no + type: str + url: + desc: "The required URL or network address used to access the repository" + required: yes + type: str + credential: + desc: "The optional Credential used to authorize access to the repository" + required: no + type: str + +#see section 3.5.7 +_import_definition: &import_definition + type: map + name: import_definition + short: file + mapping: + file: + desc: "file URI" + required: yes + type: str + repository: + desc: "symbolic name of the repository definition where the imported file can be found" + required: no + type: str + namespace_uri: + desc: "namespace URI to that will be applied to type definitions found within the imported file" + required: no + type: str + namespace_prefix: + desc: "optional namespace prefix (alias) that will be used to indicate the namespace_uri when forming a qualified name (i.e., qname) when referencing type definitions from the imported" + required: no + type: str + +#see section A.5.7 +_property_definition: &property_definition + type: map + name: property_definition + mapping: + "type": + type: str + required: yes +#not as easy, it can be an user defined data type +# <<: *primitive_types + description: + type: str + required: no + constraints: + desc: "The optional list of sequenced constraint clauses for the Data Type." + required: no + <<: *constraints_sequence + default: + type: any + required: no + "required": + type: bool + required: no + status: + type: str + required: no + <<: *status_values + entry_schema: + <<: *entry_schema_definition +# desc: "used to declare the name of the Datatype definition for entries of set types such as the TOSCA list or map." +# type: str +# required: no + +#see section A.5.8 +#_property_assignment_definition: &property_assignment_definition + +#see A.5.9 +_attribute_definition: &attribute_definition + type: map + name: attribute_definition + mapping: + "type": + type: str + required: yes +# <<: *primitive_types + description: + type: str + required: no + default: + type: any + required: no + status: + desc: "The optional status of the attribute relative to the specification or implementation" + type: str + required: no + <<: *status_values + entry_schema: + <<: *entry_schema_definition + +#see section A.5.10 +#here again, we must support the short form which is the most common +_attribute_assignment_definition: &attribute_assignment_definition + type: map + name: attribute_assignment_definition + mapping: + description: + desc: "The optional description of the attribute." + required: no + type: str + value: +#actually 'value | value_expression' + desc: "represent the type-compatible value to assign to the named attribute. Attribute values may be provided as the result from the evaluation of an expression or a function" + required: yes + type: any + + +# see spec section A.5.11 + +# see spec section A.5.11.1: variant to be used in node or relationship type definitions +_type_operation_definition: &type_operation_definition + type: map + name: type_operation_definition + short: implementation + mapping: + description: + desc: "The optional description string for the associated named operation." 
+ required: no + type: str + implementation: + desc: "The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR file)" + required: no + type: str + inputs: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a input value" + name: property_assignment + type: any + +# from A.5.11.2 +_template_operation_definition: &template_operation_definition + type: map + name: template_operation_definition + short: implementation + mapping: + description: + desc: "The optional description string for the associated named operation." + required: no + type: str + implementation: + desc: "The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR file)" + name: template_operation_implementation_definition + required: no + short: primary + type: map + mapping: + primary: + desc: "The optional implementation artifact name (e.g., the primary script file name within a TOSCA CSAR file). " + required: no + type: str + dependencies: + desc: "The optional list of one or more dependent or secondary implementation artifact name which are referenced by the primary implementation artifact (e.g., a library the script installs or a secondary script)" + required: no + type: seq + sequence: + - type: str + inputs: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a input value" + name: property_assignment + type: any + + +# see section A.5.12, specifically A.5.12.2.1 : definition to be used in node or relationship type definition +_type_interface_definition: &type_interface_definition + type: map + name: type_interface_definition + mapping: + "type": + desc: "represents the required name of the Interface Type for the interface definition +" + required: yes + type: str + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + *property_definition + =: + *type_operation_definition + +# see section A.5.12.2.2, extended notation to be used in node or relationship template definitions +_template_interface_definition: &template_interface_definition + type: map + name: template_interface_definition + mapping: + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + =: + *template_operation_definition + + +# A.6 section: type specific definitions + +# see section A.6.1 +_capability_definition: &capability_definition + type: map + name: capability_definition + short: type + mapping: + "type": + desc: "The required name of the Capability Type the capability definition is based upon" + required: yes + type: str + description: + desc: "The optional description of the Capability definition" + required: no + type: str + properties: + desc: "" + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of property definitions for the Capability definition" + required: no + type: map + mapping: + =: + *attribute_definition + valid_source_types: + desc: "" + required: no + type: seq + sequence: + - type: str + occurrences: + desc: "The optional minimum and maximum occurrences for the capability." 
+ required: no + <<: *range_definition + +# see section A.6.2 +# +_requirement_definition: &requirement_definition + type: map + name: requirement_definition + short: capability #as per A.6.2.2.1 + mapping: + capability: + desc: "The required reserved keyname used that can be used to provide the name of a valid Capability Type that can fulfil the requirement" + required: yes + type: str + node: + desc: "The optional reserved keyname used to provide the name of a valid Node Type that contains the capability definition that can be used to fulfil the requirement. " + required: no + type: str + relationship: +# and from section A.6.2.1, this one is an oddball + desc: "The optional reserved keyname used to provide the name of a valid Relationship Type to construct when fulfilling the requirement." + required: no + name: requirement_relationship_definition + short: type + type: map + mapping: + type: + desc: "The optional reserved keyname used to provide the name of the Relationship Type for the requirement definitions relationship keyname. +" + required: yes + type: str + interfaces: + #not clear which interface definition is to be used here + desc: "allows augmentation (additional properties and operations) of the interfaces defined by the relationship type indicated above" + required: no + type: map + mapping: + =: + *type_interface_definition + occurrences: + desc: "The optional minimum and maximum occurrences for the requirement." + required: no + <<: *range_definition + +# see section A.6.3 +_artifact_type_definition: &artifact_type_definition + type: map + name: artifact_type_definition + mapping: + derived_from: + desc: "An optional parent Artifact Type name the Artifact Type derives from" + required: no + type: str + description: + desc: "An optional description for the Artifact Type." + required: no + type: str + mime_type: + desc: "The required mime type property for the Artifact Type." + required: no + type: str + file_ext: + desc: "The required file extension property for the Artifact Type" + required: no + type: seq + sequence: + - type: str + properties: + desc: "An optional list of property definitions for the Artifact Type" + required: no + type: map + mapping: + =: + *property_definition + +#see spec section #A.6.4 +_interface_type_definition: &interface_type_definition + type: map + name: interface_type_definition + mapping: + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + type: str + desc: "property_name to property_value(_expression) mapping" + =: + *type_operation_definition + +# A.6.5 +_data_type_definition: &data_type_definition + type: map + name: data_type_definition + mapping: + derived_from: + desc: "The optional key used when a datatype is derived from an existing TOSCA Data Type. +" + required: no + type: str + description: + desc: "The optional description for the Data Type. +" + required: no + type: str + constraints: + desc: "The optional list of sequenced constraint clauses for the Data Type." + <<: *constraints_sequence + properties: + desc: "The optional list property definitions that comprise the schema for a complex Data Type in TOSCA" + type: map + mapping: + =: + *property_definition + +# see section A.6.6 +_capability_type_definition: &capability_type_definition + type: map + name: capability_type_definition + mapping: + derived_from: + desc: "An optional parent capability type name this new Capability Type derives from." 
+ required: no + type: str + description: + desc: "An optional description for the Capability Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Capability Type." + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Capability Type" + required: no + type: map + mapping: + =: + *attribute_definition + valid_source_types: + desc: "An optional list of one or more valid names of Node Types that are supported as valid sources of any relationship established to the declared Capability Type" + required: no + type: seq + sequence: + - type: str + +# section A.6.7 requirement definition: TOSCA YAML profile relies on capability types to +# define requirements + +# see section A.6.9 +_relationship_type_definition: &relationship_type_definition + type: map + name: relationship_type_definition + mapping: + derived_from: + desc: "An optional parent Relationship Type name the Relationship Type derives from" + required: no + type: str + description: + desc: "An optional description for the Relationship Type." + required: no + type: str + properties: + desc: "An optional list of property definitions for the Relationship Type" + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Relationship Type" + required: no + type: map + mapping: + =: + *attribute_definition + interfaces: + desc: "An optional list of interface definitions interfaces supported by the Relationship Type" + required: no + type: map + mapping: + =: + *type_interface_definition + valid_target_types: + desc: "An optional list of one or more names of Capability Types that are valid targets for this relationship. " + required: no + type: seq + sequence: + - type: str + +#see section 3.6.10 +_group_type_definition: &group_type_definition + type: map + name: group_type_definition + mapping: + derived_from: + desc: "An optional parent Group Type name this new Group Type derives from" + required: no + type: str + version: + desc: "An optional version for the Group Type definition" + required: no + type: str + description: + desc: "An optional description for the Group Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Group Type." + required: no + type: map + mapping: + =: + *property_definition + targets: + desc: "An optional list of one or more names of Node Types that are valid +(allowed) as members of the Group Type." + required: no + type: seq + sequence: + - type: str + interfaces: + desc: "An optional list of interface definitions supported by the Group Type" + required: no + type: map + mapping: + =: + *type_interface_definition + +#see section 3.6.11 +_policy_type_definition: &policy_type_definition + type: map + name: policy_type_definition + mapping: + derived_from: + desc: "An optional parent Policy Type name this new Policy Type derives from" + required: no + type: str + version: + desc: "An optional version for the Policy Type definition" + required: no + type: str + description: + desc: "An optional description for the Policy Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Policy Type." 
+ required: no + type: map + mapping: + =: + *property_definition + targets: + desc: "An optional list of valid Node Types or Group Types the Policy Type +can be applied to" + required: no + type: seq + sequence: + - type: str + +# see section A.6.8 +_node_type_definition: &node_type_definition + type: map + name: node_type_definition + mapping: + derived_from: + desc: "An optional parent Node Type name this new Node Type derives from" + required: no + type: str + description: + desc: "An optional description for the Node Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Node Type." + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Node Type. +" + required: no + type: map + mapping: + =: + *attribute_definition + requirements: + desc: "An optional sequenced list of requirement definitions for the Node Type. +" + required: no + type: seq + sequence: + - type: map + mapping: + =: + *requirement_definition + capabilities: + desc: "An optional list of capability definitions for the Node Type" + required: no + type: map + mapping: + =: + *capability_definition + interfaces: + desc: "" + required: no + type: map + mapping: + =: + *type_interface_definition + artifacts: + desc: "An optional list of named artifact definitions for the Node Type" + required: no + type: map + mapping: + =: + *artifact_definition + +# A.7 Template specific definitions + +# see section A.7.1 +_capability_assignment_definition: &capability_assignment_definition + type: map + name: capability_assignment_definition + mapping: + properties: + # list of property assignments + desc: "An optional list of property definitions for the Capability definition" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + attributes: + # list of attribute assignments + desc: "An optional list of attribute definitions for the Capability definition" + required: no + type: map + mapping: + =: + desc: "" + name: attribute_assignment + type: any + +# see section A.7.2 +_requirement_assignment_definition: &requirement_assignment_definition + type: map + name: requirement_assignment_definition + short: node + mapping: + capability: + desc: " used to provide the name of either a: Capability definition within a target node template that can fulfill the requirement or Capability Type that the provider will use to select a type-compatible target node template to fulfill the requirement at runtime." + required: no + type: str + node: +#why is this a reference to a node type and not to a node template?? + desc: "used to identify the target node of a relationship: Node Template name that can fulfil the target node requirement or Node Type name that the provider will use to select a type-compatible node template to fulfil the requirement at runtime" + required: no + type: str + relationship: + desc: "" + required: no +#fins a better name name: relationship_definition + type: map + short: type + mapping: + "type": + desc: "The optional reserved keyname used to provide the name of the Relationship Type for the requirement assignments relationship keyname" + required: no + type: str + properties: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + interfaces: + desc: "from A.5.12.2.2, right?" 
+ required: no + type: map + mapping: + =: + *template_interface_definition + node_filter: + desc: "The optional filter definition that TOSCA orchestrators or providers would use to select a type-compatible target node that can fulfill the associated abstract requirement at runtime." + required: no + <<: *node_filter_definition + +# see section A.7.3 +_node_template_definition: &node_template_definition + type: map + name: node_template_definition + mapping: + "type": + desc: "The required name of the Node Type the Node Template is based upon" + required: yes + type: str + description: + desc: "An optional description for the Node Template" + required: no + type: str + directives: + desc: "An optional list of directive values to provide processing instructions to orchestrators and tooling." + required: no + type: seq + sequence: + - type: str + properties: +#custom check needs to be added: the value or expression providing the property value +#needs to be compatible with the property definition + desc: "An optional list of property value assignments for the Node Template." + required: no + type: map + mapping: + =: + type: any + name: property_assignment + desc: "a property value or an expression providing a property value" + attributes: + desc: "An optional list of attribute value assignments for the Node Template" + required: no + type: map + mapping: + =: + *attribute_assignment_definition + requirements: + desc: "An optional sequenced list of requirement assignments for the Node Template." + required: no + type: seq + sequence: + - type: map + mapping: + =: + *requirement_assignment_definition + capabilities: + desc: "An optional list of capability assignments for the Node Template." + required: no + type: map + mapping: + =: + *capability_assignment_definition + interfaces: + desc: "An optional list of named interface definitions for the Node Template" + required: no + type: map + mapping: + =: + *template_interface_definition + artifacts: + desc: "An optional list of named artifact definitions for the Node Template. +" + required: no + type: map + mapping: + =: + *artifact_definition + node_filter: + desc: "The optional filter definition that TOSCA orchestrators would use to select the correct target node. This keyname is only valid if the directive has the value of 'selectable' set." + required: no + <<: *node_filter_definition + copy: + desc: "The optional (symbolic) name of another node template to copy into (all keynames and values) and use as a basis for this node template." + required: no + type: str + +# see section A.7.4 +_relationship_template_definition: &relationship_template_definition + type: map + name: relationship_template_definition + mapping: + "type": + desc: "The required name of the Relationship Type the Relationship Template is based upon" + required: yes + type: str + alias: + desc: "The optional name of a different Relationship Template definition whose values are (effectively) copied into the definition for this Relationship Template (prior to any other overrides)." + required: no + type: str + description: + desc: "An optional description for the Relationship Template" + required: no + type: str + properties: + desc: "An optional list of property assignments for the Relationship Template." 
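# As a rough illustration of what the node_template_definition above is meant
# to accept, a conforming node template entry could look like the following;
# every template name, value and script path here is invented for the example
# and is not taken from the specification:
#
#  my_app:
#    type: tosca.nodes.SoftwareComponent
#    properties:
#      component_version: 1.0
#    requirements:
#      - host:
#          node: my_server
#    interfaces:
#      Standard:
#        create: scripts/create.sh
#
# Note the short forms permitted by the rules above: a requirement assignment
# given directly as a node name, and an operation given directly as its
# implementation artifact.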
+ required: no + name: properties_assignment_validation + type: map + mapping: + =: + type: any +#scalar + desc: "an expression providing a property value" + attributes: + desc: "An optional list of attribute value assignments for the Relationship Template" + required: no + name: attributes_assignment_validation + type: map + mapping: + =: + type: scalar + desc: "an expression providing an attribute value" + interfaces: + desc: "An optional list of named interface definitions for the Relationship Template ('augmentation' is allowed here)" + required: no + type: map + mapping: + =: + *template_interface_definition + copy: + desc: "The optional (symbolic) name of another relationship template to copy into (all keynames and values) and use as a basis for this relationship template." + required: no + type: str + + +# see section 3.7.5 +_group_definition: &group_definition + type: map + name: group_definition + mapping: + "type": + desc: "The required name of the group type the group definition is based upon" + required: yes + type: str + description: + desc: "The optional description for the group definition" + required: no + properties: + desc: " represents the optional list of property assignments for the group definition that provide values for properties defined in its declared Group Type" + required: no + type: map + mapping: + =: + type: any + name: property_assignment + targets: + desc: "contains the required list of one or more node template names (within the same topology template) that are members of this logical group" + required: yes + type: seq + sequence: + - type: str + interfaces: + desc: "represents the optional list of interface definitions for the group definition that augment those provided by its declared Group Type" + required: no + type: map + mapping: + =: + *template_interface_definition + +# see section 3.7.6 +_policy_template_definition: &policy_template_definition + type: map + name: policy_definition + mapping: + "type": + desc: "The required name of the policy type the policy definition is based upon" + required: yes + type: str + description: + desc: "The optional description for the policy definition" + required: no + properties: + desc: "represents the optional list of property assignments for the policy definition that provide values for properties defined in its declared Policy Type" + required: no + type: map + mapping: + =: + type: any + name: property_assignment + targets: + desc: "represents the optional list of names of node templates or groups that the policy is to applied to" + required: no + type: seq + sequence: + - type: str + +# see section 3.8 Topology Template definition: defines the topology template of a cloud application. 
+# described as a a reusable grammar as it can be a part of a service template definition +_topology_template_definition: &topology_template_definition + type: map + name: topology_template_definition + mapping: + description: + desc: "a description of the topology template" + required: no + type: str + inputs: + desc: "definition of input parameters for the topology template" + name: inputs + required: no + type: map + mapping: + =: + *property_definition + node_templates: + desc: "definition of the node templates of the topology" + name: node_templates + required: no + type: map + mapping: + =: + *node_template_definition + relationship_templates: + desc: "definition of the relationship templates of the topology" + required: no + name: relationship_templates + type: map + mapping: + =: + *relationship_template_definition + outputs: + desc: "definition of output parameters for the topology template" + name: outputs + required: no + type: map + mapping: + =: + *attribute_assignment_definition + groups: + desc: "An optional list of Group definitions whose members are node templates defined within this same Topology Template" + name: groups + required: no + type: map + mapping: + =: + *group_definition + policies: + # see 8.2.3, initially the list is not described as sequenced but then the grammar shows it as such !? + desc: "An optional sequenced?? list of Policy definitions for the Topology Template." + name: policies + required: no + type: seq + sequence: + - type: map + mapping: + =: + *policy_template_definition + substitution_mappings: +# one possible short-coming that is visible here is that the definition of the capability +# and requirements mappings are given in the spec only with the short/inline version of a +# YAML list/sequence, which cannot be enforced here .. + desc: " a description of the topology template" + name: substitution_mappings + required: no + type: map + mapping: + node_type: + desc: "node type name" + required: yes + type: str + capabilities: + desc: "map_of_capability_mappings_to_expose" + type: map + mapping: + =: + type: seq + sequence: + - type: str + requirements: + desc: "map_of_requirement_mapping_to_expose" + type: map + mapping: + =: + type: seq + sequence: + - type: str + + +# see A.9 Service Template definition: A TOSCA Service Template (YAML) document contains +# element definitions of building blocks for cloud application, or complete models of cloud applications. + +type: map +name: service_template_definition +mapping: + tosca_definitions_version: + desc: "Required TOSCA Definitions version string" + required: yes + type: str + enum: [tosca_simple_yaml_1_0_0] + + tosca_default_namespace: + desc: "Optional. 
default namespace (for type schema)" + required: no + type: str + + metadata: + desc: "Optional metadata keyname: value pairs" + name: metadata + required: no + type: map + mapping: + template_name: + desc: "Optional name of this service template" + required: no + type: str + template_author: + desc: "Optional author of this service template" + required: no + type: str + template_version: + desc: "Optional version of this service template" + required: no + type: str + =: + desc: "User defined entry" + required: no + type: str + +#to add, the spec says: "Optional list of domain or profile specific metadata keynames" + + description: + desc: "Optional description of the definitions inside the file" + required: no + type: str + + imports: + desc: "ordered list of import statements for importing other definitions files" + name: imports + required: no + type: seq + sequence: + - type: map + mapping: + =: + *import_definition + + dsl_definitions: + desc: "list of YAML alias anchors (or macros)" + name: dsl_definitions + required: no + type: map + mapping: + =: + desc: "some piece of valid yaml that makes the anchor/alias definition" + type: any + required: no + + repositories: + desc: "list of external repository definitions which host TOSCA artifacts" + name: repositories + required: no + type: map + mapping: + =: + *repository_definition + + data_types: + desc: "list of TOSCA datatype definitions" + name: data_types + required: no + type: map + mapping: + =: + *data_type_definition + + node_types: + desc: "list of node type definitions" + name: node_types + required: no + type: map + mapping: + =: + *node_type_definition + + capability_types: + desc: "list of capability type definitions" + name: capability_types + required: no + type: map + mapping: + =: + *capability_type_definition + + relationship_types: + desc: "list of relationship type definitions" + name: relationship_types + required: no + type: map + mapping: + =: + *relationship_type_definition + + artifact_types: + desc: "list of artifact type definitions" + name: artifact_types + required: no + type: map + mapping: + =: + *artifact_type_definition + + interface_types: + desc: "list of interface type definitions" + name: interface_types + required: no + type: map + mapping: + =: + *interface_type_definition + + group_types: + desc: "list of group type definitions" + name: group_types + required: no + type: map + mapping: + =: + *group_type_definition + + policy_types: + desc: "list of policy type definitions" + name: policy_types + required: no + type: map + mapping: + =: + *policy_type_definition + + topology_template: + desc: "topology template definition of the cloud application or service" + required: no + <<: *topology_template_definition diff --git a/dcaedt_validator/checker/src/main/resources/tosca/tosca_simple_yaml_1_1.grammar b/dcaedt_validator/checker/src/main/resources/tosca/tosca_simple_yaml_1_1.grammar new file mode 100644 index 0000000..e0645d5 --- /dev/null +++ b/dcaedt_validator/checker/src/main/resources/tosca/tosca_simple_yaml_1_1.grammar @@ -0,0 +1,1583 @@ +_status_values: &status_values + enum: + - supported + - unsupported + - experimental + - deprecated + +#I do not know that the lists and maps qualify as 'primitive' .. 
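# As a sketch of what the top-level service_template_definition closing the
# 1.0 grammar above is meant to accept (this 1.1 grammar follows the same
# overall shape), a minimal document would look roughly like this; the node
# type and template names are purely illustrative:
#
#  tosca_definitions_version: tosca_simple_yaml_1_0_0
#  description: minimal example
#  node_types:
#    example.nodes.MyNode:
#      derived_from: tosca.nodes.Root
#  topology_template:
#    node_templates:
#      my_node:
#        type: example.nodes.MyNode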
+_primitive_types: &primitive_types + enum: [string,integer,float,boolean,timestamp,list,map,version,range,scalar-unit.size,scalar_unit.frequency,scalar_unit.time] + +#needs custom validation as we have to make sure there are 2 elements and allow for the +#UNBOUNDED keyword as second element +_range_definition: &range_definition + desc: "Section used to declare additional metadata information" + required: no + type: seq + name: range_definition + sequence: + - type: scalar + +_version_definition: &version_definition + desc: "An optional TOSCA version number" + required: no + type: str + name: version_definition +# pattern: .[.[.[-