author | Stone, Avi (as206k) <as206k@att.com> | 2018-04-12 15:46:31 +0300
committer | Stone, Avi (as206k) <as206k@att.com> | 2018-04-12 15:49:38 +0300
commit | 5032434b101f25fa44d2e1f8dc8393e30af1ed4f (patch)
tree | 2dc7d37a8048e025c7412af080640da4c9a22b65 /dcaedt_validator/checker/src/main/java
parent | 2205633792f95f46a02bbf8f87f0c2637265d924 (diff)
DCAE-D be initial commit
Issue-ID: SDC-1218
Change-Id: Id18ba96c499e785aa9ac395fbaf32d57f08c281b
Signed-off-by: Stone, Avi (as206k) <as206k@att.com>
Diffstat (limited to 'dcaedt_validator/checker/src/main/java')
25 files changed, 6517 insertions, 0 deletions
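Before the file-by-file diff, a quick orientation: the two central classes added below are `Catalog` (a parent-chained registry of TOSCA types, templates and import targets) and `Checker` (kwalify grammar validation plus annotation-driven consistency checks). As a minimal sketch of how a caller might drive them, modeled on the `Checker.main` method included in this commit; the file name is a placeholder:

```java
import java.io.File;

import org.onap.sdc.dcae.checker.Catalog;
import org.onap.sdc.dcae.checker.Checker;
import org.onap.sdc.dcae.checker.Target;

// Sketch modeled on Checker.main() from this commit; "template.yaml" is a
// placeholder for a TOSCA service template (a directory also works, since
// Checker.check(File) walks the files it contains).
public class CheckerDriver {
    public static void main(String[] args) throws Exception {
        // parse + validate + consistency-check, returning the populated catalog
        Catalog cat = Checker.check(new File("template.yaml"));

        // every Target carries its own error report plus its import lineage
        for (Target t : cat.targets()) {
            System.out.println(t.getLocation());
            System.out.println(cat.importString(t));
            System.out.println(t.getReport());
        }
    }
}
```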
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java new file mode 100644 index 0000000..1512e56 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java @@ -0,0 +1,444 @@ +package org.onap.sdc.dcae.checker; + +import java.util.Iterator; +import java.util.Collection; +import java.util.Comparator; +import java.util.Set; +import java.util.Map; +import java.util.List; +import java.util.EnumMap; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.ArrayList; +import java.util.Collections; + +import java.util.stream.Collectors; + +import java.net.URI; + +import com.google.common.base.Predicate; +import com.google.common.base.Function; +import com.google.common.collect.Iterators; +import com.google.common.collect.Table; +import com.google.common.collect.HashBasedTable; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; + +/* + * Oddball: tracking inputs as data templates could be seen as rather + * odd but we see them as instances of data types, in the same way node + * templates are instances of node types. + */ +public class Catalog { + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + /* Type hierarchies are stored as maps from a type name to its definition + * Not the best but easy to follow hierarchies towards their root .. + */ + private EnumMap<Construct, Map<String,Map>> types = + new EnumMap<Construct, Map<String,Map>>(Construct.class); + /* track templates: we track templates (tye instances) first per target then per contruct. 
+ * This allows us to share the catalog among multiple templates sharign the same type set + */ + private Map<Target, EnumMap<Construct, Map<String,Map>>> templates = + new HashMap<Target, EnumMap<Construct, Map<String,Map>>>(); + + private Catalog parent; + + public Catalog(Catalog theParent) { + this.parent = theParent; + /* there are no requirement types, they are the same as capability types */ + types.put(Construct.Data, new LinkedHashMap<String, Map>()); + types.put(Construct.Capability, new LinkedHashMap<String, Map>()); + types.put(Construct.Relationship, new LinkedHashMap<String, Map>()); + types.put(Construct.Artifact, new LinkedHashMap<String, Map>()); + types.put(Construct.Interface, new LinkedHashMap<String, Map>()); + types.put(Construct.Node, new LinkedHashMap<String, Map>()); + types.put(Construct.Group, new LinkedHashMap<String, Map>()); + types.put(Construct.Policy, new LinkedHashMap<String, Map>()); + + } + + public Catalog() { + this(null); + } + + public boolean addType(Construct theConstruct, String theName, Map theDef) { + if (hasType(theConstruct, theName)) { + return false; + } + getConstructTypes(theConstruct).put(theName, theDef); + return true; + } + + public Map getTypeDefinition(Construct theConstruct, String theName) { + Map<String, Map> constructTypes = getConstructTypes(theConstruct); + Map typeDef = constructTypes.get(theName); + if (typeDef == null && this.parent != null) { + return this.parent.getTypeDefinition(theConstruct, theName); + } + return typeDef; + } + + public boolean hasType(Construct theConstruct, String theName) { + Map<String, Map> constructTypes = getConstructTypes(theConstruct); + boolean res = constructTypes.containsKey(theName); + if (!res && this.parent != null) { + res = this.parent.hasType(theConstruct, theName); + } + return res; + } + + protected Map<String, Map> getConstructTypes(Construct theConstruct) { + Map<String, Map> constructTypes = this.types.get(theConstruct); + if (null == constructTypes) { + throw new RuntimeException("Something worse is cooking here!", + new CatalogException("No types for construct " + theConstruct)); + } + return constructTypes; + } + + protected Iterator<Map.Entry<String,Map>> + typesIterator(Construct theConstruct) { + List<Map.Entry<String,Map>> constructTypes = + new ArrayList<Map.Entry<String,Map>>( + this.types.get(theConstruct).entrySet()); + Collections.reverse(constructTypes); + return (this.parent == null) + ? constructTypes.iterator() + : Iterators.concat(constructTypes.iterator(), + this.parent.typesIterator(theConstruct)); + } + + /* this will iterate through the type hierarchy for the given type, included. 
+ */ + public Iterator<Map.Entry<String,Map>> + hierarchy(Construct theConstruct, final String theName) { + return Iterators.filter(typesIterator(theConstruct), + new Predicate<Map.Entry<String,Map>>() { + Object next = theName; + public boolean apply(Map.Entry<String,Map> theEntry) { + if (next != null && next.equals(theEntry.getKey())) { + next = theEntry.getValue().get("derived_from"); + return true; + } + else + return false; + } + }); + } + + public boolean isDerivedFrom(Construct theConstruct, String theType, String theBaseType) { + + Iterator<Map.Entry<String,Map>> hierachyIterator = + hierarchy(theConstruct, theType); + while (hierachyIterator.hasNext()) { + Map.Entry<String,Map> typeDef = hierachyIterator.next(); + + if (typeDef.getKey().equals(theBaseType)) { + return true; + } + } + return false; + } + + /* We go over the type hierarchy and retain only an iterator over the + * elements of the given facet for each type in the hierarchy. + * We concatenate these iterators and filter out duplicates. + * TODO: cannot just filter out duplicates - a redefinition can refine the one in the base construct so we + * should merge them! + */ + public Iterator<Map.Entry> facets(Construct theConstruct, + final Facet theFacet, + final String theName) { + return + Iterators.filter( + Iterators.concat( + Iterators.transform( + hierarchy(theConstruct, theName), + new Function<Map.Entry<String,Map>, Iterator<Map.Entry>>() { + public Iterator<Map.Entry> apply(Map.Entry<String,Map> theEntry) { + Map m = (Map)theEntry.getValue().get(theFacet.name()); + return m == null + ? Collections.emptyIterator() + : m.entrySet().iterator(); + } + } + ) + ), + new Predicate<Map.Entry>() { + Set insts = new HashSet(); + public boolean apply(Map.Entry theEntry) { + return !insts.contains(theEntry.getKey()); + } + } + ); + } + + //no need to specify a construct, only nodes can have requirements + public Iterator<Map.Entry> requirements(final String theName) { + return + Iterators.concat( + Iterators.transform( + hierarchy(Construct.Node, theName), + new Function<Map.Entry<String,Map>, Iterator<Map.Entry>>() { + public Iterator<Map.Entry> apply(Map.Entry<String,Map> theEntry) { + List<Map> l = (List<Map>)theEntry.getValue().get("requirements"); + return l == null + ? Collections.emptyIterator() + : Iterators.concat( + Iterators.transform( + l.iterator(), + new Function<Map, Iterator<Map.Entry>> () { + public Iterator<Map.Entry> apply(Map theEntry) { + return theEntry.entrySet().iterator(); + } + } + ) + ); + } + } + ) + ); + } + + /* Example: find the definition of property 'port' of the node type + * tosca.nodes.Database (properties being a facet of the node construct) + * + * Note: the definition of a facet is cumulative, i.e. more specialized + * definitions contribute (by overwriting) to the + */ + public Map getFacetDefinition(Construct theConstruct, + String theConstructTypeName, + Facet theFacet, + String theName) { + Map def = null; + Iterator<Map.Entry<String,Map>> ti = hierarchy(theConstruct, theConstructTypeName); + while (ti.hasNext()) { + //this is where requirements would yield a List .. + Map<String,Map> fset = (Map<String,Map>)ti.next().getValue().get(theFacet.name()); + if (fset != null) { + def = def == null ? 
fset.get(theName) + : mergeDefinitions(def, fset.get(theName)); + } + } + return def; + } + + public Map getRequirementDefinition(Construct theConstruct, + String theConstructTypeName, + String theName) { + Iterator<Map.Entry<String,Map>> ti = hierarchy(theConstruct, theConstructTypeName); + while (ti.hasNext()) { + //this is where requirements yield a List .. + List<Map> reqs = (List<Map>)ti.next().getValue().get("requirements"); + + if(reqs!=null){ + for (Map req: reqs) { + Map.Entry reqe = (Map.Entry)req.entrySet().iterator().next(); + if (theName.equals(reqe.getKey())) { + return (Map)reqe.getValue(); + } + } + }else{ + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Avoiding adding requirment block since it doesn't exists on the template...."); + } + } + return null; + } + + /* */ + private EnumMap<Construct,Map<String,Map>> getTemplates(Target theTarget) { + EnumMap<Construct, Map<String,Map>> targetTemplates = templates.get(theTarget); + if (targetTemplates == null) { + targetTemplates = new EnumMap<Construct,Map<String,Map>>(Construct.class); + targetTemplates.put(Construct.Data, new LinkedHashMap<String, Map>()); + targetTemplates.put(Construct.Relationship, new LinkedHashMap<String, Map>()); + targetTemplates.put(Construct.Node, new LinkedHashMap<String, Map>()); + targetTemplates.put(Construct.Group, new LinkedHashMap<String, Map>()); + targetTemplates.put(Construct.Policy, new LinkedHashMap<String, Map>()); + + templates.put(theTarget, targetTemplates); + } + return targetTemplates; + } + + public Map<String,Map> getTargetTemplates(Target theTarget, Construct theConstruct) { + return getTemplates(theTarget).get(theConstruct); + } + + public void addTemplate(Target theTarget, Construct theConstruct, String theName, Map theDef) + throws CatalogException { + Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct); + if (null == constructTemplates) { + throw new CatalogException("No such thing as " + theConstruct + " templates"); + } + if (constructTemplates.containsKey(theName)) { + throw new CatalogException(theConstruct + " template '" + theName + "' re-declaration"); + } + constructTemplates.put(theName, theDef); + } + + public boolean hasTemplate(Target theTarget, Construct theConstruct, String theName) { + Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct); + return constructTemplates != null && + constructTemplates.containsKey(theName); + } + + public Map getTemplate(Target theTarget, Construct theConstruct, String theName) { + Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct); + if (constructTemplates != null) + return constructTemplates.get(theName); + else + return null; + } + + public static Map mergeDefinitions(Map theAggregate, Map theIncrement) { + if (theIncrement == null) + return theAggregate; + + for(Map.Entry e: (Set<Map.Entry>)theIncrement.entrySet()) { + theAggregate.putIfAbsent(e.getKey(), e.getValue()); + } + return theAggregate; + } + + /* tracks imports, i.e.targets */ + private LinkedHashMap<URI, Target> targets = + new LinkedHashMap<URI, Target>(); + /* tracks dependencies between targets, i.e. 
the 'adjency' matrix defined by + * the 'import' relationship */ + private Table<Target,Target,Boolean> imports = HashBasedTable.create(); + + + /* + * theParent contains an 'include/import' statement pointing to the Target + */ + public boolean addTarget(Target theTarget, Target theParent) { + boolean cataloged = targets.containsKey(theTarget.getLocation()); + + if(!cataloged) { + targets.put(theTarget.getLocation(), theTarget); + } + + if (theParent != null) { + imports.put(theParent, theTarget, Boolean.TRUE); + } + + return !cataloged; + } + + public Target getTarget(URI theLocation) { + return targets.get(theLocation); + } + + public Collection<Target> targets() { + return targets.values(); + } + + /* Targets that no other targets depend on */ + public Collection<Target> topTargets() { + return targets.values() + .stream() + .filter(t -> !imports.containsColumn(t)) + .collect(Collectors.toList()); + + } + + public String importString(Target theTarget) { + return importString(theTarget, " "); + } + + private String importString(Target theTarget, String thePrefix) { + StringBuilder sb = new StringBuilder(""); + Map<Target,Boolean> parents = imports.column(theTarget); + if (parents != null) { + for (Target p: parents.keySet()) { + sb.append(thePrefix) + .append("from ") + .append(p.getLocation()) + .append("\n") + .append(importString(p, thePrefix + " ")); + } + //we only keep the positive relationships + } + return sb.toString(); + } + + /* */ + private class TargetComparator implements Comparator<Target> { + + /* @return 1 if there is a dependency path from TargetOne to TargetTwo, -1 otherwise */ + public int compare(Target theTargetOne, Target theTargetTwo) { + if (hasPath(theTargetTwo, theTargetOne)) + return -1; + + if (hasPath(theTargetOne, theTargetTwo)) + return 1; + + return 0; + } + + public boolean hasPath(Target theStart, Target theEnd) { + Map<Target,Boolean> deps = imports.row(theStart); + if (deps.containsKey(theEnd)) + return true; + for (Target dep: deps.keySet()) { + if (hasPath(dep, theEnd)) + return true; + } + return false; + } + } + + public Collection<Target> sortedTargets() { + List keys = new ArrayList(this.targets.values()); + Collections.sort(keys, new TargetComparator()); + return keys; + } + + public static void main(String[] theArgs) throws Exception { + + Catalog cat = new Catalog(); + + Target a = new Target("a", new URI("a")), + b = new Target("b", new URI("b")), + c = new Target("c", new URI("c")), + d = new Target("d", new URI("d")); + + cat.addTarget(a, null); + cat.addTarget(b, null); + cat.addTarget(c, null); + cat.addTarget(d, null); + + cat.addTarget(b, c); + cat.addTarget(a, c); + cat.addTarget(c, d); + cat.addTarget(a, b); + + for (Target t: cat.sortedTargets()) + debugLogger.log(LogLevel.DEBUG, Catalog.class.getName(), t.toString()); + + Catalog root = new Catalog(); + root.addType(Construct.Node, "_a", Collections.emptyMap()); + root.addType(Construct.Node, "__a", Collections.singletonMap("derived_from", "_a")); + root.addType(Construct.Node, "___a", Collections.singletonMap("derived_from", "_a")); + + Catalog base = new Catalog(root); + base.addType(Construct.Node, "_b", Collections.singletonMap("derived_from", "__a")); + base.addType(Construct.Node, "__b", Collections.singletonMap("derived_from", "_b")); + base.addType(Construct.Node, "__b_", Collections.singletonMap("derived_from", "_a")); + + if (theArgs.length > 0) { + Iterator<Map.Entry<String, Map>> ti = + base.hierarchy(Construct.Node, theArgs[0]); + while (ti.hasNext()) { + 
debugLogger.log(LogLevel.DEBUG, Catalog.class.getName(), "> {}", ti.next().getKey()); + } + } + } +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CatalogException.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CatalogException.java new file mode 100644 index 0000000..d8e2dba --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CatalogException.java @@ -0,0 +1,14 @@ +package org.onap.sdc.dcae.checker; + + +public class CatalogException extends Exception { + + public CatalogException(String theMsg, Throwable theCause) { + super(theMsg, theCause); + } + + public CatalogException(String theMsg) { + super(theMsg); + } + +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java new file mode 100644 index 0000000..fee617f --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java @@ -0,0 +1,3643 @@ +package org.onap.sdc.dcae.checker; + +import java.lang.reflect.Method; +import java.lang.reflect.InvocationTargetException; + +import java.io.File; +import java.io.Reader; +import java.io.IOException; + +import java.net.URI; +import java.net.URISyntaxException; + +import java.util.HashMap; +import java.util.TreeMap; +import java.util.Iterator; +import java.util.ListIterator; +import java.util.Map; +import java.util.List; +import java.util.LinkedList; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Set; +import java.util.Collection; +import java.util.Collections; +import java.util.regex.Pattern; +import java.util.regex.Matcher; +import java.util.stream.Collectors; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.yaml.snakeyaml.Yaml; + +import com.google.common.collect.Maps; +import com.google.common.collect.MapDifference; +import com.google.common.reflect.Invokable; + +import com.google.common.collect.Table; +import com.google.common.collect.HashBasedTable; + +import kwalify.Validator; +import kwalify.Rule; +import kwalify.Types; +import kwalify.ValidationException; +import kwalify.SchemaException; + +import org.apache.commons.jxpath.JXPathContext; +import org.apache.commons.jxpath.JXPathException; +import org.apache.commons.lang.reflect.ConstructorUtils; +import org.onap.sdc.dcae.checker.annotations.Catalogs; +import org.onap.sdc.dcae.checker.annotations.Checks; +import org.reflections.Reflections; +import org.reflections.util.FilterBuilder; +import org.reflections.util.ConfigurationBuilder; +import org.reflections.scanners.TypeAnnotationsScanner; +import org.reflections.scanners.SubTypesScanner; +import org.reflections.scanners.MethodAnnotationsScanner; + +/* + * To consider: model consistency checking happens now along with validation + * (is implemented as part of the validation hooks). It might be better to + * separate the 2 stages and perform all the consistency checking once + * validation is completed. 
+ */ +public class Checker { + private static final String PROPERTIES = "properties"; + private static final String DEFAULT = "default"; + private static final String ATTRIBUTES = "attributes"; + private static final String DATA_TYPES = "data_types"; + private static final String CAPABILITY_TYPES = "capability_types"; + private static final String VALID_SOURCE_TYPES = "valid_source_types"; + private static final String RELATIONSHIP_TYPES = "relationship_types"; + private static final String INTERFACES = "interfaces"; + private static final String VALID_TARGET_TYPES = "valid_target_types"; + private static final String ARTIFACT_TYPES = "artifact_types"; + private static final String INTERFACE_TYPES = "interface_types"; + private static final String NODE_TYPES = "node_types"; + private static final String REQUIREMENTS = "requirements"; + private static final String CAPABILITIES = "capabilities"; + private static final String GROUP_TYPES = "group_types"; + private static final String TARGETS_CONSTANT = "targets"; + private static final String POLICY_TYPES = "policy_types"; + private static final String IS_NONE_OF_THOSE = "' is none of those"; + private static final String INPUTS = "inputs"; + private static final String CAPABILITY = "capability"; + private static final String ARTIFACTS = "artifacts"; + private static final String WAS_DEFINED_FOR_THE_NODE_TYPE = " was defined for the node type "; + private static final String UNKNOWN = "Unknown "; + private static final String TYPE = " type "; + + private Target target = null; //what we're validating at the moment + + private Map<String, Target> grammars = new HashMap<>(); //grammars for the different tosca versions + + private Catalog catalog; + private TargetLocator locator = new CommonLocator(); + + private Table<String, Method, Object> checks = HashBasedTable.create(); + private Table<String, Method, Object> catalogs = HashBasedTable.create(); + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private static Catalog commonsCatalogInstance = null; + + private static final String[] EMPTY_STRING_ARRAY = new String[0]; + + /* Need a proper way to indicate where the grammars are and how they should be identified */ + private static final String[] grammarFiles = new String[]{"tosca/tosca_simple_yaml_1_0.grammar", + "tosca/tosca_simple_yaml_1_1.grammar"}; + + private Pattern spacePattern = Pattern.compile("\\s"); + + private Pattern indexPattern = Pattern.compile("/\\p{Digit}+"); + + //this is getting silly .. 
+ private static Class[][] checkHookArgTypes = + new Class[][]{ + new Class[]{Map.class, CheckContext.class}, + new Class[]{List.class, CheckContext.class}}; + + private static Class[] validationHookArgTypes = + new Class[]{Object.class, Rule.class, Validator.ValidationContext.class}; + + public Checker() throws CheckerException { + loadGrammars(); + loadAnnotations(); + } + + public static void main(String[] theArgs) { + if (theArgs.length == 0) { + errLogger.log(LogLevel.ERROR, Checker.class.getName(), "checker resource_to_validate [processor]*"); + return; + } + + try { + Catalog cat = Checker.check(new File(theArgs[0])); + + for (Target t : cat.targets()) { + errLogger.log(LogLevel.ERROR, Checker.class.getName(), "{}\n{}\n{}", t.getLocation(), cat.importString(t), t.getReport()); + } + + for (Target t : cat.sortedTargets()) { + errLogger.log(LogLevel.ERROR, Checker.class.getName(), t.toString()); + } + + } catch (Exception x) { + errLogger.log(LogLevel.ERROR, Checker.class.getName(),"Exception {}", x); + } + } + + private void loadGrammars() throws CheckerException { + + for (String grammarFile : grammarFiles) { + Target grammarTarget = this.locator.resolve(grammarFile); + if (grammarTarget == null) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Failed to locate grammar {}", grammarFile); + continue; + } + + parseTarget(grammarTarget); + if (grammarTarget.getReport().hasErrors()) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid grammar {}: {}", grammarFile, grammarTarget.getReport().toString()); + continue; + } + + List versions = null; + try { + versions = (List) + ((Map) + ((Map) + ((Map) grammarTarget.getTarget()) + .get("mapping")) + .get("tosca_definitions_version")) + .get("enum"); + } catch (Exception x) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid grammar {}: cannot locate tosca_definitions_versions. Exception{}", grammarFile, x); + } + if (versions == null || versions.isEmpty()) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid grammar {}: no tosca_definitions_versions specified", grammarFile); + continue; + } + + for (Object version : versions) { + this.grammars.put(version.toString(), grammarTarget); + } + } + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Loaded grammars: {}", this.grammars); + } + + private void loadAnnotations() { + Reflections reflections = new Reflections( + new ConfigurationBuilder() + .forPackages("org.onap.sdc.dcae") + .filterInputsBy(new FilterBuilder() + .include(".*\\.class") + ) + .setScanners(new TypeAnnotationsScanner(), + new SubTypesScanner(), + new MethodAnnotationsScanner()) + .setExpandSuperTypes(false) + ); + + Map<Class, Object> handlers = new HashMap<>(); + + Set<Method> checkHandlers = reflections.getMethodsAnnotatedWith(Checks.class); + for (Method checkHandler : checkHandlers) { + checks.put(checkHandler.getAnnotation(Checks.class).path(), + checkHandler, + handlers.computeIfAbsent(checkHandler.getDeclaringClass(), + type -> { + try { + return (getClass() == type) ? this + : type.newInstance(); + } catch (Exception x) { + throw new RuntimeException(x); + } + })); + } + + Set<Method> catalogHandlers = reflections.getMethodsAnnotatedWith(Catalogs.class); + for (Method catalogHandler : catalogHandlers) { + catalogs.put(catalogHandler.getAnnotation(Catalogs.class).path(), + catalogHandler, + handlers.computeIfAbsent(catalogHandler.getDeclaringClass(), + type -> { + try { + return (getClass() == type) ? 
this + : type.newInstance(); + } catch (Exception x) { + throw new RuntimeException(x); + } + })); + } + } + + + public void setTargetLocator(TargetLocator theLocator) { + this.locator = theLocator; + } + + public Collection<Target> targets() { + if (this.catalog == null) { + throw new IllegalStateException("targets are only available after check"); + } + + return this.catalog.targets(); + } + + public Catalog catalog() { + return this.catalog; + } + + public void process(Processor theProcessor) { + + theProcessor.process(this.catalog); + } + + /* a facility for handling all files in a target directory .. */ + public static Catalog check(File theSource) + throws CheckerException { + + Catalog catalog = new Catalog(commonsCatalog()); + Checker checker = new Checker(); + try { + if (theSource.isDirectory()) { + for (File f : theSource.listFiles()) { + if (f.isFile()) { + checker.check(new Target(theSource.getCanonicalPath(), f.toURI().normalize()), catalog); + } + } + } else { + checker.check(new Target(theSource.getCanonicalPath(), theSource.toURI().normalize()), catalog); + } + } catch (IOException iox) { + throw new CheckerException("Failed to initialize target", iox); + } + + return catalog; + } + + public void check(String theSource) + throws CheckerException { + check(theSource, buildCatalog()); + } + + public void check(String theSource, Catalog theCatalog) + throws CheckerException { + Target tgt = + this.locator.resolve(theSource); + if (null == tgt) { + throw new CheckerException("Unable to locate the target " + theSource); + } + + check(tgt, theCatalog); + } + + public void check(Target theTarget) throws CheckerException { + check(theTarget, buildCatalog()); + } + + public void check(Target theTarget, Catalog theCatalog) throws CheckerException { + + this.catalog = theCatalog; + this.locator.addSearchPath(theTarget.getLocation()); + + if (this.catalog.addTarget(theTarget, null)) { + List<Target> targets = parseTarget(theTarget); + if (theTarget.getReport().hasErrors()) { + return; + } + for (Target targetItr : targets) { + this.catalog.addTarget(targetItr, null); + if (!validateTarget(targetItr).getReport().hasErrors()) { + checkTarget(targetItr); + } + } + } + } + + public void validate(Target theTarget) throws CheckerException { + validate(theTarget, buildCatalog()); + } + + public void validate(Target theTarget, Catalog theCatalog) throws CheckerException { + this.catalog = theCatalog; + this.locator.addSearchPath(theTarget.getLocation()); + + if (this.catalog.addTarget(theTarget, null)) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "@validateTarget"); + if (!validateTarget(theTarget).getReport().hasErrors()) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "@checkTarget"); + checkTarget(theTarget); + } + } + } + + private List<Target> parseTarget(final Target theTarget) + throws CheckerException { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "parseTarget {}", theTarget); + + Reader source = null; + try { + source = theTarget.open(); + } catch (IOException iox) { + throw new CheckerException("Failed to open target " + theTarget, iox); + } + + + ArrayList<Object> yamlRoots = new ArrayList<>(); + try { + Yaml yaml = new Yaml(); + for (Object yamlRoot : yaml.loadAll(source)) { + yamlRoots.add(yamlRoot); + } + + + } catch (Exception x) { + theTarget.report(x); + return Collections.emptyList(); + } finally { + try { + source.close(); + } catch (IOException iox) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), 
getClass().getName(), "Exception {}", iox); + } + } + + ArrayList targets = new ArrayList(yamlRoots.size()); + if (yamlRoots.size() == 1) { + //he target turned out to be a bare document + theTarget.setTarget(yamlRoots.get(0)); + targets.add(theTarget); + } else { + //the target turned out to be a stream containing multiple documents + for (int i = 0; i < yamlRoots.size(); i++) { +/* +!!We're changing the target below, i.e. we're changing the target implementation hence caching implementation will suffer!! +*/ + Target newTarget = new Target(theTarget.getName(), + fragmentTargetURI(theTarget.getLocation(), String.valueOf(i))); + newTarget.setTarget(yamlRoots.get(i)); + targets.add(newTarget); + } + } + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), " exiting parseTarget {}", theTarget); + return targets; + } + + private URI fragmentTargetURI(URI theRoot, String theFragment) { + try { + return new URI(theRoot.getScheme(), + theRoot.getSchemeSpecificPart(), + theFragment); + } catch (URISyntaxException urisx) { + throw new RuntimeException(urisx); + } + } + + private Target validateTarget(Target theTarget) + throws CheckerException { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "entering validateTarget {}", theTarget); + + String version = (String) + ((Map) theTarget.getTarget()) + .get("tosca_definitions_version"); + if (version == null) { + throw new CheckerException("Target " + theTarget + " does not specify a tosca_definitions_version"); + } + + Target grammar = this.grammars.get(version); + if (grammar == null) { + throw new CheckerException("Target " + theTarget + " specifies unknown tosca_definitions_version " + version); + } + + TOSCAValidator validator = null; + try { + validator = new TOSCAValidator(theTarget, grammar.getTarget()); + } catch (SchemaException sx) { + throw new CheckerException("Grammar error at: " + sx.getPath(), sx); + } + + theTarget.getReport().addAll( + validator.validate(theTarget.getTarget())); + + if (!theTarget.getReport().hasErrors()) { + applyCanonicals(theTarget.getTarget(), validator.canonicals); + } + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), " exiting validateTarget {}", theTarget); + return theTarget; + } + + private Target checkTarget(Target theTarget) { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "entering checkTarget {}", theTarget); + + CheckContext ctx = new CheckContext(theTarget); + //start at the top + checkServiceTemplateDefinition( + (Map<String, Object>) theTarget.getTarget(), ctx); + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "exiting checkTarget {}", theTarget); + return theTarget; + } + + public void checkProperties( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(PROPERTIES); + try { + if (!checkDefinition(PROPERTIES, theDefinitions, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkPropertyDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkPropertyDefinition( + String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName); + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + //check the type + if (!checkDataType(theDefinition, theContext)) { + return; + } + //check 
default value is compatible with type + Object defaultValue = theDefinition.get(DEFAULT); + if (defaultValue != null) { + checkDataValuation(defaultValue, theDefinition, theContext); + } + + theContext.exit(); + } + + private void checkAttributes( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(ATTRIBUTES); + try { + if (!checkDefinition(ATTRIBUTES, theDefinitions, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkAttributeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkAttributeDefinition( + String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + if (!checkDataType(theDefinition, theContext)) { + return; + } + } finally { + theContext.exit(); + } + } + + /* top level rule, we collected the whole information set. + * this is where checking starts + */ + private void checkServiceTemplateDefinition( + Map<String, Object> theDef, CheckContext theContext) { + theContext.enter(""); + + if (theDef == null) { + theContext.addError("Empty template", null); + return; + } + +//!!! imports need to be processed first now that catalogging takes place at check time!! + + //first catalog whatever it is there to be cataloged so that the checks can perform cross-checking + for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator(); + ri.hasNext(); ) { + Map.Entry<String, Object> e = ri.next(); + catalogs(e.getKey(), e.getValue(), theContext); + } + + for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator(); + ri.hasNext(); ) { + Map.Entry<String, Object> e = ri.next(); + checks(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + @Catalogs(path = "/data_types") + protected void catalog_data_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(DATA_TYPES); + try { + catalogTypes(Construct.Data, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + @Checks(path = "/data_types") + protected void check_data_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(DATA_TYPES); + + try { + if (!checkDefinition(DATA_TYPES, theDefinitions, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkDataTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkDataTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Data); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map<String, Map>) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Data, theName, theDefinition, + Facet.properties, theContext); + } + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/capability_types") + protected void catalog_capability_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(CAPABILITY_TYPES); + try { + catalogTypes(Construct.Capability, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + /* */ + 
@Checks(path = "/capability_types") + protected void check_capability_types( + Map<String, Map> theTypes, CheckContext theContext) { + theContext.enter(CAPABILITY_TYPES); + try { + if (!checkDefinition(CAPABILITY_TYPES, theTypes, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theTypes.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkCapabilityTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkCapabilityTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Capability); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map<String, Map>) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Capability, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey(ATTRIBUTES)) { + checkAttributes( + (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext); + checkTypeConstructFacet(Construct.Capability, theName, theDefinition, + Facet.attributes, theContext); + } + + //valid_source_types: see capability_type_definition + //unclear: how is the valid_source_types list definition eveolving across + //the type hierarchy: additive, overwriting, ?? + if (theDefinition.containsKey(VALID_SOURCE_TYPES)) { + checkTypeReference(Construct.Node, theContext, + ((List<String>) theDefinition.get(VALID_SOURCE_TYPES)).toArray(EMPTY_STRING_ARRAY)); + } + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/relationship_types") + protected void catalog_relationship_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(RELATIONSHIP_TYPES); + try { + catalogTypes(Construct.Relationship, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/relationship_types") + protected void check_relationship_types( + Map<String, Map> theDefinition, CheckContext theContext) { + theContext.enter(RELATIONSHIP_TYPES); + try { + if (!checkDefinition(RELATIONSHIP_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkRelationshipTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkRelationshipTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Relationship); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map<String, Map>) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey(ATTRIBUTES)) { + checkProperties( + (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext); + checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, + Facet.attributes, theContext); + } + + Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get(INTERFACES); + if (interfaces != null) { + theContext.enter(INTERFACES); + for (Iterator<Map.Entry<String, Map>> i = + interfaces.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + 
check_type_interface_definition( + e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + if (theDefinition.containsKey(VALID_TARGET_TYPES)) { + checkTypeReference(Construct.Capability, theContext, + ((List<String>) theDefinition.get(VALID_TARGET_TYPES)).toArray(EMPTY_STRING_ARRAY)); + } + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/artifact_types") + protected void catalog_artifact_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(ARTIFACT_TYPES); + try { + catalogTypes(Construct.Artifact, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/artifact_types") + protected void check_artifact_types( + Map<String, Map> theDefinition, CheckContext theContext) { + theContext.enter(ARTIFACT_TYPES); + try { + if (!checkDefinition(ARTIFACT_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkArtifactTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkArtifactTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Artifact); + try { + checkDefinition(theName, theDefinition, theContext); + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/interface_types") + protected void catalog_interface_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(INTERFACE_TYPES); + try { + catalogTypes(Construct.Interface, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + @Checks(path = "/interface_types") + protected void check_interface_types( + Map<String, Map> theDefinition, CheckContext theContext) { + theContext.enter(INTERFACE_TYPES); + try { + if (!checkDefinition(INTERFACE_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkInterfaceTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkInterfaceTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Interface); + try { + checkDefinition(theName, theDefinition, theContext); + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/node_types") + protected void catalog_node_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(NODE_TYPES); + try { + catalogTypes(Construct.Node, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/node_types") + protected void check_node_types( + Map<String, Map> theDefinition, CheckContext theContext) { + theContext.enter(NODE_TYPES); + try { + if (!checkDefinition(NODE_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkNodeTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkNodeTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Node); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + 
+ if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map<String, Map>) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Node, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey(ATTRIBUTES)) { + checkProperties( + (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext); + checkTypeConstructFacet(Construct.Node, theName, theDefinition, + Facet.attributes, theContext); + } + + //requirements + if (theDefinition.containsKey(REQUIREMENTS)) { + check_requirements( + (List<Map>) theDefinition.get(REQUIREMENTS), theContext); + } + + //capabilities + if (theDefinition.containsKey(CAPABILITIES)) { + check_capabilities( + (Map<String, Map>) theDefinition.get(CAPABILITIES), theContext); + } + + //interfaces: + Map<String, Map> interfaces = + (Map<String, Map>) theDefinition.get(INTERFACES); + checkMapTypeInterfaceDefinition(theContext, interfaces); + } finally { + theContext.exit(); + } + } + + private void checkMapTypeInterfaceDefinition(CheckContext theContext, Map<String, Map> interfaces) { + if (interfaces != null) { + try { + theContext.enter(INTERFACES); + for (Iterator<Map.Entry<String, Map>> i = + interfaces.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + check_type_interface_definition( + e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + } + + @Catalogs(path = "/group_types") + protected void catalog_group_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(GROUP_TYPES); + try { + catalogTypes(Construct.Group, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + @Checks(path = "/group_types") + protected void check_group_types( + Map<String, Map> theDefinition, CheckContext theContext) { + theContext.enter(GROUP_TYPES); + try { + if (!checkDefinition(GROUP_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkGroupTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkGroupTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Group); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map<String, Map>) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Group, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey(TARGETS_CONSTANT)) { + checkTypeReference(Construct.Node, theContext, + ((List<String>) theDefinition.get(TARGETS_CONSTANT)).toArray(EMPTY_STRING_ARRAY)); + } + + //interfaces + Map<String, Map> interfaces = + (Map<String, Map>) theDefinition.get(INTERFACES); + checkMapTypeInterfaceDefinition(theContext, interfaces); + + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/policy_types") + protected void catalog_policy_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(POLICY_TYPES); + try { + catalogTypes(Construct.Policy, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/policy_types") + protected void check_policy_types( + Map<String, Map> theDefinition, CheckContext theContext) { + theContext.enter(POLICY_TYPES); + try 
{ + if (!checkDefinition(POLICY_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkPolicyTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkPolicyTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Policy); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map<String, Map>) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Policy, theName, theDefinition, + Facet.properties, theContext); + } + + //the targets can be known node types or group types + List<String> targets = (List<String>) theDefinition.get(TARGETS_CONSTANT); + if ((targets != null) && (checkDefinition(TARGETS_CONSTANT, targets, theContext))) { + for (String targetItr : targets) { + if (!(this.catalog.hasType(Construct.Node, targetItr) || + this.catalog.hasType(Construct.Group, targetItr))) { + theContext.addError("The 'targets' entry must contain a reference to a node type or group type, '" + target + IS_NONE_OF_THOSE, null); + } + } + } + } finally { + theContext.exit(); + } + } + + //checking of actual constructs (capability, ..) + + /* First, interface types do not have a hierarchical organization (no + * 'derived_from' in a interface type definition). + * So, when interfaces (with a certain type) are defined in a node + * or relationship type (and they can define new? operations), what + * is there to check: + * Can operations here re-define their declaration from the interface + * type spec?? From A.5.11.3 we are to understand indicates override to be + * the default interpretation .. but they talk about sub-classing so it + * probably intended as a reference to the node or relationship type + * hierarchy and not the interface type (no hierarchy there). + * Or is this a a case of augmentation where new operations can be added?? 
+ */ + private void check_type_interface_definition( + String theName, Map theDef, CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + + if (!checkType(Construct.Interface, theDef, theContext)) { + return; + } + + if (theDef.containsKey(INPUTS)) { + check_inputs((Map<String, Map>) theDef.get(INPUTS), theContext); + } + } finally { + theContext.exit(); + } + } + + private void check_capabilities(Map<String, Map> theDefinition, + CheckContext theContext) { + theContext.enter(CAPABILITIES); + try { + if (!checkDefinition(CAPABILITIES, theDefinition, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkCapabilityDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + /* A capability definition appears within the context ot a node type */ + private void checkCapabilityDefinition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName, Construct.Capability); + + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + + //check capability type + if (!checkType(Construct.Capability, theDef, theContext)) { + return; + } + + //check properties + if (!checkFacetAugmentation( + Construct.Capability, theDef, Facet.properties, theContext)) { + return; + } + + //check attributes + if (!checkFacetAugmentation( + Construct.Capability, theDef, Facet.attributes, theContext)) { + return; + } + + //valid_source_types: should point to valid template nodes + if (theDef.containsKey(VALID_SOURCE_TYPES)) { + checkTypeReference(Construct.Node, theContext, + ((List<String>) theDef.get(VALID_SOURCE_TYPES)).toArray(EMPTY_STRING_ARRAY)); + //per A.6.1.4 there is an additinal check to be performed here: + //"Any Node Type (names) provides as values for the valid_source_types keyname SHALL be type-compatible (i.e., derived from the same parent Node Type) with any Node Types defined using the same keyname in the parent Capability Type." 
+ } + //occurences: were verified in range_definition + + } finally { + theContext.exit(); + } + } + + private void check_requirements(List<Map> theDefinition, + CheckContext theContext) { + theContext.enter(REQUIREMENTS); + try { + if (!checkDefinition(REQUIREMENTS, theDefinition, theContext)) { + return; + } + + for (Iterator<Map> i = theDefinition.iterator(); i.hasNext(); ) { + Map e = i.next(); + Iterator<Map.Entry<String, Map>> ei = + (Iterator<Map.Entry<String, Map>>) e.entrySet().iterator(); + Map.Entry<String, Map> eie = ei.next(); + checkRequirementDefinition(eie.getKey(), eie.getValue(), theContext); + assert !ei.hasNext(); + } + } finally { + theContext.exit(); + } + } + + private void checkRequirementDefinition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName, Construct.Requirement); + + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + //check capability type + String capabilityType = (String) theDef.get(CAPABILITY); + if (null != capabilityType) { + checkTypeReference(Construct.Capability, theContext, capabilityType); + } + + //check node type + String nodeType = (String) theDef.get("node"); + if (null != nodeType) { + checkTypeReference(Construct.Node, theContext, nodeType); + } + + //check relationship type + Map relationshipSpec = (Map) theDef.get("relationship"); + String relationshipType = null; + if (null != relationshipSpec) { + relationshipType = (String) relationshipSpec.get("type"); + if (relationshipType != null) { //should always be the case + checkTypeReference(Construct.Relationship, theContext, relationshipType); + } + + Map<String, Map> interfaces = (Map<String, Map>) + relationshipSpec.get(INTERFACES); + if (interfaces != null) { + //augmentation (additional properties or operations) of the interfaces + //defined by the above relationship types + + //check that the interface types are known + for (Map interfaceDef : interfaces.values()) { + checkType(Construct.Interface, interfaceDef, theContext); + } + } + } + + //cross checks + + //the capability definition might come from the capability type or from the capability definition + //within the node type. We might have more than one as a node might specify multiple capabilities of the + //same type. + //the goal here is to cross check the compatibility of the valid_source_types specification in the + //target capability definition (if that definition contains a valid_source_types entry). + List<Map> capabilityDefs = new LinkedList<>(); + //nodeType exposes capabilityType + if (nodeType != null) { + Map<String, Map> capabilities = + findTypeFacetByType(Construct.Node, nodeType, + Facet.capabilities, capabilityType); + if (capabilities.isEmpty()) { + theContext.addError("The node type " + nodeType + " does not appear to expose a capability of a type compatible with " + capabilityType, null); + } else { + for (Map.Entry<String, Map> capability : capabilities.entrySet()) { + //this is the capability as it was defined in the node type + Map capabilityDef = capability.getValue(); + //if it defines a valid_source_types then we're working with it, + //otherwise we're working with the capability type it points to. + //The spec does not make it clear if the valid_source_types in a capability definition augments or + //overwrites the one from the capabilityType (it just says they must be compatible). 
+ if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) { + capabilityDefs.add(capabilityDef); + } else { + capabilityDef = + catalog.getTypeDefinition(Construct.Capability, (String) capabilityDef.get("type")); + if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) { + capabilityDefs.add(capabilityDef); + } else { + //!!if there is a capability that does not have a valid_source_type than there is no reason to + //make any further verification (as there is a valid node_type/capability target for this requirement) + capabilityDefs.clear(); + break; + } + } + } + } + } else { + Map capabilityDef = catalog.getTypeDefinition(Construct.Capability, capabilityType); + if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) { + capabilityDefs.add(capabilityDef); + } + } + + //check that the node type enclosing this requirement definition + //is in the list of valid_source_types + if (!capabilityDefs.isEmpty()) { + String enclosingNodeType = + theContext.enclosingConstruct(Construct.Node); + assert enclosingNodeType != null; + + if (!capabilityDefs.stream().anyMatch( + (Map capabilityDef) -> { + List<String> valid_source_types = + (List<String>) capabilityDef.get(VALID_SOURCE_TYPES); + return valid_source_types.stream().anyMatch( + (String source_type) -> catalog.isDerivedFrom( + Construct.Node, enclosingNodeType, source_type)); + })) { + theContext.addError("Node type: " + enclosingNodeType + " not compatible with any of the valid_source_types provided in the definition of compatible capabilities", null); + } + } + + //if we have a relationship type, check if it has a valid_target_types + //if it does, make sure that the capability type is compatible with one + //of them + if (relationshipType != null) { //should always be the case + Map relationshipTypeDef = catalog.getTypeDefinition( + Construct.Relationship, relationshipType); + if (relationshipTypeDef != null) { + List<String> valid_target_types = + (List<String>) relationshipTypeDef.get(VALID_TARGET_TYPES); + if (valid_target_types != null) { + boolean found = false; + for (String target_type : valid_target_types) { + if (catalog.isDerivedFrom( + Construct.Capability, capabilityType, target_type)) { + found = true; + break; + } + } + if (!found) { + theContext.addError("Capability type: " + capabilityType + " not compatible with any of the valid_target_types " + valid_target_types + " provided in the definition of relationship type " + relationshipType, null); + } + } + } + } + + //relationship declares the capabilityType in its valid_target_type set + //in A.6.9 'Relationship Type' the spec does not indicate how inheritance + //is to be applied to the valid_target_type spec: cumulative, overwrites, + //so we treat it as an overwrite. 
+ } finally { + theContext.exit(); + } + } + + //topology_template_definition and sub-rules + /* */ + @Checks(path = "/topology_template") + protected void check_topology_template( + Map theDef, CheckContext theContext) { + + theContext.enter("topology_template"); + + for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator(); + ri.hasNext(); ) { + Map.Entry<String, Object> e = ri.next(); + checks(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + /* + * Once the syntax of the imports section is validated parse/validate/catalog * all the imported template information + */ + @Checks(path = "/imports") + protected void check_imports(List theImports, CheckContext theContext) { + theContext.enter("imports"); + + for (ListIterator li = theImports.listIterator(); li.hasNext(); ) { + Object importEntry = li.next(); + Object importFile = ((Map) mapEntry(importEntry).getValue()).get("file"); + Target tgt = null; + try { + tgt = catalog.getTarget((URI) importFile); + } catch (ClassCastException ccx) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Import is {}. Exception {}", importFile, ccx); + } + + if (tgt == null || tgt.getReport().hasErrors()) { + //import failed parsing or validation, we skip it + continue; + } + + //import should have been fully processed by now ??? + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "Processing import {}.", tgt); + checkTarget(tgt); + + } + theContext.exit(); + } + + /* */ + @Checks(path = "/topology_template/substitution_mappings") + protected void check_substitution_mappings(Map<String, Object> theSub, + CheckContext theContext) { + theContext.enter("substitution_mappings"); + try { + //type is mandatory + String type = (String) theSub.get("node_type"); + if (!checkTypeReference(Construct.Node, theContext, type)) { + theContext.addError("Unknown node type: " + type + "", null); + return; //not much to go on with + } + + Map<String, List> capabilities = (Map<String, List>) theSub.get(CAPABILITIES); + if (null != capabilities) { + for (Map.Entry<String, List> ce : capabilities.entrySet()) { + //the key must be a capability of the type + if (null == findTypeFacetByName(Construct.Node, type, + Facet.capabilities, ce.getKey())) { + theContext.addError("Unknown node type capability: " + ce.getKey() + ", type " + type, null); + } + //the value is a 2 element list: first is a local node, + //second is the name of one of its capabilities + List targetList = ce.getValue(); + if (targetList.size() != 2) { + theContext.addError("Invalid capability mapping: " + target + ", expecting 2 elements", null); + continue; + } + + String targetNode = (String) targetList.get(0); + String targetCapability = (String) targetList.get(1); + + Map<String, Object> targetNodeDef = (Map<String, Object>) + this.catalog.getTemplate(theContext.target(), Construct.Node, targetNode); + if (null == targetNodeDef) { + theContext.addError("Invalid capability mapping node template: " + targetNode, null); + continue; + } + + String targetNodeType = (String) targetNodeDef.get("type"); + if (null == findTypeFacetByName(Construct.Node, targetNodeType, + Facet.capabilities, targetCapability)) { + theContext.addError("Invalid capability mapping capability: " + targetCapability + ". 
+ }
+ }
+ }
+
+ Map<String, List> requirements = (Map<String, List>) theSub.get(REQUIREMENTS);
+ if (null != requirements) {
+ for (Map.Entry<String, List> re : requirements.entrySet()) {
+ //the key must be a requirement of the type
+ if (null == findNodeTypeRequirementByName(type, re.getKey())) {
+ theContext.addError("Unknown node type requirement: " + re.getKey() + ", type " + type, null);
+ }
+
+ List targetList = re.getValue();
+ if (targetList.size() != 2) {
+ theContext.addError("Invalid requirement mapping: " + targetList + ", expecting 2 elements", null);
+ continue;
+ }
+
+ String targetNode = (String) targetList.get(0);
+ String targetRequirement = (String) targetList.get(1);
+
+ Map<String, Object> targetNodeDef = (Map<String, Object>)
+ this.catalog.getTemplate(theContext.target(), Construct.Node, targetNode);
+ if (null == targetNodeDef) {
+ theContext.addError("Invalid requirement mapping node template: " + targetNode, null);
+ continue;
+ }
+
+ String targetNodeType = (String) targetNodeDef.get("type");
+ if (null == findNodeTypeRequirementByName(targetNodeType, targetRequirement)) {
+ theContext.addError("Invalid requirement mapping requirement: " + targetRequirement + ". No such requirement found for node template " + targetNode + ", of type " + targetNodeType, null);
+ }
+ }
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+
+ /* */
+ @Checks(path = "/topology_template/inputs")
+ protected void check_inputs(Map<String, Map> theInputs,
+ CheckContext theContext) {
+ theContext.enter(INPUTS);
+
+ try {
+ if (!checkDefinition(INPUTS, theInputs, theContext)) {
+ return;
+ }
+
+ for (Iterator<Map.Entry<String, Map>> i = theInputs.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String, Map> e = i.next();
+ checkInputDefinition(e.getKey(), e.getValue(), theContext);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ private void checkInputDefinition(String theName,
+ Map theDef,
+ CheckContext theContext) {
+ theContext.enter(theName);
+ try {
+ if (!checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+ //check the type specification
+ if (!checkDataType(theDef, theContext)) {
+ return;
+ }
+ //check default value
+ Object defaultValue = theDef.get(DEFAULT);
+ if (defaultValue != null) {
+ checkDataValuation(defaultValue, theDef, theContext);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ @Checks(path = "/topology_template/outputs")
+ protected void check_outputs(Map<String, Map> theOutputs,
+ CheckContext theContext) {
+ theContext.enter("outputs");
+
+ try {
+ if (!checkDefinition("outputs", theOutputs, theContext)) {
+ return;
+ }
+
+ for (Iterator<Map.Entry<String, Map>> i = theOutputs.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String, Map> e = i.next();
+ checkOutputDefinition(e.getKey(), e.getValue(), theContext);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ private void checkOutputDefinition(String theName,
+ Map theDef,
+ CheckContext theContext) {
+ theContext.enter(theName);
+ try {
+ checkDefinition(theName, theDef, theContext);
+ //check the expression
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ @Checks(path = "/topology_template/groups")
+ protected void check_groups(Map<String, Map> theGroups,
+ CheckContext theContext) {
+ theContext.enter("groups");
+
+ try {
+ if (!checkDefinition("groups", theGroups, theContext)) {
+ return;
+ }
+
+ for (Iterator<Map.Entry<String, Map>> i = theGroups.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String, Map> e = i.next();
+ checkGroupDefinition(e.getKey(), e.getValue(), theContext);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ private void checkGroupDefinition(String theName,
+ Map theDef,
+ CheckContext theContext) {
+ theContext.enter(theName);
+ try {
+ if (!checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+
+ if (!checkType(Construct.Group, theDef, theContext)) {
+ return;
+ }
+
+ if (!checkFacet(
+ Construct.Group, theDef, Facet.properties, theContext)) {
+ return;
+ }
+
+ if (theDef.containsKey(TARGETS_CONSTANT)) {
+
+ List<String> targetsTypes = (List<String>)
+ this.catalog.getTypeDefinition(Construct.Group,
+ (String) theDef.get("type"))
+ .get(TARGETS_CONSTANT);
+
+ List<String> targets = (List<String>) theDef.get(TARGETS_CONSTANT);
+ for (String targetItr : targets) {
+ if (!this.catalog.hasTemplate(theContext.target(), Construct.Node, targetItr)) {
+ theContext.addError("The 'targets' entry must contain a reference to a node template, '" + targetItr + "' is not one", null);
+ } else {
+ if (targetsTypes != null) {
+ String targetType = (String)
+ this.catalog.getTemplate(theContext.target(), Construct.Node, targetItr).get("type");
+
+ boolean found = false;
+ for (String type : targetsTypes) {
+ found = this.catalog
+ .isDerivedFrom(Construct.Node, targetType, type);
+ if (found) {
+ break;
+ }
+ }
+
+ if (!found) {
+ theContext.addError("The 'targets' entry '" + targetItr + "' is not type compatible with any of the types specified in the group type targets", null);
+ }
+ }
+ }
+ }
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ @Checks(path = "/topology_template/policies")
+ protected void check_policies(List<Map<String, Map>> thePolicies,
+ CheckContext theContext) {
+ theContext.enter("policies");
+
+ try {
+ if (!checkDefinition("policies", thePolicies, theContext)) {
+ return;
+ }
+
+ for (Map<String, Map> policy : thePolicies) {
+ assert policy.size() == 1;
+ Map.Entry<String, Map> e = policy.entrySet().iterator().next();
+ checkPolicyDefinition(e.getKey(), e.getValue(), theContext);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ private void checkPolicyDefinition(String theName,
+ Map theDef,
+ CheckContext theContext) {
+ theContext.enter(theName);
+ try {
+ if (!checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+
+ if (!checkType(Construct.Policy, theDef, theContext)) {
+ return;
+ }
+
+ if (!checkFacet(
+ Construct.Policy, theDef, Facet.properties, theContext)) {
+ return;
+ }
+
+ //targets: must point to node or group templates (that are of a type
+ //specified in the policy type definition, if targets were specified
+ //there).
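+
+ //An illustrative sketch (assumed names) of the 'targets' shape verified below:
+ //
+ //  policies:
+ //    - my_placement_policy:
+ //        type: my.policies.Placement  #whose type definition may declare targets: [ tosca.nodes.Compute ]
+ //        targets: [ my_server ]       #each entry must name a node/group template compatible with those types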
+ if (theDef.containsKey(TARGETS_CONSTANT)) {
+ List<String> targetsTypes = (List<String>)
+ this.catalog.getTypeDefinition(Construct.Policy,
+ (String) theDef.get("type"))
+ .get(TARGETS_CONSTANT);
+
+ List<String> targets = (List<String>) theDef.get(TARGETS_CONSTANT);
+ for (String targetItr : targets) {
+ Construct targetConstruct = null;
+
+ if (this.catalog.hasTemplate(theContext.target(), Construct.Group, targetItr)) {
+ targetConstruct = Construct.Group;
+ } else if (this.catalog.hasTemplate(theContext.target(), Construct.Node, targetItr)) {
+ targetConstruct = Construct.Node;
+ } else {
+ theContext.addError("The 'targets' entry must contain a reference to a node template or group template, '" + targetItr + IS_NONE_OF_THOSE, null);
+ }
+
+ if (targetConstruct != null &&
+ targetsTypes != null) {
+ //get the target type and make sure it is compatible with the types
+ //indicated in the type spec
+ String targetType = (String)
+ this.catalog.getTemplate(theContext.target(), targetConstruct, targetItr).get("type");
+
+ boolean found = false;
+ for (String type : targetsTypes) {
+ found = this.catalog
+ .isDerivedFrom(targetConstruct, targetType, type);
+ if (found) {
+ break;
+ }
+ }
+
+ if (!found) {
+ theContext.addError("The 'targets' " + targetConstruct + " entry '" + targetItr + "' is not type compatible with any of the types specified in the policy type targets", null);
+ }
+ }
+ }
+ }
+
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ @Checks(path = "/topology_template/node_templates")
+ protected void check_node_templates(Map<String, Map> theTemplates,
+ CheckContext theContext) {
+ theContext.enter("node_templates");
+ try {
+ if (!checkDefinition("node_templates", theTemplates, theContext)) {
+ return;
+ }
+
+ for (Iterator<Map.Entry<String, Map>> i = theTemplates.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String, Map> e = i.next();
+ checkNodeTemplateDefinition(e.getKey(), e.getValue(), theContext);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ private void checkNodeTemplateDefinition(String theName,
+ Map theNode,
+ CheckContext theContext) {
+ theContext.enter(theName, Construct.Node);
+
+ try {
+ if (!checkDefinition(theName, theNode, theContext)) {
+ return;
+ }
+
+ if (!checkType(Construct.Node, theNode, theContext)) {
+ return;
+ }
+
+ //copy
+ String copy = (String) theNode.get("copy");
+ if (copy != null) {
+ if (!checkTemplateReference(Construct.Node, theContext, copy)) {
+ theContext.addError("The 'copy' reference " + copy + " does not point to a known node template", null);
+ } else {
+ //the 'copy' node specification should be used to provide 'defaults'
+ //for this specification
+ }
+ }
+
+ /* check that we operate on properties and attributes within the scope of
+ the specified node type */
+ if (!checkFacet(
+ Construct.Node, /*theName,*/theNode, Facet.properties, theContext)) {
+ return;
+ }
+
+ if (!checkFacet(
+ Construct.Node, /*theName,*/theNode, Facet.attributes, theContext)) {
+ return;
+ }
+
+ //requirement assignment seq
+ if (theNode.containsKey(REQUIREMENTS)) {
+ checkRequirementsAssignmentDefinition(
+ (List<Map>) theNode.get(REQUIREMENTS), theContext);
+ }
+
+ //capability assignment map: subject to augmentation
+ if (theNode.containsKey(CAPABILITIES)) {
+ checkCapabilitiesAssignmentDefinition(
+ (Map<String, Map>) theNode.get(CAPABILITIES), theContext);
+ }
+
+ //interfaces
+ if (theNode.containsKey(INTERFACES)) {
+ checkTemplateInterfacesDefinition(
+ (Map<String, Map>) theNode.get(INTERFACES), theContext);
+ }
+
+ //artifacts: 
artifacts do not have different definition forms/syntax + //depending on the context (type or template) but they are still subject + //to 'augmentation' + if (theNode.containsKey(ARTIFACTS)) { + check_template_artifacts_definition( + (Map<String, Object>) theNode.get(ARTIFACTS), theContext); + } + + /* node_filter: the context to which the node filter is applied is very + * wide here as opposed to the node filter specification in a requirement + * assignment which has a more strict context (target node/capability are + * specified). + * We could check that there are nodes in this template having the + * properties/capabilities specified in this filter, i.e. the filter has + * a chance to succeed. + */ + } finally { + theContext.exit(); + } + } + + @Checks(path = "/topology_template/relationship_templates") + protected void check_relationship_templates(Map theTemplates, + CheckContext theContext) { + theContext.enter("relationship_templates"); + + for (Iterator<Map.Entry<String, Map>> i = theTemplates.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkRelationshipTemplateDefinition(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + private void checkRelationshipTemplateDefinition( + String theName, + Map theRelationship, + CheckContext theContext) { + theContext.enter(theName, Construct.Relationship); + try { + if (!checkDefinition(theName, theRelationship, theContext)) { + return; + } + + if (!checkType(Construct.Relationship, theRelationship, theContext)) { + return; + } + + /* check that we operate on properties and attributes within the scope of + the specified relationship type */ + if (!checkFacet(Construct.Relationship, theRelationship, + Facet.properties, theContext)) { + return; + } + + if (!checkFacet(Construct.Relationship, theRelationship, + Facet.attributes, theContext)) { + return; + } + + /* interface definitions + note: augmentation is allowed here so not clear what to check .. + maybe report augmentations if so configured .. 
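+ As a hedged illustration (names are assumed), a template-level interface
+ assignment here could look like:
+   my_connection:
+     type: tosca.relationships.ConnectsTo
+     interfaces:
+       Configure:
+         inputs:
+           retries: 3  #an input assigned at template level for an interface inherited from the type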
*/
+
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ //requirements and capabilities assignments appear in node templates
+ private void checkRequirementsAssignmentDefinition(
+ List<Map> theRequirements, CheckContext theContext) {
+ theContext.enter(REQUIREMENTS);
+ try {
+ if (!checkDefinition(REQUIREMENTS, theRequirements, theContext)) {
+ return;
+ }
+
+ //the node type for the node template enclosing these requirements
+ String nodeType = (String) catalog.getTemplate(
+ theContext.target(),
+ Construct.Node,
+ theContext.enclosingConstruct(Construct.Node))
+ .get("type");
+
+ for (Iterator<Map> ri = theRequirements.iterator(); ri.hasNext(); ) {
+ Map<String, Map> requirement = (Map<String, Map>) ri.next();
+
+ Iterator<Map.Entry<String, Map>> rai = requirement.entrySet().iterator();
+
+ Map.Entry<String, Map> requirementEntry = rai.next();
+ assert !rai.hasNext();
+
+ String requirementName = requirementEntry.getKey();
+ Map requirementDef = findNodeTypeRequirementByName(
+ nodeType, requirementName);
+
+ if (requirementDef == null) {
+ theContext.addError("No requirement " + requirementName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null);
+ continue;
+ }
+
+ checkRequirementAssignmentDefinition(
+ requirementName, requirementEntry.getValue(), requirementDef, theContext);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ private void checkRequirementAssignmentDefinition(
+ String theRequirementName,
+ Map theAssignment,
+ Map theDefinition,
+ CheckContext theContext) {
+ theContext//.enter("requirement_assignment")
+ .enter(theRequirementName, Construct.Requirement);
+
+ //grab the node type definition to verify compatibility
+
+ try {
+ //node assignment
+ boolean targetNodeIsTemplate = false;
+ String targetNode = (String) theAssignment.get("node");
+ if (targetNode == null) {
+ targetNode = (String) theDefinition.get("node");
+ //targetNodeIsTemplate stays false, targetNode must be a type
+ } else {
+ //the value must be a node template or a node type
+ targetNodeIsTemplate = isTemplateReference(
+ Construct.Node, theContext, targetNode);
+ if ((!targetNodeIsTemplate) && (!isTypeReference(Construct.Node, targetNode))) {
+ theContext.addError("The 'node' entry must contain a reference to a node template or node type, '" + targetNode + IS_NONE_OF_THOSE, null);
+ return;
+ }
+
+ //additional checks
+ String targetNodeDef = (String) theDefinition.get("node");
+ if (targetNodeDef != null && targetNode != null) {
+ if (targetNodeIsTemplate) {
+ //if the target is a node template, it must be compatible with the
+ //node type specification in the requirement definition
+ String targetNodeType = (String)
+ catalog.getTemplate(theContext.target(), Construct.Node, targetNode).get("type");
+ if (!catalog.isDerivedFrom(
+ Construct.Node, targetNodeType, targetNodeDef)) {
+ theContext.addError("The required target node type '" + targetNodeType + "' of target node " + targetNode + " is not compatible with the target node type found in the requirement definition: " + targetNodeDef, null);
+ return;
+ }
+ } else {
+ //if the target is a node type it must be compatible (= or derived
+ //from) with the node type specification in the requirement definition
+ if (!catalog.isDerivedFrom(
+ Construct.Node, targetNode, targetNodeDef)) {
+ theContext.addError("The required target node type '" + targetNode + "' is not compatible with the target node type found in the requirement definition: " + targetNodeDef, null);
+ return;
+ }
+ }
+ }
+ }
+
+ String targetNodeType = targetNodeIsTemplate ?
+ (String) catalog.getTemplate(theContext.target(), Construct.Node, targetNode).get("type") :
+ targetNode;
+
+ //capability assignment
+ boolean targetCapabilityIsType = false;
+ String targetCapability = (String) theAssignment.get(CAPABILITY);
+ if (targetCapability == null) {
+ targetCapability = (String) theDefinition.get(CAPABILITY);
+ //in a requirement definition the target capability can only be a
+ //capability type (and not a capability name within some target node
+ //type)
+ targetCapabilityIsType = true;
+ } else {
+ targetCapabilityIsType = isTypeReference(Construct.Capability, targetCapability);
+
+ //check compatibility with the target capability type specified
+ //in the requirement definition, if any
+ String targetCapabilityDef = (String) theDefinition.get(CAPABILITY);
+ if (targetCapabilityDef != null && targetCapability != null) {
+ if (targetCapabilityIsType) {
+ if (!catalog.isDerivedFrom(
+ Construct.Capability, targetCapability, targetCapabilityDef)) {
+ theContext.addError("The required target capability type '" + targetCapability + "' is not compatible with the target capability type found in the requirement definition: " + targetCapabilityDef, null);
+ return;
+ }
+ } else {
+ //the capability is from a target node. Find its definition and
+ //check that its type is compatible with the capability type
+ //from the requirement definition
+
+ //check target capability compatibility with target node
+ if (targetNode == null) {
+ theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', which was not specified", null);
+ return;
+ }
+ if (!targetNodeIsTemplate) {
+ theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', but a node type was specified there", null);
+ return;
+ }
+ //check that the targetNode (its type) indeed has the
+ //targetCapability
+
+ Map<String, Object> targetNodeCapabilityDef =
+ findTypeFacetByName(
+ Construct.Node, targetNodeType,
+ Facet.capabilities, targetCapability);
+ if (targetNodeCapabilityDef == null) {
+ theContext.addError("No capability '" + targetCapability + "' was specified in the node " + targetNode + " of type " + targetNodeType, null);
+ return;
+ }
+
+ String targetNodeCapabilityType = (String) targetNodeCapabilityDef.get("type");
+
+ if (!catalog.isDerivedFrom(Construct.Capability,
+ targetNodeCapabilityType,
+ targetCapabilityDef)) {
+ theContext.addError("The required target capability type '" + targetCapabilityDef + "' is not compatible with the target capability type found in the target node type capability definition: " + targetNodeCapabilityType + ", targetNode " + targetNode + ", capability name " + targetCapability, null);
+ return;
+ }
+ }
+ }
+ }
+
+ //relationship assignment
+ Map targetRelationship = (Map) theAssignment.get("relationship");
+ if (targetRelationship != null) {
+ //this has to be compatible with the relationship with the same name
+ //from the node type
+ //check the type
+ }
+
+ //node_filter: we use jxpath to simplify the navigation somewhat,
+ //though this is admittedly cryptic
+ JXPathContext jxPath = JXPathContext.newContext(theAssignment);
+ jxPath.setLenient(true);
+
+ List<Map> propertiesFilter =
+ (List<Map>) jxPath.getValue("/node_filter/properties");
+ if (propertiesFilter != null) {
+ for (Map propertyFilter : propertiesFilter) {
+ if (targetNode != null) {
+ //if we have a target node or node template then it must
+ //have these properties
+ for (Object propertyName : propertyFilter.keySet()) {
+ if (null == findTypeFacetByName(Construct.Node,
+ targetNodeType,
+ Facet.properties,
+ propertyName.toString())) {
+ theContext.addError("The node_filter property " + propertyName + " is invalid: requirement target node " + targetNode + " does not have such a property", null);
+ }
+ }
+ }
+ }
+ }
+
+ List<Map> capabilitiesFilter =
+ (List<Map>) jxPath.getValue("/node_filter/capabilities");
+ if (capabilitiesFilter != null) {
+ for (Map capabilityFilterDef : capabilitiesFilter) {
+ assert capabilityFilterDef.size() == 1;
+ Map.Entry<String, Map> capabilityFilterEntry =
+ (Map.Entry<String, Map>) capabilityFilterDef.entrySet().iterator().next();
+ String targetFilterCapability = capabilityFilterEntry.getKey();
+ Map<String, Object> targetFilterCapabilityDef = null;
+
+ //if we have a targetNode the capability name must be a capability of
+ //that node (type); or it can simply be a capability type (but then the node
+ //must have a capability of that type)
+
+ String targetFilterCapabilityType = null;
+ if (targetNode != null) {
+ targetFilterCapabilityDef =
+ findTypeFacetByName(Construct.Node, targetNodeType,
+ Facet.capabilities, targetFilterCapability);
+ if (targetFilterCapabilityDef != null) {
+ targetFilterCapabilityType =
+ (String) targetFilterCapabilityDef/*.values().iterator().next()*/.get("type");
+ } else {
+ Map<String, Map> targetFilterCapabilities =
+ findTypeFacetByType(Construct.Node, targetNodeType,
+ Facet.capabilities, targetFilterCapability);
+
+ if (!targetFilterCapabilities.isEmpty()) {
+ if (targetFilterCapabilities.size() > 1) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "checkRequirementAssignmentDefinition: filter check, target node type '{}' has more than one capability of type '{}', not supported", targetNodeType, targetFilterCapability);
+ }
+ //pick the first entry, it represents a capability of the required type
+ Map.Entry<String, Map> capabilityEntry = targetFilterCapabilities.entrySet().iterator().next();
+ targetFilterCapabilityDef = Collections.singletonMap(capabilityEntry.getKey(),
+ capabilityEntry.getValue());
+ targetFilterCapabilityType = targetFilterCapability;
+ }
+ }
+ } else {
+ //no node (type) specified, it can be a straight capability type
+ targetFilterCapabilityDef = catalog.getTypeDefinition(
+ Construct.Capability, targetFilterCapability);
+ if (targetFilterCapabilityDef != null) {
+ targetFilterCapabilityType = targetFilterCapability;
+ } else {
+ //here comes the odd part: it can still be just a name in which
+ //case we should look at the requirement definition, see which
+ //capability (type) it indicates
+ assert targetCapabilityIsType; //cannot be otherwise, we'd need a node
+ targetFilterCapabilityDef = catalog.getTypeDefinition(
+ Construct.Capability, targetCapability);
+ targetFilterCapabilityType = targetCapability;
+ }
+ }
+
+ if (targetFilterCapabilityDef == null) {
+ theContext.addError("Capability (name or type) " + targetFilterCapability + " is invalid: not a known capability (type) " +
+ ((targetNodeType != null) ? (" of node type " + targetNodeType) : ""), null);
(" of node type" + targetNodeType) : ""), null); + continue; + } + + for (Map propertyFilter : + (List<Map>) jxPath.getValue("/node_filter/capabilities/" + targetFilterCapability + "/properties")) { + //check that the properties are in the scope of the + //capability definition + for (Object propertyName : propertyFilter.keySet()) { + if (null == findTypeFacetByName(Construct.Capability, + targetCapability, + Facet.properties, + propertyName.toString())) { + theContext.addError("The capability filter " + targetFilterCapability + " property " + propertyName + " is invalid: target capability " + targetFilterCapabilityType + " does not have such a property", null); + } + } + } + } + } + + } finally { + theContext//.exit() + .exit(); + } + } + + private void checkCapabilitiesAssignmentDefinition( + Map<String, Map> theCapabilities, CheckContext theContext) { + theContext.enter(CAPABILITIES); + try { + if (!checkDefinition(CAPABILITIES, theCapabilities, theContext)) { + return; + } + + //the node type for the node template enclosing these requirements + String nodeType = (String) catalog.getTemplate( + theContext.target(), + Construct.Node, + theContext.enclosingConstruct(Construct.Node)) + .get("type"); + + for (Iterator<Map.Entry<String, Map>> ci = + theCapabilities.entrySet().iterator(); + ci.hasNext(); ) { + + Map.Entry<String, Map> ce = ci.next(); + + String capabilityName = ce.getKey(); + Map capabilityDef = findTypeFacetByName(Construct.Node, nodeType, + Facet.capabilities, capabilityName); + if (capabilityDef == null) { + theContext.addError("No capability " + capabilityName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null); + continue; + } + + checkCapabilityAssignmentDefinition( + capabilityName, ce.getValue(), capabilityDef, theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkCapabilityAssignmentDefinition( + String theCapabilityName, + Map theAssignment, + Map theDefinition, + CheckContext theContext) { + + theContext.enter(theCapabilityName, Construct.Capability); + try { + String capabilityType = (String) theDefinition.get("type"); + //list of property and attributes assignments + checkFacet(Construct.Capability, theAssignment, capabilityType, + Facet.properties, theContext); + checkFacet(Construct.Capability, theAssignment, capabilityType, + Facet.attributes, theContext); + } finally { + theContext.exit(); + } + } + + private void checkTemplateInterfacesDefinition( + Map<String, Map> theInterfaces, + CheckContext theContext) { + theContext.enter(INTERFACES); + try { + if (!checkDefinition(INTERFACES, theInterfaces, theContext)) { + return; + } + + //the node type for the node template enclosing these requirements + String nodeType = (String) catalog.getTemplate( + theContext.target(), + Construct.Node, + theContext.enclosingConstruct(Construct.Node)) + .get("type"); + + for (Iterator<Map.Entry<String, Map>> ii = + theInterfaces.entrySet().iterator(); + ii.hasNext(); ) { + + Map.Entry<String, Map> ie = ii.next(); + + String interfaceName = ie.getKey(); + Map interfaceDef = findTypeFacetByName(Construct.Node, nodeType, + Facet.interfaces, interfaceName); + + if (interfaceDef == null) { + /* this is subject to augmentation: this could be a warning but not an error */ + theContext.addError("No interface " + interfaceName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null); + continue; + } + + checkTemplateInterfaceDefinition( + interfaceName, ie.getValue(), interfaceDef, theContext); + } + } finally { + theContext.exit(); + } + } + + private void 
+ String theInterfaceName,
+ Map theAssignment,
+ Map theDefinition,
+ CheckContext theContext) {
+
+ theContext.enter(theInterfaceName, Construct.Interface);
+ try {
+ //check the assignment of the common inputs
+ checkFacet(Construct.Interface,
+ theAssignment,
+ (String) theDefinition.get("type"),
+ Facet.inputs,
+ theContext);
+ } finally {
+ theContext.exit();
+ }
+ }
+
+
+ @Checks(path = "/topology_template/artifacts")
+ protected void check_template_artifacts_definition(
+ Map<String, Object> theDefinition,
+ CheckContext theContext) {
+ theContext.enter(ARTIFACTS);
+ theContext.exit();
+ }
+
+ //generic checking actions, not related to validation rules
+
+ /* will check the validity of the type specification for any construct containing a 'type' entry */
+ private boolean checkType(Construct theCategory, Map theSpec, CheckContext theContext) {
+ String type = (String) theSpec.get("type");
+ if (type == null) {
+ theContext.addError("Missing type specification", null);
+ return false;
+ }
+
+ if (!catalog.hasType(theCategory, type)) {
+ theContext.addError(UNKNOWN + theCategory + " type: " + type, null);
+ return false;
+ }
+
+ return true;
+ }
+
+ /* the type can be:
+ * a known type: predefined or user-defined
+ * a collection (list or map), in which case we check that the entry_schema points to one of the first two cases
+ */
+ private boolean checkDataType(Map theSpec, CheckContext theContext) {
+
+ if (!checkType(Construct.Data, theSpec, theContext)) {
+ return false;
+ }
+
+ String type = (String) theSpec.get("type");
+ if (/*isCollectionType(type)*/
+ "list".equals(type) || "map".equals(type)) {
+ Map entrySchema = (Map) theSpec.get("entry_schema");
+ if (entrySchema == null) {
+ //maybe issue a warning ?? or is 'string' the default??
+ return true;
+ }
+
+ if (!catalog.hasType(Construct.Data, (String) entrySchema.get("type"))) {
+ theContext.addError("Unknown entry_schema type: " + entrySchema, null);
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /* Check that a particular facet (properties, attributes) of a construct type
+ * (node type, capability type, etc) is correctly (consistently) defined
+ * across a type hierarchy
+ */
+ private boolean checkTypeConstructFacet(Construct theConstruct,
+ String theTypeName,
+ Map theTypeSpec,
+ Facet theFacet,
+ CheckContext theContext) {
+ Map<String, Map> defs =
+ (Map<String, Map>) theTypeSpec.get(theFacet.name());
+ if (null == defs) {
+ return true;
+ }
+
+ boolean res = true;
+
+ //given that the type was cataloged there will be at least one entry
+ Iterator<Map.Entry<String, Map>> i =
+ catalog.hierarchy(theConstruct, theTypeName);
+ if (!i.hasNext()) {
+ theContext.addError(
+ "The type " + theTypeName + " needs to be cataloged before attempting 'checkTypeConstruct'", null);
+ return false;
+ }
+ i.next(); //skip self
+ while (i.hasNext()) {
+ Map.Entry<String, Map> e = i.next();
+ Map<String, Map> superDefs = (Map<String, Map>) e.getValue()
+ .get(theFacet.name());
+ if (null == superDefs) {
+ continue;
+ }
+ //these are the entries that appear in both collections but with different values, i.e. the re-defined properties
+ Map<String, MapDifference.ValueDifference<Map>> diff = Maps.difference(defs, superDefs).entriesDiffering();
+
+ for (Iterator<Map.Entry<String, MapDifference.ValueDifference<Map>>> di = diff.entrySet().iterator(); di.hasNext(); ) {
+ Map.Entry<String, MapDifference.ValueDifference<Map>> de = di.next();
+ MapDifference.ValueDifference<Map> dediff = de.getValue();
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} type {}: {} has been re-defined between the {} types {} and {}", theConstruct, theFacet, de.getKey(), theConstruct, e.getKey(), theTypeName);
+ //for now we just check that the type is consistently re-declared
+ if (!this.catalog.isDerivedFrom(theFacet.construct(),
+ (String) dediff.leftValue().get("type"),
+ (String) dediff.rightValue().get("type"))) {
+ theContext.addError(
+ theConstruct + TYPE + theFacet + ", redefinition changed its type: " + de.getKey() + " has been re-defined between the " + theConstruct + " types " + e.getKey() + " and " + theTypeName + " in an incompatible manner", null);
+ res = false;
+ }
+ }
+ }
+
+ return res;
+ }
+
+ /*
+ * Checks the validity of a certain facet of a construct
+ * (properties of a node) across a type hierarchy.
+ * For now the check is limited to verifying that a facet was declared
+ * somewhere in the construct type hierarchy (a node template property has
+ * been declared in the node type hierarchy).
+ *
+ * Two versions, with the more generic one allowing the specification of the
+ * type to be done explicitly.
+ */
+ private boolean checkFacet(Construct theConstruct,
+ Map theSpec,
+ Facet theFacet,
+ CheckContext theContext) {
+ return checkFacet(theConstruct, theSpec, null, theFacet, theContext);
+ }
+
+ /**
+ * We walk the hierarchy and verify the assignment of a property with respect to its definition.
+ * We also collect the names of those properties defined as required but for which no assignment was provided.
+ */
+ private boolean checkFacet(Construct theConstruct,
+ Map theSpec,
+ String theSpecType,
+ Facet theFacet,
+ CheckContext theContext) {
+
+ Map<String, Map> defs = (Map<String, Map>) theSpec.get(theFacet.name());
+ if (null == defs) {
+ return true;
+ }
+ defs = Maps.newHashMap(defs); //make a modifiable copy, matched entries are removed as we walk the hierarchy
+
+ boolean res = true;
+ if (theSpecType == null) {
+ theSpecType = (String) theSpec.get("type");
+ }
+ if (theSpecType == null) {
+ theContext.addError("No specification type available", null);
+ return false;
+ }
+
+ Map<String, Byte> missed = new HashMap<>(); //keeps track of the missing required properties (the value is
+ //0 if a default was found along the hierarchy)
+ Iterator<Map.Entry<String, Map>> i =
+ catalog.hierarchy(theConstruct, theSpecType);
+ while (i.hasNext() && !defs.isEmpty()) {
+ Map.Entry<String, Map> type = i.next();
+
+ Map<String, Map> typeDefs = (Map<String, Map>) type.getValue()
+ .get(theFacet.name());
+ if (null == typeDefs) {
+ continue;
+ }
+
+ MapDifference<String, Map> diff = Maps.difference(defs, typeDefs);
+
+ //these are the ones this type and the spec have in common (same key,
+ //different values)
+ Map<String, MapDifference.ValueDifference<Map>> facetDefs =
+ diff.entriesDiffering();
+ //TODO: this assumes the definition of the facet is not cumulative, i.e.
+ //subtypes 'add' something to the definition provided by the super-types;
+ //it assumes the most specialized definition stands on its own
+ for (MapDifference.ValueDifference<Map> valdef : facetDefs.values()) {
+ checkDataValuation(valdef.leftValue(), valdef.rightValue(), theContext);
+ }
+
+ //remove from properties all those that appear in this type: unfortunately this returns an unmodifiable map ..
+ defs = Maps.newHashMap(diff.entriesOnlyOnLeft());
+ }
+
+ if (!defs.isEmpty()) {
+ theContext.addError(UNKNOWN + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") were used: " + defs, null);
+ res = false;
+ }
+
+ if (!missed.isEmpty()) {
+ List missedNames =
+ missed.entrySet()
+ .stream()
+ .filter(e -> e.getValue().byteValue() == (byte) 1)
+ .map(e -> e.getKey())
+ .collect(Collectors.toList());
+ if (!missedNames.isEmpty()) {
+ theContext.addError(theConstruct + " " + theFacet + " missing required values for: " + missedNames, null);
+ res = false;
+ }
+ }
+
+ return res;
+ }
+
+ /* Augmentation occurs in cases such as the declaration of capabilities within a node type.
+ * In such cases the construct facets (the capability's properties) can redefine (augment) the
+ * specification found in the construct type.
+ */
+ private boolean checkFacetAugmentation(Construct theConstruct,
+ Map theSpec,
+ Facet theFacet,
+ CheckContext theContext) {
+ return checkFacetAugmentation(theConstruct, theSpec, null, theFacet, theContext);
+ }
+
+ private boolean checkFacetAugmentation(Construct theConstruct,
+ Map theSpec,
+ String theSpecType,
+ Facet theFacet,
+ CheckContext theContext) {
+
+ Map<String, Map> augs = (Map<String, Map>) theSpec.get(theFacet.name());
+ if (null == augs) {
+ return true;
+ }
+
+ boolean res = true;
+ if (theSpecType == null) {
+ theSpecType = (String) theSpec.get("type");
+ }
+ if (theSpecType == null) {
+ theContext.addError("No specification type available", null);
+ return false;
+ }
+
+ for (Iterator<Map.Entry<String, Map>> ai = augs.entrySet().iterator(); ai.hasNext(); ) {
+ Map.Entry<String, Map> ae = ai.next();
+
+ //make sure it was declared by the type
+ Map facetDef = catalog.getFacetDefinition(theConstruct, theSpecType, theFacet, ae.getKey());
+ if (facetDef == null) {
+ theContext.addError(UNKNOWN + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") was used: " + ae.getKey(), null);
+ res = false;
+ continue;
+ }
+
+ //check the compatibility of the augmentation: only the type cannot be changed
+ //can the type be changed in a compatible manner ??
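+
+ //A hedged, illustrative sketch (assumed names) of the augmentation guarded
+ //below: a node type may re-declare a property of one of its capabilities,
+ //but only with the very same type:
+ //
+ //  node_types:
+ //    my.nodes.Host:
+ //      capabilities:
+ //        host:
+ //          type: tosca.capabilities.Container
+ //          properties:
+ //            num_cpus: { type: integer, default: 2 }  #ok: same type as in the capability type, new default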
+ if (!facetDef.get("type").equals(ae.getValue().get("type"))) { + theContext.addError(theConstruct + " " + theFacet + " " + ae.getKey() + " has a different type than its definition: " + ae.getValue().get("type") + " instead of " + facetDef.get("type"), null); + res = false; + continue; + } + + //check any valuation (here just defaults) + Object defaultValue = ae.getValue().get(DEFAULT); + if (defaultValue != null) { + checkDataValuation(defaultValue, ae.getValue(), theContext); + } + } + + return res; + } + + private boolean catalogTypes(Construct theConstruct, Map<String, Map> theTypes, CheckContext theContext) { + + boolean res = true; + for (Map.Entry<String, Map> typeEntry : theTypes.entrySet()) { + res &= catalogType(theConstruct, typeEntry.getKey(), typeEntry.getValue(), theContext); + } + + return res; + } + + private boolean catalogType(Construct theConstruct, + String theName, + Map theDef, + CheckContext theContext) { + + if (!catalog.addType(theConstruct, theName, theDef)) { + theContext.addError(theConstruct + TYPE + theName + " re-declaration", null); + return false; + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} type {} has been cataloged", theConstruct, theName); + + String parentType = (String) theDef.get("derived_from"); + if (parentType != null && !catalog.hasType(theConstruct, parentType)) { + theContext.addError( + theConstruct + TYPE + theName + " indicates a supertype that has not (yet) been declared: " + parentType, null); + return false; + } + return true; + } + + private boolean checkTypeReference(Construct theConstruct, + CheckContext theContext, + String... theTypeNames) { + boolean res = true; + for (String typeName : theTypeNames) { + if (!isTypeReference(theConstruct, typeName)) { + theContext.addError("Reference to " + theConstruct + " type '" + typeName + "' points to unknown type", null); + res = false; + } + } + return res; + } + + private boolean isTypeReference(Construct theConstruct, + String theTypeName) { + return this.catalog.hasType(theConstruct, theTypeName); + } + + /* node or relationship templates */ + private boolean checkTemplateReference(Construct theConstruct, + CheckContext theContext, + String... theTemplateNames) { + boolean res = true; + for (String templateName : theTemplateNames) { + if (!isTemplateReference(theConstruct, theContext, templateName)) { + theContext.addError("Reference to " + theConstruct + " template '" + templateName + "' points to unknown template", null); + res = false; + } + } + return res; + } + + private boolean isTemplateReference(Construct theConstruct, + CheckContext theContext, + String theTemplateName) { + return this.catalog.hasTemplate(theContext.target(), theConstruct, theTemplateName); + } + + /* + * For inputs/properties/attributes/(parameters). It is the caller's + * responsability to provide the value (from a 'default', inlined, ..) + * + * @param theDef the definition of the given construct/facet as it appears in + * its enclosing type definition. 
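+ * A hedged example (assumed values): for a definition such as
+ *   { type: integer, constraints: [ { in_range: [ 0, 10 ] } ] }
+ * a provided default of 42 passes the type evaluator but is rejected by the
+ * constraints evaluator.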
+ * @param theExpr the value or function expression to check
+ */
+ private boolean checkDataValuation(Object theExpr,
+ Map<String, ?> theDef,
+ CheckContext theContext) {
+ //first check if the expression is a function, if not handle it as a value assignment
+ Data.Function f = Data.function(theExpr);
+ if (f != null) {
+ return f.evaluator()
+ .eval(theExpr, theDef, theContext);
+ } else {
+ Data.Type type = Data.typeByName((String) theDef.get("type"));
+ if (type != null) {
+ Data.Evaluator evaluator;
+
+ evaluator = type.evaluator();
+ if (evaluator == null) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No value evaluator available for type {}", type);
+ } else {
+ if ((theExpr != null) && (!evaluator.eval(theExpr, theDef, theContext))) {
+ return false;
+ }
+ }
+
+ evaluator = type.constraintsEvaluator();
+ if (evaluator == null) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No constraints evaluator available for type {}", type);
+ } else {
+ if (theExpr != null) {
+ if (!evaluator.eval(theExpr, theDef, theContext)) {
+ return false;
+ }
+ } else {
+ //should have a null value validator
+ }
+ }
+
+ return true;
+ } else {
+ theContext.addError("Expression " + theExpr + " of " + theDef + " could not be evaluated", null);
+ return false;
+ }
+ }
+ }
+
+ /**
+ * Given the type of a certain construct (node type for example), look up,
+ * in one of its facets (properties, capabilities, ..), entries of the given
+ * facet type (if looking in properties, entries of the given data type).
+ *
+ * @return a map of all facets of the given type, will be empty to signal
+ * none found
+ * <p>
+ * Should we look for a facet construct of a compatible type: any type derived
+ * from the given facet's construct type??
+ */
+ private Map<String, Map>
+ findTypeFacetByType(Construct theTypeConstruct,
+ String theTypeName,
+ Facet theFacet,
+ String theFacetType) {
+
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType {}, {}: {} {}", theTypeName, theTypeConstruct, theFacetType, theFacet);
+ Map<String, Map> res = new HashMap<>();
+ Iterator<Map.Entry<String, Map>> i =
+ catalog.hierarchy(theTypeConstruct, theTypeName);
+ while (i.hasNext()) {
+ Map.Entry<String, Map> typeSpec = i.next();
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, Checking {} type {}", theTypeConstruct, typeSpec.getKey());
+ Map<String, Map> typeFacet =
+ (Map<String, Map>) typeSpec.getValue().get(theFacet.name());
+ if (typeFacet == null) {
+ continue;
+ }
+ Iterator<Map.Entry<String, Map>> fi = typeFacet.entrySet().iterator();
+ while (fi.hasNext()) {
+ Map.Entry<String, Map> facet = fi.next();
+ String facetType = (String) facet.getValue().get("type");
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, Checking {} type {}", facet.getKey(), facetType);
+
+ //here is the question: do we look for an exact match or ..
+ //now we check that the type has a capability of a type compatible
+ //(equal or derived from) the given capability type.
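+ //A hedged example (assumed names): with theFacet == capabilities and
+ //theFacetType == tosca.capabilities.Endpoint, a node type declaring
+ //  capabilities:
+ //    admin_endpoint: { type: tosca.capabilities.Endpoint.Admin }
+ //matches here, as Endpoint.Admin derives from Endpoint.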
+ if (catalog.isDerivedFrom(
+ theFacet.construct(), facetType, theFacetType)) {
+ res.putIfAbsent(facet.getKey(), facet.getValue());
+ }
+ }
+ }
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, found {}", res);
+
+ return res;
+ }
+
+ private Map<String, Object>
+ findTypeFacetByName(Construct theTypeConstruct,
+ String theTypeName,
+ Facet theFacet,
+ String theFacetName) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByName {} {}", theTypeConstruct, theTypeName);
+ Iterator<Map.Entry<String, Map>> i =
+ catalog.hierarchy(theTypeConstruct, theTypeName);
+ while (i.hasNext()) {
+ Map.Entry<String, Map> typeSpec = i.next();
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByName, Checking {} type {}", theTypeConstruct, typeSpec.getKey());
+ Map<String, Map> typeFacet =
+ (Map<String, Map>) typeSpec.getValue().get(theFacet.name());
+ if (typeFacet == null) {
+ continue;
+ }
+ Map<String, Object> facet = typeFacet.get(theFacetName);
+ if (facet != null) {
+ return facet;
+ }
+ }
+ return null;
+ }
+
+ /* Requirements are the oddball as they are structured as a sequence .. */
+ private Map<String, Map> findNodeTypeRequirementByName(
+ String theNodeType, String theRequirementName) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findNodeTypeRequirementByName {}/{}", theNodeType, theRequirementName);
+ Iterator<Map.Entry<String, Map>> i =
+ catalog.hierarchy(Construct.Node, theNodeType);
+ while (i.hasNext()) {
+ Map.Entry<String, Map> nodeType = i.next();
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findNodeTypeRequirementByName, Checking node type {}", nodeType.getKey());
+ List<Map<String, Map>> nodeTypeRequirements =
+ (List<Map<String, Map>>) nodeType.getValue().get(REQUIREMENTS);
+ if (nodeTypeRequirements == null) {
+ continue;
+ }
+
+ for (Map<String, Map> requirement : nodeTypeRequirements) {
+ Map requirementDef = requirement.get(theRequirementName);
+ if (requirementDef != null) {
+ return requirementDef;
+ }
+ }
+ }
+ return null;
+ }
+
+ /*
+ * Additional generic checks to be performed on any definition: construct,
+ * construct types, etc ..
+ */
+ public boolean checkDefinition(String theName,
+ Map theDefinition,
+ CheckContext theContext) {
+ if (theDefinition == null) {
+ theContext.addError("Missing definition for " + theName, null);
+ return false;
+ }
+
+ if (theDefinition.isEmpty()) {
+ theContext.addError("Empty definition for " + theName, null);
+ return false;
+ }
+
+ return true;
+ }
+
+ private boolean checkDefinition(String theName,
+ List theDefinition,
+ CheckContext theContext) {
+ if (theDefinition == null) {
+ theContext.addError("Missing definition for " + theName, null);
+ return false;
+ }
+
+ if (theDefinition.isEmpty()) {
+ theContext.addError("Empty definition for " + theName, null);
+ return false;
+ }
+
+ return true;
+ }
+
+ /* plenty of one entry maps around */
+ private Map.Entry mapEntry(Object theMap) {
+ return (Map.Entry) ((Map) theMap).entrySet().iterator().next();
+ }
+
+ /**
+ * Given that we remembered the canonical forms that were needed during
+ * validation to replace the short forms we can apply them to the target
+ * yaml.
+ * We take advantage here of the fact that the context path maintained
+ * during validation is compatible with (j)xpath, with the exception of
+ * sequence/array indexing ..
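+ * A hedged example (assumed path shape): a context path such as
+ *   /topology_template/node_templates/my_node/requirements/0
+ * is rewritten by patchIndexes below into the 1-based jxpath form
+ *   /topology_template/node_templates/my_node/requirements[1]
+ * while patchWhitespaces wraps path elements containing spaces into
+ * [@name='..'] predicates.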
+ */ + + private String patchIndexes(CharSequence thePath) { + Matcher m = indexPattern.matcher(thePath); + StringBuffer path = new StringBuffer(); + while (m.find()) { + String index = m.group(); + index = "[" + (Integer.valueOf(index.substring(1)).intValue() + 1) + "]"; + m.appendReplacement(path, Matcher.quoteReplacement(index)); + } + m.appendTail(path); + return path.toString(); + } + + private String patchWhitespaces(String thePath) { + String[] elems = thePath.split("/"); + StringBuffer path = new StringBuffer(); + for (int i = 0; i < elems.length; i++) { + if (spacePattern.matcher(elems[i]).find()) { + path.append("[@name='") + .append(elems[i]) + .append("']"); + } else { + path.append("/") + .append(elems[i]); + } + } + return path.toString(); + } + + private void applyCanonicals(Object theTarget, + Map<String, Object> theCanonicals) { + if (theCanonicals.isEmpty()) { + return; + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "applying canonicals: {}", theCanonicals); + applyCanonicals(theTarget, theCanonicals, "/", false); + } + + /* + * applies canonicals selectively + */ + private void applyCanonicals(Object theTarget, + Map<String, Object> theCanonicals, + String thePrefix, + boolean doRemove) { + + JXPathContext jxPath = JXPathContext.newContext(theTarget); + for (Iterator<Map.Entry<String, Object>> ces = + theCanonicals.entrySet().iterator(); + ces.hasNext(); ) { + Map.Entry<String, Object> ce = ces.next(); + //should we check prefix before or after normalization ?? + String path = ce.getKey(); + if (path.startsWith(thePrefix)) { + path = patchWhitespaces( + patchIndexes(path)); + try { + jxPath.setValue(path, ce.getValue()); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Applied canonical form (prefix '{}') at: {}", thePrefix, path); + + if (doRemove) { + ces.remove(); + } + } catch (JXPathException jxpx) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Failed to apply canonical to {} {}", theTarget, jxpx); + } + } + } + } + + /* + * commons are built-in and supposed to be bulletproof so any error in here + * goes out loud. + */ + private static Catalog commonsCatalog() { + + synchronized (Catalog.class) { + + if (commonsCatalogInstance != null) { + return commonsCatalogInstance; + } + + //if other templates are going to be part of the common type system + //add them to this list. order is relevant. + final String[] commons = new String[]{ + "tosca/tosca-common-types.yaml"}; + + Checker commonsChecker; + try { + commonsChecker = new Checker(); + + for (String common : commons) { + commonsChecker.check(common, buildCatalog(false)); + Report commonsReport = commonsChecker.targets().iterator().next().getReport(); + + if (commonsReport.hasErrors()) { + throw new RuntimeException("Failed to process commons:\n" + + commonsReport); + } + } + } catch (CheckerException cx) { + throw new RuntimeException("Failed to process commons", cx); + } + commonsCatalogInstance = commonsChecker.catalog; + return commonsCatalogInstance; + } + } + + public static Catalog buildCatalog() { + return buildCatalog(true); + } + + private static Catalog buildCatalog(boolean doCommons) { + + Catalog catalog = new Catalog(doCommons ? commonsCatalog() : null); + if (!doCommons) { + //add core TOSCA types + for (Data.CoreType type : Data.CoreType.class.getEnumConstants()) { + catalog.addType(Construct.Data, type.toString(), Collections.emptyMap()); + } + } + return catalog; + } + + private boolean invokeHook(String theHookName, + Class[] theArgTypes, + Object... 
theArgs) {
+
+ Invokable hookHandler = null;
+ try {
+ Method m = Checker.class.getDeclaredMethod(
+ theHookName, theArgTypes);
+ m.setAccessible(true);
+ hookHandler = Invokable.from(m);
+ } catch (NoSuchMethodException nsmx) {
+ //that's ok, not every rule has to have a handler
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "That's ok, not every rule has to have a handler. Method name = {}", theHookName);
+ }
+
+ if (hookHandler != null) {
+ try {
+ hookHandler.invoke(this, theArgs);
+ } catch (InvocationTargetException | IllegalAccessException itx) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invocation failed for hook handler {} {}", theHookName, itx);
+ } catch (Exception x) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "Hook handler failed {} {}", theHookName, x);
+ }
+ }
+
+ return hookHandler != null;
+ }
+
+ private void validationHook(String theTiming,
+ Object theTarget,
+ Rule theRule,
+ Validator.ValidationContext theContext) {
+
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "looking up validation handler for {}, {} {}", theRule.getName(), theTiming, theContext.getPath());
+ if (!invokeHook(theRule.getName() + "_" + theTiming + "_validation_handler",
+ validationHookArgTypes,
+ theTarget, theRule, theContext)) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "no validation handler for {}", theRule.getName() + "_" + theTiming);
+ }
+ }
+
+ private void checks(String theName,
+ Object theTarget,
+ CheckContext theContext) {
+ Map<Method, Object> handlers = checks.row(/*theName*/theContext.getPath(theName));
+ if (handlers != null) {
+ for (Map.Entry<Method, Object> handler : handlers.entrySet()) {
+ try {
+ handler.getKey().invoke(handler.getValue(), new Object[]{theTarget, theContext});
+ } catch (Exception x) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "Check {} with {} failed {}", theName, handler.getKey(), x);
+ }
+ }
+ } else {
+ boolean hasHook = false;
+ for (Class[] argTypes : checkHookArgTypes) {
+ hasHook |= invokeHook("check_" + theName,
+ argTypes,
+ theTarget, theContext);
+ //shouldn't we stop as soon as hasHook is true??
+ }
+
+ if (!hasHook) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "no check handler for {}", theName);
+ }
+ }
+ }
+
+ private void catalogs(String theName,
+ Object theTarget,
+ CheckContext theContext) {
+
+ Map<Method, Object> handlers = catalogs.row(/*theName*/theContext.getPath(theName));
+ if (handlers != null) {
+ for (Map.Entry<Method, Object> handler : handlers.entrySet()) {
+ try {
+ handler.getKey().invoke(handler.getValue(), new Object[]{theTarget, theContext});
+ } catch (Exception x) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "Cataloging {} with {} failed {}", theName, handler.getKey(), x);
+ }
+ }
+ }
+ }
+
+ private class TOSCAValidator extends Validator {
+
+ //what we're validating
+ private Target target;
+
+ /* Some of the TOSCA entries accept a 'short form/notation' instead of the canonical map representation.
+ * kwalify cannot easily express these alternatives and as such we handle them here. In the pre-validation phase we detect the presence of a short notation
+ * and compute the canonical form and validate it. In the post-validation phase we
+ * substitute the canonical form for the short form so that checking does not have to deal with it.
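+ * A hedged example (assumed rule and names): a requirement assignment such as
+ *   host: my_server
+ * is expanded, using the rule's 'short' hint ('node'), into the canonical form
+ *   host: { node: my_server }
+ * which is then validated and, post-validation, substituted into the target yaml.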
+ */ + + private Map<String, Object> canonicals = new TreeMap<>(); + + TOSCAValidator(Target theTarget, Object theSchema) { + super(theSchema); + this.target = theTarget; + } + + public Target getTarget() { + return this.target; + } + + /* hook method called by Validator#validate() + */ + @Override + protected boolean preValidationHook(Object value, Rule rule, ValidationContext context) { + + validationHook("pre", value, rule, context); + //short form handling + String hint = rule.getShort(); + if (value != null && + hint != null) { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Attempting canonical at {}, rule {}", context.getPath(), rule.getName()); + + Object canonical = null; + //if the canonical form requires a collection + if (Types.isCollectionType(rule.getType())) { + //and the actual value isn't one + if (!(value instanceof Map || value instanceof List)) { + //used to use singleton map/list here (was good for catching errors) + //but there is the possibility if short forms within short forms so + //the created canonicals need to accomodate other values. + if (Types.isMapType(rule.getType())) { + canonical = new HashMap(); + ((Map) canonical).put(hint, value); + } else { + //the hint is irrelevant here but we should impose a value when the target is a list + canonical = new LinkedList(); + ((List) canonical).add(value); + } + } else { + //we can accomodate: + // map to list of map transformation + if (!Types.isMapType(rule.getType()) /* a seq */ && + value instanceof Map) { + canonical = new LinkedList(); + ((List) canonical).add(value); + } else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Grammar for rule {} (at {}) would require unsupported short form transformation: {} to {}", rule.getName(), context.getPath(), value.getClass(), rule.getType()); + return false; + } + } + + int errc = context.errorCount(); + validateRule(canonical, rule, context); + if (errc != context.errorCount()) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Short notation for {} through {} at {} failed validation", rule.getName(), hint, context.getPath()); + } else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Short notation for {} through {} at {} passed validation. Canonical form is {}", rule.getName(), hint, context.getPath(), canonical); + //replace the short notation with the canonicall one so we don't + //have to deal it again during checking + this.canonicals.put(context.getPath(), canonical); + return true; + } + } else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Grammar for rule {} (at {}) would require unsupported short form transformation: {} to {}", rule.getName(), context.getPath(), value.getClass(), rule.getType()); + } + } + + //perform default validation process + return false; + } + + /* + * Only gets invoked once the value was succesfully verified against the syntax indicated by the given rule. + */ + @Override + protected void postValidationHook(Object value, + Rule rule, + ValidationContext context) { + validationHook("post", value, rule, context); + } + + } + + /** + * Maintains state across the checking process. 
+ */ + public class CheckContext { + + private Target target; + private ArrayList<String> elems = new ArrayList<>(10); + private ArrayList<Construct> constructs = new ArrayList<>(10); + + CheckContext(Target theTarget) { + this.target = theTarget; + } + + public CheckContext enter(String theName) { + return enter(theName, null); + } + + public CheckContext enter(String theName, Construct theConstruct) { + this.elems.add(theName); + this.constructs.add(theConstruct); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering check {} {}", theName, getPath()); + return this; + } + + public CheckContext exit() { + String path = getPath(); + String name = this.elems.remove(this.elems.size() - 1); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "exiting check {} {}", name, path); + this.constructs.remove(this.constructs.size() - 1); + return this; + } + + public String getPath() { + return buildPath(null); + } + + String getPath(String theNextElem) { + return buildPath(theNextElem); + } + + String buildPath(String theElem) { + StringBuilder sb = new StringBuilder(); + for (String e : this.elems) { + sb.append(e) + .append("/"); + } + if (theElem != null) { + sb.append(theElem) + .append("/"); + } + + return sb.substring(0, sb.length() - 1); + } + + public String enclosingConstruct(Construct theConstruct) { + for (int i = this.constructs.size() - 1; i > 0; i--) { + Construct c = this.constructs.get(i); + if (c != null && c.equals(theConstruct)) { + return this.elems.get(i); + } + } + return null; + } + + public CheckContext addError(String theMessage, Throwable theCause) { + this.target.report(new TargetError("", getPath(), theMessage, theCause)); + return this; + } + + public Checker checker() { + return Checker.this; + } + + public Catalog catalog() { + return Checker.this.catalog; + } + + public Target target() { + return this.target; + } + + public String toString() { + return "CheckContext(" + this.target.getLocation() + "," + getPath() + ")"; + } + } + + // -------------------------------------------------------------------------------------------------- // + + private String errorReport(List<Throwable> theErrors) { + StringBuilder sb = new StringBuilder(theErrors.size() + " errors"); + for (Throwable x : theErrors) { + sb.append("\n"); + if (x instanceof ValidationException) { + ValidationException vx = (ValidationException) x; + // .apend("at ") + // .append(error.getLineNumber()) + // .append(" : ") + sb.append("[").append(vx.getPath()).append("] "); + } else if (x instanceof TargetError) { + TargetError tx = (TargetError) x; + sb.append("[").append(tx.getLocation()).append("] "); + } + sb.append(x.getMessage()); + if (x.getCause() != null) { + sb.append("\n\tCaused by:\n").append(x.getCause()); + } + } + sb.append("\n"); + return sb.toString(); + } + + protected void range_definition_post_validation_handler(Object theValue, Rule theRule, + Validator.ValidationContext theContext) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "entering range_definition {}", + theContext.getPath()); + + assert theRule.getType().equals("seq"); + List bounds = (List) theValue; + + if (bounds.size() != 2) { + theContext.addError("Too many values in bounds specification", theRule, theValue, null); + return; + } + + try { + Double.parseDouble(bounds.get(0).toString()); + } catch (NumberFormatException nfe) { + theContext.addError("Lower bound not a number", theRule, theValue, null); + } + + try { + Double.parseDouble(bounds.get(1).toString()); + } 
+		catch (NumberFormatException nfe) {
+			if (!"UNBOUNDED".equals(bounds.get(1).toString())) {
+				theContext.addError("Upper bound not a number or 'UNBOUNDED'", theRule, theValue, null);
+			}
+		}
+
+	}
+
+	/*
+	 * early processing (validation time) of the imports allows us to catalog
+	 * their types before those declared in the main document.
+	 */
+	protected void imports_post_validation_handler(Object theValue, Rule theRule,
+			Validator.ValidationContext theContext) {
+		debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering imports {}", theContext.getPath());
+		assert theRule.getType().equals("seq");
+
+		Target tgt = ((TOSCAValidator) theContext.getValidator()).getTarget();
+
+		applyCanonicals(tgt.getTarget(), ((TOSCAValidator) theContext.getValidator()).canonicals, "/imports", true);
+
+		for (ListIterator li = ((List) theValue).listIterator(); li.hasNext();) {
+
+			Map.Entry importEntry = mapEntry(li.next());
+
+			Map def = (Map) importEntry.getValue();
+			debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Processing import {}", def);
+
+			String tfile = (String) def.get("file");
+			Target tgti = this.locator.resolve(tfile);
+			if (tgti == null) {
+				theContext.addError("Failure to resolve import '" + def + "', imported from " + tgt, theRule, null,
+						null);
+				continue;
+			}
+			debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Import {} located at {}", def,
+					tgti.getLocation());
+
+			if (this.catalog.addTarget(tgti, tgt)) {
+				// we've never seen this import (location) before
+				try {
+
+					List<Target> tgtis = parseTarget(tgti);
+					if (tgtis.isEmpty()) {
+						continue;
+					}
+
+					if (tgtis.size() > 1) {
+						theContext.addError(
+								"Import '" + tgti + "', imported from " + tgt + ", contains multiple yaml documents",
+								theRule, null, null);
+						continue;
+					}
+
+					tgti = tgtis.get(0);
+
+					if (tgt.getReport().hasErrors()) {
+						theContext.addError("Failure parsing import '" + tgti + "', imported from " + tgt, theRule,
+								null, null);
+						continue;
+					}
+
+					validateTarget(tgti);
+					if (tgt.getReport().hasErrors()) {
+						theContext.addError("Failure validating import '" + tgti + "', imported from " + tgt, theRule,
+								null, null);
+						continue;
+					}
+				} catch (CheckerException cx) {
+					theContext.addError("Failure validating import '" + tgti + "', imported from " + tgt, theRule, cx,
+							null);
+				}
+			}
+
+			// replace with the actual location (also because this is what they get
+			// indexed by ..
bad, this exposed catalog inner workings) + + def.put("file", tgti.getLocation()); + } + } + + protected void node_templates_post_validation_handler(Object theValue, Rule theRule, + Validator.ValidationContext theContext) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering node_templates_post_validation_handler {}", + theContext.getPath()); + assert theRule.getType().equals("map"); + Map<String, Map> nodeTemplates = (Map<String, Map>) theValue; + for (Iterator<Map.Entry<String, Map>> i = nodeTemplates.entrySet().iterator(); i.hasNext();) { + Map.Entry<String, Map> node = i.next(); + try { + catalog.addTemplate(((TOSCAValidator) theContext.getValidator()).getTarget(), Construct.Node, + node.getKey(), node.getValue()); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Node template {} has been cataloged", + node.getKey()); + } catch (CatalogException cx) { + theContext.addError(cx.toString(), theRule, node, null); + } + } + } + + protected void inputs_post_validation_handler(Object theValue, Rule theRule, + Validator.ValidationContext theContext) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering inputs_post_validation_handler {}", + theContext.getPath()); + assert theRule.getType().equals("map"); + + // we'll repeat this test during checking but because we index inputs + // early + // we need it here too + if (theValue == null) { + return; + } + + Map<String, Map> inputs = (Map<String, Map>) theValue; + for (Iterator<Map.Entry<String, Map>> i = inputs.entrySet().iterator(); i.hasNext();) { + Map.Entry<String, Map> input = i.next(); + try { + catalog.addTemplate(((TOSCAValidator) theContext.getValidator()).getTarget(), Construct.Data, + input.getKey(), input.getValue()); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Input {} has been cataloged", + input.getKey()); + } catch (CatalogException cx) { + theContext.addError(cx.toString(), theRule, input, null); + } + } + } + + private void process(String theProcessorSpec) throws CheckerException { + + String[] spec = theProcessorSpec.split(" "); + if (spec.length == 0) + throw new IllegalArgumentException("Incomplete processor specification"); + + Class processorClass = null; + try { + processorClass = Class.forName(spec[0]); + } catch (ClassNotFoundException cnfx) { + throw new CheckerException("Cannot find processor implementation", cnfx); + } + + Processor proc = null; + try { + proc = (Processor) ConstructorUtils.invokeConstructor(processorClass, + Arrays.copyOfRange(spec, 1, spec.length)); + } catch (Exception x) { + throw new CheckerException("Cannot instantiate processor", x); + } + + process(proc); + } + + protected void check_artifact_definition(String theName, Map theDef, CheckContext theContext) { + theContext.enter(theName, Construct.Artifact); + + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + // check artifact type + if (!checkType(Construct.Artifact, theDef, theContext)) + return; + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_policy_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Policy); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map<String, Map>) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Policy, theName, theDefinition, Facet.properties, theContext); + } + + // the targets can be 
known node types or group types + List<String> targets = (List<String>) theDefinition.get("targets"); + if (targets != null) { + if (checkDefinition("targets", targets, theContext)) { + for (String target : targets) { + if (!(this.catalog.hasType(Construct.Node, target) + || this.catalog.hasType(Construct.Group, target))) { + theContext.addError( + "The 'targets' entry must contain a reference to a node type or group type, '" + + target + "' is none of those", + null); + } + } + } + } + + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_group_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Group); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map<String, Map>) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Group, theName, theDefinition, Facet.properties, theContext); + } + + if (theDefinition.containsKey("targets")) { + checkTypeReference(Construct.Node, theContext, + ((List<String>) theDefinition.get("targets")).toArray(EMPTY_STRING_ARRAY)); + } + + // interfaces + Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get("interfaces"); + if (interfaces != null) { + try { + theContext.enter("interfaces"); + for (Iterator<Map.Entry<String, Map>> i = interfaces.entrySet().iterator(); i.hasNext();) { + Map.Entry<String, Map> e = i.next(); + check_type_interface_definition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_node_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Node); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map<String, Map>) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Node, theName, theDefinition, Facet.properties, theContext); + } + + if (theDefinition.containsKey("attributes")) { + check_properties((Map<String, Map>) theDefinition.get("attributes"), theContext); + checkTypeConstructFacet(Construct.Node, theName, theDefinition, Facet.attributes, theContext); + } + + // requirements + if (theDefinition.containsKey("requirements")) { + check_requirements((List<Map>) theDefinition.get("requirements"), theContext); + } + + // capabilities + if (theDefinition.containsKey("capabilities")) { + check_capabilities((Map<String, Map>) theDefinition.get("capabilities"), theContext); + } + + // interfaces: + Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get("interfaces"); + if (interfaces != null) { + try { + theContext.enter("interfaces"); + for (Iterator<Map.Entry<String, Map>> i = interfaces.entrySet().iterator(); i.hasNext();) { + Map.Entry<String, Map> e = i.next(); + check_type_interface_definition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + // artifacts + + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_interface_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Interface); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + // not much else here: a list of operation_definitions, each with + // its + // implementation and 
inputs
+
+			// check that common inputs are re-defined in a compatible manner
+
+			// check that the interface operations are overridden in a
+			// compatible manner
+
+		} finally {
+			theContext.exit();
+		}
+	}
+
+	/* */
+	protected void check_artifact_type_definition(String theName, Map theDefinition, CheckContext theContext) {
+		theContext.enter(theName, Construct.Artifact);
+		try {
+			if (!checkDefinition(theName, theDefinition, theContext)) {
+				return;
+			}
+
+		} finally {
+			theContext.exit();
+		}
+	}
+
+	/* */
+	protected void check_relationship_type_definition(String theName, Map theDefinition, CheckContext theContext) {
+		theContext.enter(theName, Construct.Relationship);
+		try {
+			if (!checkDefinition(theName, theDefinition, theContext)) {
+				return;
+			}
+
+			if (theDefinition.containsKey("properties")) {
+				check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+				checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, Facet.properties, theContext);
+			}
+
+			if (theDefinition.containsKey("attributes")) {
+				check_properties((Map<String, Map>) theDefinition.get("attributes"), theContext);
+				checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, Facet.attributes, theContext);
+			}
+
+			Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get("interfaces");
+			if (interfaces != null) {
+				try {
+					theContext.enter("interfaces");
+					for (Iterator<Map.Entry<String, Map>> i = interfaces.entrySet().iterator(); i.hasNext();) {
+						Map.Entry<String, Map> e = i.next();
+						check_type_interface_definition(e.getKey(), e.getValue(), theContext);
+					}
+				} finally {
+					theContext.exit();
+				}
+			}
+
+			if (theDefinition.containsKey("valid_target_types")) {
+				checkTypeReference(Construct.Capability, theContext,
+						((List<String>) theDefinition.get("valid_target_types")).toArray(EMPTY_STRING_ARRAY));
+			}
+		} finally {
+			theContext.exit();
+		}
+	}
+
+	/* */
+	protected void check_capability_type_definition(String theName, Map theDefinition, CheckContext theContext) {
+		theContext.enter(theName, Construct.Capability);
+
+		try {
+			if (!checkDefinition(theName, theDefinition, theContext)) {
+				return;
+			}
+
+			if (theDefinition.containsKey("properties")) {
+				check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+				checkTypeConstructFacet(Construct.Capability, theName, theDefinition, Facet.properties, theContext);
+			}
+
+			if (theDefinition.containsKey("attributes")) {
+				check_attributes((Map<String, Map>) theDefinition.get("attributes"), theContext);
+				checkTypeConstructFacet(Construct.Capability, theName, theDefinition, Facet.attributes, theContext);
+			}
+
+			// valid_source_types: see capability_type_definition
+			// unclear: how is the valid_source_types list definition evolving
+			// across the type hierarchy: additive, overwriting, ??
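+			// For illustration only (editor's sketch, names invented): a capability
+			// type definition reaching this point looks, after YAML parsing, like
+			//   Map<String, Object> def = new LinkedHashMap<>();
+			//   def.put("derived_from", "tosca.capabilities.Endpoint");
+			//   def.put("valid_source_types", Arrays.asList("tosca.nodes.Compute"));
+			// so the check below only needs to turn the list into a String[] for
+			// checkTypeReference.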
+			if (theDefinition.containsKey("valid_source_types")) {
+				checkTypeReference(Construct.Node, theContext,
+						((List<String>) theDefinition.get("valid_source_types")).toArray(EMPTY_STRING_ARRAY));
+			}
+		} finally {
+			theContext.exit();
+		}
+	}
+
+	/* */
+	protected void check_data_type_definition(String theName, Map theDefinition, CheckContext theContext) {
+		theContext.enter(theName, Construct.Data);
+		try {
+			if (!checkDefinition(theName, theDefinition, theContext)) {
+				return;
+			}
+
+			if (theDefinition.containsKey("properties")) {
+				check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+				checkTypeConstructFacet(Construct.Data, theName, theDefinition, Facet.properties, theContext);
+			}
+		} finally {
+			theContext.exit();
+		}
+	}
+
+	/*
+	 * top level rule, we collected the whole information set. this is where
+	 * checking starts
+	 */
+	protected void check_service_template_definition(Map<String, Object> theDef, CheckContext theContext) {
+		theContext.enter("");
+
+		try {
+			if (theDef == null) {
+				theContext.addError("Empty template", null);
+				return;
+			}
+
+			// !!! imports need to be processed first now that cataloging takes
+			// place at check time!!
+
+			// first catalog whatever is there to be cataloged so that the checks
+			// can perform cross-checking
+			for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator(); ri.hasNext();) {
+				Map.Entry<String, Object> e = ri.next();
+				catalogs(e.getKey(), e.getValue(), theContext);
+			}
+
+			for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator(); ri.hasNext();) {
+				Map.Entry<String, Object> e = ri.next();
+				checks(e.getKey(), e.getValue(), theContext);
+			}
+		} finally {
+			theContext.exit();
+		}
+	}
+
+	protected void check_attribute_definition(String theName, Map theDefinition, CheckContext theContext) {
+		theContext.enter(theName);
+		try {
+			if (!checkDefinition(theName, theDefinition, theContext)) {
+				return;
+			}
+			if (!checkDataType(theDefinition, theContext)) {
+				return;
+			}
+		} finally {
+			theContext.exit();
+		}
+	}
+
+	public void check_attributes(Map<String, Map> theDefinitions, CheckContext theContext) {
+		theContext.enter("attributes");
+		try {
+			if (!checkDefinition("attributes", theDefinitions, theContext)) {
+				return;
+			}
+
+			for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext();) {
+				Map.Entry<String, Map> e = i.next();
+				check_attribute_definition(e.getKey(), e.getValue(), theContext);
+			}
+		} finally {
+			theContext.exit();
+		}
+	}
+
+	protected void check_property_definition(String theName, Map theDefinition, CheckContext theContext) {
+		theContext.enter(theName);
+		try {
+			if (!checkDefinition(theName, theDefinition, theContext)) {
+				return;
+			}
+			// check the type
+			if (!checkDataType(theDefinition, theContext)) {
+				return;
+			}
+			// check default value is compatible with type
+			Object defaultValue = theDefinition.get("default");
+			if (defaultValue != null) {
+				checkDataValuation(defaultValue, theDefinition, theContext);
+			}
+		} finally {
+			theContext.exit();
+		}
+	}
+
+	public void check_properties(Map<String, Map> theDefinitions, CheckContext theContext) {
+		theContext.enter("properties");
+		try {
+			if (!checkDefinition("properties", theDefinitions, theContext)) {
+				return;
+			}
+
+			for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext();) {
+				Map.Entry<String, Map> e = i.next();
+				check_property_definition(e.getKey(), e.getValue(), theContext);
+			}
+		} finally {
+			theContext.exit();
+		}
+	}
+
+}
+
diff --git
a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CheckerException.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CheckerException.java
new file mode 100644
index 0000000..1963c28
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CheckerException.java
@@ -0,0 +1,18 @@
+package org.onap.sdc.dcae.checker;
+
+
+/**
+ * A checker exception represents an error that stops the checker from
+ * completing its task.
+ */
+public class CheckerException extends Exception {
+
+  public CheckerException(String theMsg, Throwable theCause) {
+    super(theMsg, theCause);
+  }
+
+  public CheckerException(String theMsg) {
+    super(theMsg);
+  }
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java
new file mode 100644
index 0000000..295a1f2
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java
@@ -0,0 +1,144 @@
+package org.onap.sdc.dcae.checker;
+
+import java.io.InputStream;
+import java.io.IOException;
+
+import java.net.URL;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import java.nio.file.Paths;
+
+import java.util.Set;
+import java.util.LinkedHashSet;
+
+import com.google.common.collect.Iterables;
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+
+
+public class CommonLocator implements TargetLocator {
+
+  private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+  private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+  private Set<URI> searchPaths = new LinkedHashSet<>();
+
+  /* creates a locator with one default search path: the directory
+   * from which the app was launched */
+  public CommonLocator() {
+    addSearchPath(
+      Paths.get(".").toAbsolutePath().normalize().toUri());
+  }
+
+  public CommonLocator(String... theSearchPaths) {
+    for (String path: theSearchPaths) {
+      addSearchPath(path);
+    }
+  }
+
+  public boolean addSearchPath(URI theURI) {
+
+    if (!theURI.isAbsolute()) {
+      errLogger.log(LogLevel.WARN, this.getClass().getName(), "Search paths must be absolute uris: {}", theURI);
+      return false;
+    }
+
+    return searchPaths.add(theURI);
+  }
+
+  public boolean addSearchPath(String thePath) {
+    URI suri = null;
+    try {
+      suri = new URI(thePath);
+    }
+    catch(URISyntaxException urisx) {
+      errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid search path: {} {}", thePath, urisx);
+      return false;
+    }
+
+    return addSearchPath(suri);
+  }
+
+  public Iterable<URI> searchPaths() {
+    return Iterables.unmodifiableIterable(this.searchPaths);
+  }
+
+  /**
+   * Takes the given path, first resolves it as a URI, then attempts to open
+   * it (a way of verifying its existence) against each search path, stopping
+   * at the first successful attempt.
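+   *
+   * Illustrative usage (editor's sketch, not part of the original source):
+   *   TargetLocator locator = new CommonLocator();
+   *   locator.addSearchPath("file:///opt/tosca/templates/");
+   *   Target tosca = locator.resolve("service_template.yaml"); // null when unresolved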
+   */
+  public Target resolve(String theName) {
+    URI puri = null;
+    InputStream pis = null;
+
+    //try classpath
+    URL purl = getClass().getClassLoader().getResource(theName);
+    if (purl != null) {
+      try {
+        return new Target(theName, purl.toURI());
+      }
+      catch (URISyntaxException urisx) {
+        errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Classpath resource {} yielded an invalid uri {}", theName, urisx);
+      }
+    }
+
+    //try absolute
+    try {
+      puri = new URI(theName);
+      if (puri.isAbsolute()) {
+        try {
+          pis = puri.toURL().openStream();
+          pis.close();
+          return new Target(theName, puri.normalize());
+        }
+        catch (IOException iox) {
+          errLogger.log(LogLevel.WARN, this.getClass().getName(), "The path {} is an absolute uri but it cannot be opened {}", theName, iox);
+          return null;
+        }
+      }
+    }
+    catch(URISyntaxException urisx) {
+      debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "TargetResolver failed attempting {} {}", puri, urisx);
+      //keep it silent but what are the chances ..
+    }
+
+    //try relative to the search paths
+    for (URI suri: searchPaths) {
+      try {
+        puri = suri.resolve(theName);
+        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "TargetResolver trying {}", puri);
+        pis = puri.toURL().openStream();
+        return new Target(theName, puri.normalize());
+      }
+      catch (Exception x) {
+        debugLogger.log(LogLevel.ERROR, this.getClass().getName(), "TargetResolver failed attempting {} {}", puri, x);
+        continue;
+      }
+      finally {
+        if (pis != null) {
+          try {
+            pis.close();
+          }
+          catch (IOException iox) {
+            // ignore: failure to close a probe stream is harmless
+          }
+        }
+      }
+    }
+
+    return null;
+  }
+
+  public String toString() {
+    return "CommonLocator(" + this.searchPaths + ")";
+  }
+
+
+  public static void main(String[] theArgs) {
+    TargetLocator tl = new CommonLocator();
+    tl.addSearchPath(java.nio.file.Paths.get("").toUri());
+    tl.addSearchPath("file:///");
+    debugLogger.log(LogLevel.DEBUG, CommonLocator.class.getName(), tl.resolve(theArgs[0]).toString());
+  }
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java
new file mode 100644
index 0000000..b05cff9
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java
@@ -0,0 +1,22 @@
+package org.onap.sdc.dcae.checker;
+
+/*
+ * What exactly is allowed to go in here is a subject of meditation :) I would have said 'elements with a type' but
+ * that will not cover Requirement and Workflow; 'topology template top elements' would cover some but not others ..
+ *
+ * Properties/Attributes/Inputs/Outputs are just Data constructs under a particular name.
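+ *
+ * For illustration (editor's note): the enum mostly serves as a namespace key
+ * across the checker and catalog, e.g.
+ *   catalog.hasType(Construct.Node, "tosca.nodes.Compute")
+ *   catalog.getTypeDefinition(Construct.Capability, "tosca.capabilities.Endpoint")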
+ */ +public enum Construct { + Data, + Requirement, + Capability, + Relationship, + Artifact, + Interface, + Node, + Group, + Policy, + Workflow +} + + diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java new file mode 100644 index 0000000..70552bb --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java @@ -0,0 +1,895 @@ +package org.onap.sdc.dcae.checker; + +import java.util.Collection; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.HashMap; +import java.util.Iterator; +import java.util.EnumSet; + +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; + +import com.google.common.collect.Table; +import com.google.common.collect.HashBasedTable; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; + +/* + * String -- 'primitive tosca type' converters, used in verifying valuations + */ +public class Data { + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private Data() { + } + + /* + */ + @FunctionalInterface + public static interface Evaluator { + + public boolean eval(Object theExpr, Map theDef, Checker.CheckContext theCtx); + } + + + /* data type processing */ + + private static Map<String,Type> typesByName = new HashMap<String,Type>(); + static { + //CoreType.String.toString(); + //CoreFunction.concat.toString(); + //Constraint.equal.toString(); + } + + + public static Data.Type typeByName(String theName) { + return typesByName.getOrDefault(theName, userType); + } +/* + public static Evaluator getTypeEvaluator(Type theType) { + } +*/ + + /* Needs a better name ?? RValue?? + * This is not an rvalue (C def) per se but the construct who's instances + * yield rvalues. It is a construct that yields data, not the data (yield) + * itself. 
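+ *
+ * Usage sketch (editor's illustration): callers resolve a type by its TOSCA
+ * name and delegate to its evaluators, e.g.
+ *   Data.Type t = Data.typeByName("integer");
+ *   boolean ok = t.evaluator().eval(42, def, ctx);
+ * with unknown names falling back to the generic user-type placeholder.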
+ */ + public static interface Type { + + public String name(); + + public Evaluator evaluator(); + + public Evaluator constraintsEvaluator(); + } + + /* generic placeholder + */ + private static Type userType = new Type() { + + public String name() { + return null; + } + + public Evaluator evaluator() { + return Data::evalUser; + } + + public Evaluator constraintsEvaluator() { + return Data::evalUserConstraints; + } + }; + + + public static enum CoreType implements Type { + + String("string", + (expr,def,ctx) -> expr != null && expr instanceof String, + Data::evalScalarConstraints), + Integer("integer", + (expr,def,ctx) -> Data.valueOf(ctx, expr, Integer.class), + Data::evalScalarConstraints), + Float("float", + (expr,def,ctx) -> Data.valueOf(ctx, expr, Double.class, Integer.class), + Data::evalScalarConstraints), + Boolean("boolean", + (expr,def,ctx) -> Data.valueOf(ctx, expr, Boolean.class), + Data::evalScalarConstraints), + Null("null", + (expr,def,ctx) -> expr.equals("null"), + null), + Timestamp("timestamp", + (expr,def,ctx) -> timestampRegex.matcher(expr.toString()).matches(), + null), + List("list", Data::evalList, Data::evalListConstraints), + Map("map", Data::evalMap, Data::evalMapConstraints), + Version("version", + (expr,def,ctx) -> versionRegex.matcher(expr.toString()).matches(), + null), + /* use a scanner and check that the upper bound is indeed greater than + * the lower bound */ + Range("range", + (expr,def,ctx) -> { return rangeRegex.matcher(expr.toString()).matches();}, + null ), + Size("scalar-unit.size", + (expr,def,ctx) -> sizeRegex.matcher(expr.toString()).matches(), + null), + Time("scalar-unit.time", + (expr,def,ctx) -> timeRegex.matcher(expr.toString()).matches(), + null), + Frequency("scalar-unit.frequency", + (expr,def,ctx) -> frequencyRegex.matcher(expr.toString()).matches(), + null); + + + private String toscaName; + private Evaluator valueEvaluator, + constraintsEvaluator; + + private CoreType(String theName, Evaluator theValueEvaluator, Evaluator theConstraintsEvaluator) { + this.toscaName = theName; + this.valueEvaluator = theValueEvaluator; + this.constraintsEvaluator = theConstraintsEvaluator; + + if (typesByName == null) + throw new RuntimeException("No type index available!"); + + typesByName.put(this.toscaName, this); + } + + public String toString() { + return this.toscaName; + } + + public Evaluator evaluator() { + return this.valueEvaluator; + } + + public Evaluator constraintsEvaluator() { + return this.constraintsEvaluator; + } + } + + private static Pattern timestampRegex = null, + versionRegex = null, + rangeRegex = null, + sizeRegex = null, + timeRegex = null, + frequencyRegex = null; + + static { + try { + timestampRegex = Pattern.compile( + "\\p{Digit}+"); //?? 
where to find the definition + + //<major_version>.<minor_version>[.<fix_version>[.<qualifier>[-<build_version]]] + versionRegex = Pattern.compile( + "\\p{Digit}+\\.\\p{Digit}+?(\\.\\p{Digit}+(\\.\\p{Alpha}+(\\-\\p{Digit}+))*)*"); + + rangeRegex = Pattern.compile( + "\\[[ ]*\\p{Digit}+(\\.\\p{Digit}+)?[ ]*\\,[ ]*(\\p{Digit}+(\\.\\p{Digit}+)?|UNBOUNDED)[ ]*\\]"); + + sizeRegex = Pattern.compile( + "\\p{Digit}+(\\.\\p{Digit}+)?[ ]*(B|kB|KiB|MB|MiB|GB|GiB|TB|TiB)"); + + timeRegex = Pattern.compile( + "\\p{Digit}+(\\.\\p{Digit}+)?[ ]*(d|h|m|s|ms|us|ns)"); + + frequencyRegex = Pattern.compile( + "\\p{Digit}+(\\.\\p{Digit}+)?[ ]*(Hz|kHz|MHz|GHz)"); + } + catch (PatternSyntaxException psx) { + throw new RuntimeException("Bad patterns", psx); + } + } + + /* */ + public static boolean evalScalarConstraints(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { + Data.Type type = typeByName((String)theDef.get("type")); + List<Map<String,Object>> constraints = + (List<Map<String,Object>>)theDef.get("constraints"); + if (constraints == null) { + return true; + } + + //check value against constraints + boolean res = true; + for (Map<String,Object> constraintDef: constraints) { + Map.Entry<String,Object> constraintEntry = + constraintDef.entrySet().iterator().next(); + Data.Constraint constraint = constraintByName(constraintEntry.getKey()); + +// the def passed here includes all constraints, not necessary! we can pass +// simple constraintEntry.getValue() + Evaluator constraintEvaluator = getTypeConstraintEvaluator(type, constraint); + if (constraintEvaluator == null) { + debugLogger.log(LogLevel.DEBUG, Data.class.getName(), "No constant evaluator available for {}/{}", type, constraint); + continue; + } + + if (!constraintEvaluator.eval(theVal, theDef, theCtx)) { + theCtx.addError("Value " + theVal + " failed constraint " + constraintEntry, null); + res = false; + } + } + return res; + } + + /* + * It assumes the specification is complete, i.e. it contains a valid + * entry_schema section. + * TODO: check constraints, i.e. entrySchema.get("constraints") + */ + public static boolean evalList(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { + try { + return evalCollection((List)theVal, theDef, theCtx); + } + catch (ClassCastException ccx) { + theCtx.addError("Value " + theVal + " not a list", null); + return false; + } + } + + public static boolean evalMap(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { + try { + return evalCollection(((Map)theVal).values(), theDef, theCtx); + } + catch (ClassCastException ccx) { + theCtx.addError("Value " + theVal + " not a map", null); + return false; + } + } + + + /** + * The elements of a collection can be of a core type or user defined type. 
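+   * For illustration (editor's sketch): a 'list of string' property arrives
+   * here with theDef = {type: list, entry_schema: {type: string}} and
+   * theVals = ["a", "b"]; each element is then checked with the entry type's
+   * evaluator, and function calls among the elements are checked as functions.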
+   */
+  private static boolean evalCollection(Collection theVals,
+                                        Map theDef,
+                                        Checker.CheckContext theCtx) {
+    Data.Type entryType = null;
+    Map entryTypeDef = (Map)theDef.get("entry_schema");
+    if (null != entryTypeDef) {
+      entryType = typeByName((String)entryTypeDef.get("type"));
+    }
+
+    boolean res = true;
+    for (Object val: theVals) {
+      //check if the value is a function call rather than a plain value
+      Data.Function f = Data.function(val);
+      if (f != null) {
+        //a function call: leave the checking to the function's evaluator
+        if (!f.evaluator().eval(val, entryTypeDef, theCtx)) {
+          res = false;
+        }
+      }
+      else if (entryType != null &&
+               !entryType.evaluator().eval(val, entryTypeDef, theCtx)) {
+        res = false;
+        //the error should have been reported by the particular evaluator
+      }
+    }
+    return res;
+  }
+
+  public static boolean evalListConstraints(Object theVal,
+                                            Map theDef,
+                                            Checker.CheckContext theCtx) {
+    return evalCollectionConstraints((List)theVal, theDef, theCtx);
+  }
+
+  public static boolean evalMapConstraints(Object theVal,
+                                           Map theDef,
+                                           Checker.CheckContext theCtx) {
+    return evalCollectionConstraints(((Map)theVal).values(), theDef, theCtx);
+  }
+
+  private static boolean evalCollectionConstraints(Collection theVals,
+                                                   Map theDef,
+                                                   Checker.CheckContext theCtx) {
+    //should check overall constraints
+
+    if (theVals == null) {
+      return true;
+    }
+
+    Map entryTypeDef = (Map)theDef.get("entry_schema");
+    if (null == entryTypeDef) {
+      return true;
+    }
+
+    String entryTypeName = (String)entryTypeDef.get("type");
+    Data.Type entryType = typeByName(entryTypeName);
+
+    boolean res = true;
+    for (Object val: theVals) {
+      Evaluator entryEvaluator = entryType.constraintsEvaluator();
+      if (entryEvaluator != null &&
+          !entryEvaluator.eval(val, entryTypeDef, theCtx)) {
+        res = false;
+        //the constraints evaluator should have already added an error, but it also adds some context
+      }
+    }
+    return res;
+  }
+
+  /*
+   * All required properties across the hierarchical definition must be present
+   * TODO: the expr cannot contain any entry not specified in the type definition
+   */
+  public static boolean evalUser(Object theVal,
+                                 Map theDef,
+                                 Checker.CheckContext theCtx) {
+
+    boolean res = true;
+    Map val = (Map)theVal;
+    //must be done with respect to the super-type(s) definition
+    Iterator<Map.Entry> props = theCtx.catalog()
+                                  .facets(Construct.Data,
+                                          Facet.properties,
+                                          (String)theDef.get("type"));
+    while (props.hasNext()) {
+      Map.Entry propEntry = props.next();
+      Map propDef = (Map)propEntry.getValue();
+      Object propVal = val.get(propEntry.getKey());
+
+      if (propVal != null) {
+        Data.Type propType = typeByName((String)propDef.get("type"));
+
+        if (!propType.evaluator().eval(propVal, propDef, theCtx)) {
+          res = false;
+          //the type evaluator should have already added an error
+        }
+      }
+    }
+    return res;
+  }
+
+  public static boolean evalUserConstraints(Object theVal,
+                                            Map theDef,
+                                            Checker.CheckContext theCtx) {
+    boolean res = true;
+    Map val = (Map)theVal;
+    Iterator<Map.Entry> props = theCtx.catalog()
+                                  .facets(Construct.Data,
+                                          Facet.properties,
+                                          (String)theDef.get("type"));
+    while (props.hasNext()) {
+      Map.Entry propEntry = props.next();
+      Map propDef = (Map)propEntry.getValue();
+      Object propVal = val.get(propEntry.getKey());
+
+      if (propVal != null) {
+        Data.Type propType = typeByName((String)propDef.get("type"));
+
+        if (propType.constraintsEvaluator() != null &&
+ !propType.constraintsEvaluator().eval(propVal, propDef, theCtx)) { + res= false; + //the constraints evaluator should have already added an error + //theCtx.addError("Property " + propEntry.getKey() + " failed evaluation for " + propVal, null); + } + } + else { + if (Boolean.TRUE == (Boolean)propDef.getOrDefault("required", Boolean.FALSE) && + !propDef.containsKey("default")) { + theCtx.addError("Property " + propEntry.getKey() + " failed 'required' constraint; definition is " + propDef, null); + res = false; + } + } + } + return res; + } + + private static boolean valueOf(Checker.CheckContext theCtx, + Object theExpr, + Class ... theTypes) { + for (Class type: theTypes) { + if (type.isAssignableFrom(theExpr.getClass())) { + return true; + } + } + + theCtx.addError("Expression " + theExpr + " as " + theExpr.getClass().getName() + " is not compatible with any of required types: " + Arrays.toString(theTypes), null); + return false; + } + +/* + private static boolean valueOf(Class theTarget, + String theExpr, + Checker.CheckContext theCtx) { + try { + theTarget.getMethod("valueOf", new Class[] {String.class}) + .invoke(null, theExpr); + return true; + } + catch (InvocationTargetException itx) { + theCtx.addError("Failed to parse " + theExpr + " as a " + theTarget.getName(), itx.getCause()); + return false; + } + catch (Exception x) { + theCtx.addError("Failed to valueOf " + theExpr + " as a " + theTarget.getName(), x); + return false; + } + } +*/ + + /* + * Function e(valuation) + * ? + * note to self : is there a more efficient way of retrieving a map's + * single entry? (without knowing the key) + * + * ! Function evaluators have to handle null definition (i.e. perform argument checking) so that + * we can use them in the context of collections with without entry_schemas + */ + + //just as Type but is it worth expressing this 'commonality'?? + + public static interface Function { + + public String name(); + + public Evaluator evaluator(); + } + + /* + * This is a heuristic induced from the tosca specification .. it answers the + * question of wether the given expression is a function + */ + public static Function function(Object theExpr) { + if (theExpr instanceof Map && + ((Map)theExpr).size() == 1) { + try { + return Enum.valueOf(CoreFunction.class, functionName(theExpr)); + } + catch (IllegalArgumentException iax) { + //no such function but we cannot really record an error as we only guessed the expression as being a function .. 
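+        //e.g. (editor's note) {get_input: cpu_count} resolves to
+        //CoreFunction.get_input below, while {whatever: ...} lands here and is
+        //treated as plain data by the caller.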
+ debugLogger.log(LogLevel.DEBUG, Data.class.getName(), "Failed attempt to interpret {} as a function call", theExpr); + } + } + + return null; + } + + /* + */ + public static String functionName(Object theExpr) { + return (String) + ((Map.Entry) + ((Map)theExpr).entrySet().iterator().next()) + .getKey(); + } + + /* + */ + public static Data.Function functionByName(String theName) { + return Enum.valueOf(CoreFunction.class, theName); + } + + /* + */ + public static enum CoreFunction implements Function { + + concat(Data::evalConcat), + token(Data::evalToken), + get_input(Data::evalGetInput), + get_property(Data::evalGetProperty), + get_attribute(Data::evalGetAttribute), + get_operation_output((expr,def,ctx) -> true), + get_nodes_of_type(Data::evalGetNodesOfType), + get_artifact((expr,def,ctx) -> true); + + private Evaluator evaluator; + + private CoreFunction(Evaluator theEval) { + this.evaluator = theEval; + } + + public Evaluator evaluator() { + return this.evaluator; + } + } + + private static boolean evalConcat( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + return true; + } + + private static boolean evalToken( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + return true; + } + + private static boolean evalGetInput( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + Map val = (Map)theVal; + Map.Entry entry = (Map.Entry)val.entrySet().iterator().next(); + + if (!(entry.getValue() instanceof String)) { + theCtx.addError("get_input: argument must be a String" ,null); + return false; + } + + //check that an input with the given name exists and has a compatible type + Map inputDef = theCtx.catalog() + .getTemplate(theCtx.target(), Construct.Data, (String)entry.getValue()); + if (inputDef == null) { + theCtx.addError("get_input: no such input " + entry.getValue(), null); + return false; + } + + if (theDef == null) + return true; + + //the output must be type compatible with the input + String targetType = (String)theDef.get("type"); + if (targetType != null) { + String inputType = (String)inputDef.get("type"); + + if (!theCtx.catalog() + .isDerivedFrom(Construct.Data, inputType, targetType)) { + theCtx.addError("get_input: input type " + inputType + " is incompatible with the target type " + targetType, null); + return false; + } + } + + return true; + } + + /* + * Who's the smarty that decided to define optional arguments in between + * required ones ?! 
+   * (factors the evaluation of get_attribute and get_property)
+   */
+  private static boolean evalGetData(
+      Object theVal, Map theDef,
+      EnumSet<Facet> theFacets, Checker.CheckContext theCtx) {
+
+    Map val = (Map)theVal;
+    Map.Entry entry = (Map.Entry)val.entrySet().iterator().next();
+
+    if (!(entry.getValue() instanceof List)) {
+      theCtx.addError("get_property: argument must be a List", null);
+      return false;
+    }
+
+    List args = (List)entry.getValue();
+    if (args.size() < 2) {
+      theCtx.addError("'get_property' requires at least 2 arguments", null);
+      return false;
+    }
+
+    //the first argument is a node or relationship template
+    String tmpl = (String)args.get(0);
+    Construct tmplConstruct = null;
+    Map tmplSpec = null;
+
+    if ("SELF".equals(tmpl)) {
+      tmpl = theCtx.enclosingConstruct(Construct.Node);
+      if (tmpl == null) {
+        tmpl = theCtx.enclosingConstruct(Construct.Relationship);
+        if (tmpl == null) {
+          theCtx.addError("'get_property' invalid SELF reference: no node or relationship template in scope at " + theCtx.getPath(), null);
+          return false;
+        }
+        else {
+          tmplConstruct = Construct.Relationship;
+        }
+      }
+      else {
+        tmplConstruct = Construct.Node;
+      }
+      tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), tmplConstruct, tmpl);
+    }
+    else if ("SOURCE".equals(tmpl)) {
+      //we are in the scope of a relationship template and this is the source node template.
+      tmpl = theCtx.enclosingConstruct(Construct.Relationship);
+      if (tmpl == null) {
+        theCtx.addError("'get_property' invalid SOURCE reference: no relationship template in scope at " + theCtx.getPath(), null);
+        return false;
+      }
+
+      return true;
+    }
+    else if ("TARGET".equals(tmpl)) {
+      //we are in the scope of a relationship template and this is the target node template.
+      tmpl = theCtx.enclosingConstruct(Construct.Relationship);
+      if (tmpl == null) {
+        theCtx.addError("'get_property' invalid TARGET reference: no relationship template in scope at " + theCtx.getPath(), null);
+        return false;
+      }
+
+      return true;
+    }
+    else if ("HOST".equals(tmpl)) {
+      tmpl = theCtx.enclosingConstruct(Construct.Node);
+      if (tmpl == null) {
+        theCtx.addError("'get_property' invalid HOST reference: no node template in scope at " + theCtx.getPath(), null);
+        return false;
+      }
+
+      return true;
+    }
+    else {
+      //try node template first
+      tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), Construct.Node, tmpl);
+      if (tmplSpec == null) {
+        //try relationship
+        tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), Construct.Relationship, tmpl);
+        if (tmplSpec == null) {
+          theCtx.addError("'get_data' invalid template reference '" + tmpl + "': no node or relationship template with this name", null);
+          return false;
+        }
+        else {
+          tmplConstruct = Construct.Relationship;
+        }
+      }
+      else {
+        tmplConstruct = Construct.Node;
+      }
+    }
+
+    int facetNameIndex = 1;
+    Construct facetConstruct = tmplConstruct; //whose construct the facet is supposed to belong to
+    Map facetConstructSpec = null;
+    String facetConstructType = null;
+
+    if (tmplConstruct.equals(Construct.Node) &&
+        args.size() > 2) {
+      //the second arg might be a capability or requirement name. If it is a
+      //capability then the third argument becomes a property of the
+      //corresponding capability type. If it is a requirement then the
+      //requirement definition indicates a capability whose type has a
+      //property with the name indicated in the third argument ..
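+      //For illustration (editor's note), typical calls that take this branch:
+      //  get_property:  [ SELF, endpoint, port ]   - 'endpoint' names a capability
+      //  get_attribute: [ SELF, host, some_attr ]  - 'host' names a requirement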
+      //
+      //while the spec does not make it explicit this can only take place
+      //if the first argument turned out to be a node template (as relationship
+      //templates/types do not have capabilities/requirements)
+      String secondArg = (String)args.get(1);
+      if ((facetConstructSpec = theCtx.catalog().getFacetDefinition(
+              tmplConstruct,
+              (String)tmplSpec.get("type"),
+              Facet.capabilities,
+              secondArg)) != null) {
+        facetNameIndex = 2;
+        facetConstruct = Construct.Capability;
+        facetConstructType = (String)facetConstructSpec.get("type");
+      }
+      else if ((facetConstructSpec = theCtx.catalog().getRequirementDefinition(
+                  tmplConstruct,
+                  (String)tmplSpec.get("type"),
+                  secondArg)) != null) {
+        facetNameIndex = 2;
+        facetConstruct = Construct.Capability;
+
+        //find the spec of the capability this requirement points to
+        //TODO: check, can the capability reference be anything else but a capability type?
+        facetConstructType = (String)facetConstructSpec.get("capability");
+      }
+    }
+    else {
+      //we'll attempt to handle it as a property of the node template
+      facetConstruct = Construct.Node;
+      facetConstructSpec = tmplSpec;
+      facetConstructType = (String)facetConstructSpec.get("type");
+    }
+
+    //validate the facet name
+    Map facetSpec = null;
+    {
+      String facetName = (String)args.get(facetNameIndex);
+      for (Facet facet: theFacets) {
+        facetSpec = theCtx.catalog()
+                      .getFacetDefinition(
+                        facetConstruct,
+                        facetConstructType,
+                        facet,
+                        facetName);
+        if (facetSpec != null) {
+          break;
+        }
+      }
+
+      if (facetSpec == null) {
+        //TODO: not the greatest message if the call started with a requirement ..
+        theCtx.addError("'get_data' invalid reference, '" + facetConstruct + "' " + facetConstructType + " has no " + theFacets + " with name " + facetName, null);
+        return false;
+      }
+    }
+
+    //the rest of the arguments have to resolve to a field of the property's
+    //data type; the propertySpec contains the type specification
+    for (int i = facetNameIndex + 1; i < args.size(); i++) {
+      //TODO: walk the data type definition along the remaining path elements
+    }
+
+    return true;
+  }
+
+  /**/
+  private static boolean evalGetProperty(
+      Object theVal, Map theDef, Checker.CheckContext theCtx) {
+    return evalGetData(theVal, theDef, EnumSet.of(Facet.properties), theCtx);
+  }
+
+  /*
+   * get_property and get_attribute are identical, just operating on different
+   * facets, with one exception: there is an intrinsic attribute for every
+   * declared property.
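+   *
+   * For illustration (editor's note): with a node template declaring property
+   * 'port', both
+   *   get_property:  [ SELF, port ]
+   *   get_attribute: [ SELF, port ]
+   * are accepted here, while get_property on something declared only as an
+   * attribute is rejected.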
+ */ + private static boolean evalGetAttribute( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + return evalGetData(theVal, theDef, EnumSet.of(Facet.attributes, Facet.properties), theCtx); + } + + private static boolean evalGetNodesOfType( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + + Map val = (Map)theVal; + Map.Entry entry = (Map.Entry)val.entrySet().iterator().next(); + + if (!(entry.getValue() instanceof String)) { + theCtx.addError("get_nodes_of_type: argument must be a String", null); + return false; + } + + String arg = (String)entry.getValue(); + + if (null == theCtx.catalog().getTypeDefinition(Construct.Node, arg)) { + theCtx.addError("get_nodes_of_type: no such node type " + arg, null); + return false; + } + else { + return true; + } + } + + /* */ + public static Constraint constraintByName(String theName) { + return Enum.valueOf(Constraint.class, theName); + } + + /* */ + public static Constraint constraint(Object theExpr) { + if (theExpr instanceof Map && + ((Map)theExpr).size() == 1) { + return constraintByName(constraintName(theExpr)); + } + + return null; + } + + /* */ + public static String constraintName(Object theExpr) { + return (String) + ((Map.Entry) + ((Map)theExpr).entrySet().iterator().next()) + .getKey(); + } + + private static Object getConstraintValue(Map theDef, + Constraint theConstraint) { + List<Map> constraints = (List<Map>)theDef.get("constraints"); + if (null == constraints) + return null; + + for(Map constraint: constraints) { + Object val = constraint.get(theConstraint.toString()); + if (val != null) + return val; + } + return null; + } + + public static enum Constraint { + equal, + greater_than, + greater_or_equal, + less_than, + less_or_equal, + in_range, + valid_values, + length, + min_length, + max_length, + pattern; + } + + + /* hold the constraint evaluators for pairs of type/constraint. + * If a pair is not present than the given constraint does not apply + * to the type. 
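+   *
+   * Usage sketch (editor's illustration):
+   *   Evaluator e = getTypeConstraintEvaluator(CoreType.String, Constraint.max_length);
+   *   e.eval("abc", def, ctx); // true when def carries {max_length: n} with n >= 3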
+ */ + private static Table<Type,Constraint,Evaluator> typeConstraintEvaluator =null; + + public static Evaluator + getTypeConstraintEvaluator(Type theType, Constraint theConstraint) { + if (typeConstraintEvaluator == null) { + typeConstraintEvaluator = HashBasedTable.create(); + + typeConstraintEvaluator.put(CoreType.String, Constraint.equal, + (val,def,ctx) -> val.equals(getConstraintValue(def,Constraint.equal))); + typeConstraintEvaluator.put(CoreType.String, Constraint.valid_values, + (val,def,ctx) -> { + return ((List)getConstraintValue(def,Constraint.valid_values)).contains(val); + }); + typeConstraintEvaluator.put(CoreType.String, Constraint.length, + (val,def,ctx) -> ((String)val).length() == ((Number)getConstraintValue(def,Constraint.length)).intValue()); + typeConstraintEvaluator.put(CoreType.String, Constraint.min_length, + (val,def,ctx) -> ((String)val).length() >= ((Number)getConstraintValue(def,Constraint.min_length)).intValue()); + typeConstraintEvaluator.put(CoreType.String, Constraint.max_length, + (val,def,ctx) -> ((String)val).length() <= ((Number)getConstraintValue(def,Constraint.max_length)).intValue()); + typeConstraintEvaluator.put(CoreType.String, Constraint.pattern, + (val,def,ctx) -> Pattern.compile((String)getConstraintValue(def,Constraint.pattern)) + .matcher((String)val) + .matches()); + + typeConstraintEvaluator.put(CoreType.Integer, Constraint.equal, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.equal)) == 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.greater_than, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.greater_than)) > 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.greater_or_equal, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.greater_or_equal)) >= 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.less_than, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.less_than)) < 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.less_or_equal, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.less_or_equal)) <= 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.in_range, + (val,def,ctx) -> { List<Integer> range = (List<Integer>)getConstraintValue(def, Constraint.in_range); + return ((Integer)val).compareTo(range.get(0)) >= 0 && + ((Integer)val).compareTo(range.get(1)) <= 0; + }); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.valid_values, + (val,def,ctx) -> ((List<Integer>)getConstraintValue(def, Constraint.valid_values)).contains((Integer)val)); + +//yaml parser represents yaml floats as java Double and we are even more tolerant as many double values +//get represented as ints and the parser will substitute an Integer + typeConstraintEvaluator.put(CoreType.Float, Constraint.equal, + (val,def,ctx) -> ((Number)val).doubleValue() == ((Number)getConstraintValue(def,Constraint.equal)).doubleValue()); + typeConstraintEvaluator.put(CoreType.Float, Constraint.greater_than, + (val,def,ctx) -> ((Number)val).doubleValue() > ((Number)getConstraintValue(def,Constraint.greater_than)).doubleValue()); + typeConstraintEvaluator.put(CoreType.Float, Constraint.greater_or_equal, + (val,def,ctx) -> ((Number)val).doubleValue() >= ((Number)getConstraintValue(def,Constraint.greater_or_equal)).doubleValue()); + typeConstraintEvaluator.put(CoreType.Float, Constraint.less_than, + (val,def,ctx) -> 
((Number)val).doubleValue() < ((Number)getConstraintValue(def,Constraint.less_than)).doubleValue()); + typeConstraintEvaluator.put(CoreType.Float, Constraint.less_or_equal, + (val,def,ctx) -> ((Number)val).doubleValue() <= ((Number)getConstraintValue(def,Constraint.less_or_equal)).doubleValue()); + typeConstraintEvaluator.put(CoreType.Float, Constraint.in_range, + (val,def,ctx) -> { List<Number> range = (List<Number>)getConstraintValue(def, Constraint.in_range); + return ((Number)val).doubleValue() >= range.get(0).doubleValue() && + ((Number)val).doubleValue() <= range.get(1).doubleValue(); + }); + typeConstraintEvaluator.put(CoreType.Float, Constraint.valid_values, + (val,def,ctx) -> ((List<Number>)getConstraintValue(def, Constraint.valid_values)).contains((Number)val)); + } + + Evaluator eval = typeConstraintEvaluator.get(theType, theConstraint); + + return eval == null ? (expr,def,ctx) -> true + : eval; + } + + + private static boolean stringValidValues(String theVal, + List<String> theValidValues, + Checker.CheckContext theCtx) { + if (!theValidValues.contains(theVal)) { + theCtx.addError("not a valid value: " + theVal + " not part of " + theValidValues, null); + return false; + } + + return true; + } + + public static final void main(String[] theArgs) { + Data.CoreType dt = Enum.valueOf(Data.CoreType.class, theArgs[0]); + debugLogger.log(LogLevel.DEBUG, Data.class.getName(), "{} > {}", theArgs[1], dt.evaluator().eval(theArgs[1], null, null)); + } +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Facet.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Facet.java new file mode 100644 index 0000000..3dfd140 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Facet.java @@ -0,0 +1,37 @@ +package org.onap.sdc.dcae.checker; + +/* + * Oddballs: + * - requirements (a requirement does not have a type - i.e. is not based + * on a Construct) and can target a node, a capability or both .. When present + * as a facet of another Construct it is also the only one represented as a + * sequence so it will need special handling anyway. + */ +public enum Facet { + + inputs(Construct.Data), + outputs(Construct.Data), + properties(Construct.Data), + attributes(Construct.Data), + capabilities(Construct.Capability), + //requirements(Construct.Capability),//?? + artifacts(Construct.Artifact), + interfaces(Construct.Interface); + /* + Node + Relationship + they can be considered as facets of the topology template ... 
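+
+   For illustration (editor's note): the construct() mapping below is what
+   lets facet content be checked uniformly, e.g. Facet.properties.construct()
+   yields Construct.Data, so property values are validated as Data instances.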
+ */
+  private Construct construct;
+
+  private Facet(Construct theConstruct) {
+    this.construct = theConstruct;
+  }
+
+  public Construct construct() {
+    return this.construct;
+  }
+}
+
+
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/JSP.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/JSP.java
new file mode 100644
index 0000000..797b4e2
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/JSP.java
@@ -0,0 +1,624 @@
+package org.onap.sdc.dcae.checker;
+
+import java.io.IOException;
+import java.io.File;
+
+import java.net.URI;
+
+import java.util.Set;
+import java.util.Map;
+import java.util.List;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.stream.Collectors;
+import java.util.function.Consumer;
+import java.util.function.BiFunction;
+
+import javax.script.Compilable;
+import javax.script.CompiledScript;
+import javax.script.Bindings;
+import javax.script.ScriptContext;
+import javax.script.SimpleScriptContext;
+import javax.script.ScriptEngine;
+import javax.script.ScriptEngineManager;
+import javax.script.ScriptException;
+
+import jdk.nashorn.api.scripting.JSObject;
+import jdk.nashorn.api.scripting.AbstractJSObject;
+
+import org.apache.commons.jxpath.JXPathContext;
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+
+
+/**
+ * Java Script Processor
+ * Each script is represented by a Target and the JSP processor maintains a collection of Targets, i.e. scripts.
+ * A collection of targets can be used with only one JSP processor at a time (as the processor stores
+ * processor-specific compiled versions within the target).
+ */
+public class JSP implements Processor<JSP> {
+
+  private ScriptEngine engine;
+  private Collection<? extends Target> targets;
+
+  public JSP(String[] theScripts) {
+    this(Arrays.stream(theScripts)
+          .map(s -> new Target(s, new File(s).toURI()))
+          .collect(Collectors.toList()));
+  }
+
+  public JSP(File[] theScripts) {
+    this(Arrays.stream(theScripts)
+          .map(s -> new Target(s.getName(), s.toURI()))
+          .collect(Collectors.toList()));
+  }
+
+  public JSP(URI[] theScripts) {
+    this(Arrays.stream(theScripts)
+          .map(s -> new Target(s.toString(), s))
+          .collect(Collectors.toList()));
+  }
+
+  /**
+   * The given collection is allowed to change while used by the JSP engine but access to it needs to be synchronized.
+   * The engine uses the target field of each Target to store a compiled version of each script. An external reset of
+   * this field (maybe in order to indicate some change in the Target) will cause a re-compilation of the Target.
+   */
+  public JSP(Collection<? extends Target> theTargets) {
+    this.targets = theTargets;
+    ScriptEngineManager engineManager = new ScriptEngineManager();
+    this.engine = engineManager.getEngineByName("nashorn");
+  }
+
+  public Collection<?
extends Target> targets() { + return this.targets; + } + + /* pre-compiles all known targets + */ + protected void compile() throws ProcessorException { + synchronized (this.targets) { + for (Target t: this.targets) + compile(t); + } + } + + protected CompiledScript compile(Target theTarget) throws ProcessorException { + + CompiledScript cs = null; + + synchronized(theTarget) { + try { + cs = (CompiledScript)theTarget.getTarget(); + } + catch(ClassCastException ccx) { + throw new ProcessorException(theTarget, "Unexpected target content"); + } + + if (cs == null) { + try { + cs = ((Compilable)this.engine).compile(theTarget.open()); + theTarget.setTarget(cs); + } + catch (IOException iox) { + throw new ProcessorException(theTarget, "Failed to read script", iox); + } + catch (ScriptException sx) { + throw new ProcessorException(theTarget, "Failed to compile script", sx); + } + } + } + + return cs; + } + + public ContextBuilder process(Catalog theCatalog) { + return new ContextBuilder( + this.engine.createBindings()) + //new DelegateBindings(this.engine.getBindings(ScriptContext.ENGINE_SCOPE))) + .with("catalog", new JSCatalog(theCatalog)); + } + + /** + */ + public class ContextBuilder implements ProcessBuilder<JSP> { + + private ScriptContext context; + + protected ContextBuilder(Bindings theBindings) { + this.context = new SimpleScriptContext(); + this.context.setBindings(theBindings, Process.PROCESS_SCOPE /*ScriptContext.ENGINE_SCOPE*/); + } + + public ContextBuilder withPreprocessing(BiFunction<Target, ScriptContext, Boolean> thePreprocessing) { + this.context.setAttribute("preprocessor", thePreprocessing, Process.PROCESS_SCOPE); + return this; + } + + public ContextBuilder withPostprocessing(BiFunction<Target, ScriptContext, Boolean> thePostprocessing) { + this.context.setAttribute("postprocessor", thePostprocessing, Process.PROCESS_SCOPE); + return this; + } + + public ContextBuilder with(String theName, Object theValue) { + this.context.getBindings(Process.PROCESS_SCOPE).put(theName, theValue); + return this; + } + + public ContextBuilder withOpt(String theName, Object theValue) { + if (theValue != null) + this.context.getBindings(Process.PROCESS_SCOPE).put(theName, theValue); + return this; + } + + public JSProcess process() { + return new JSProcess(this.context); + } + + } + + /** + */ + public class JSProcess implements Process<JSP> { + + private Report report = new Report(); + private Iterator<? extends Target> scripts; + private JScriptInfo scriptInfo = new JScriptInfo(); + private Target script; //script currently being evaluated + private boolean stopped = false; + private ScriptContext context; + + private JSProcess(ScriptContext theContext) { + + this.context = theContext; + this.context.getBindings(Process.PROCESS_SCOPE) + .put("stop", new Consumer<String>() { + public void accept(String theMsg) { + JSProcess.this.stopped = true; + //log the message?? 
+
+    /**
+     */
+    public class JSProcess implements Process<JSP> {
+
+        private Report report = new Report();
+        private Iterator<? extends Target> scripts;
+        private JScriptInfo scriptInfo = new JScriptInfo();
+        private Target script; //script currently being evaluated
+        private boolean stopped = false;
+        private ScriptContext context;
+
+        private JSProcess(ScriptContext theContext) {
+
+            this.context = theContext;
+            this.context.getBindings(Process.PROCESS_SCOPE)
+                .put("stop", new Consumer<String>() {
+                    public void accept(String theMsg) {
+                        JSProcess.this.stopped = true;
+                        //log the message??
+                    }
+                });
+            this.context.getBindings(Process.PROCESS_SCOPE)
+                .put("report", new Consumer<String>() {
+                    public void accept(String theMsg) {
+                        JSProcess.this.report.add(new ProcessorException(script, theMsg));
+                    }
+                });
+            this.context.getBindings(Process.PROCESS_SCOPE)
+                .put("reportOnce", new Consumer<String>() {
+                    public void accept(String theMsg) {
+                        JSProcess.this.report.addOnce(new ProcessorException(script, theMsg));
+                    }
+                });
+            this.scripts = JSP.this.targets.iterator();
+        }
+
+        protected String infoName(Target theTarget) {
+            String name = theTarget.getName();
+            int dot = name.indexOf('.');
+            //guard against script names without an extension
+            return (dot < 0 ? name : name.substring(0, dot)) + "_info";
+        }
+
+        public JSP processor() {
+            return JSP.this;
+        }
+
+        public boolean hasNext() {
+            return !this.stopped && this.scripts.hasNext();
+        }
+
+        protected Target next() {
+            if (hasNext())
+                return this.script = this.scripts.next();
+            else
+                throw new RuntimeException("Process is completed");
+        }
+
+        protected boolean runProcessor(String theName) throws ProcessorException {
+            BiFunction<Target, ScriptContext, Boolean> proc = (BiFunction<Target, ScriptContext, Boolean>)
+                    this.context.getAttribute(theName, Process.PROCESS_SCOPE);
+            if (proc != null) {
+                try {
+                    return proc.apply(this.script, this.context).booleanValue();
+                }
+                catch (Exception x) {
+                    throw new ProcessorException(this.script, theName + " failed", x);
+                }
+            }
+
+            return true;
+        }
+
+        public Process runNext() throws ProcessorException {
+            Target target = next();
+            synchronized(target) {
+                String name = infoName(target);
+                try {
+                    if (runProcessor("preprocessor")) {
+                        compile(target).eval(this.context);
+                        runProcessor("postprocessor");
+                    }
+                }
+                catch (ScriptException sx) {
+                    throw new ProcessorException(target, "Failed to execute validation script", sx);
+                }
+            }
+
+            return this;
+        }
+
+        public Process runNextSilently() {
+            try {
+                return runNext();
+            }
+            catch (ProcessorException px) {
+                this.report.add(px);
+            }
+            return this;
+        }
+
+        public Report run() {
+            while (hasNext())
+                runNextSilently();
+            return this.report;
+        }
+
+        public void stop() {
+            this.stopped = true;
+        }
+
+        public Report report() {
+            return this.report;
+        }
+    }
+
+    private static class JScriptInfo implements TargetInfo {
+
+        private JSObject info;
+
+        protected JScriptInfo() {
+        }
+
+        protected JScriptInfo setInfo(JSObject theInfo) {
+            this.info = theInfo;
+            return this;
+        }
+
+        public Set<String> entryNames() {
+            return this.info == null ? Collections.EMPTY_SET : this.info.keySet();
+        }
+
+        public boolean hasEntry(String theName) {
+            return this.info == null ? false : this.info.hasMember(theName);
+        }
+
+        public Object getEntry(String theName) {
+            return this.info == null ? null :
+                    this.info.hasMember(theName) ? this.info.getMember(theName) : null;
+        }
+    }
+
+
+    /* Exposes the catalog information in a more JavaScript-friendly manner.
+     */
+    public static class JSCatalog {
+
+        private Catalog catalog;
+
+        private JSCatalog(Catalog theCatalog) {
+            this.catalog = theCatalog;
+        }
+
+        /** */
+        public JSTarget[] targets() {
+            return
+                this.catalog.targets()
+                    .stream()
+                    .map(t -> new JSTarget(t))
+                    .toArray(size -> new JSTarget[size]); //or toArray(JSTarget[]::new)
+        }
+
+        public JSTarget[] topTargets() {
+            return
+                this.catalog.topTargets()
+                    .stream()
+                    .map(t -> new JSTarget(t))
+                    .toArray(size -> new JSTarget[size]); //or toArray(JSTarget[]::new)
+        }
+
+        /** */
+        public String[] types(String theConstruct) {
+            Set<String> names =
+                this.catalog.getConstructTypes(Enum.valueOf(Construct.class, theConstruct)).keySet();
+            return names.toArray(new String[names.size()]);
+        }
+
+        /** */
+        public boolean isDerivedFrom(String theConstruct, String theType, String theSuperType) {
+            return this.catalog.isDerivedFrom(Enum.valueOf(Construct.class, theConstruct), theType, theSuperType);
+        }
+
+        /** */
+        public JSObject facetDefinition(String theConstruct, String theType, String theFacet, String theName) {
+            return new JSElement(theName,
+                    this.catalog.getFacetDefinition(
+                            Enum.valueOf(Construct.class, theConstruct), theType,
+                            Enum.valueOf(Facet.class, theFacet), theName));
+        }
+
+
+        /** */
+/*
+        public JSElement[] targetNodes(Target theTarget) {
+            return
+                this.catalog.getTargetTemplates(theTarget, Construct.Node)
+                    .entrySet()
+                    .stream()
+                    .map(e -> { return new JSElement(e.getKey(),e.getValue()); })
+                    .toArray(size -> new JSElement[size]); //or toArray(JSElement[]::new)
+        }
+*/
+
+        public class JSTarget {
+
+            private Target tgt;
+            private JXPathContext jxPath;
+
+            private JSTarget(Target theTarget) {
+                this.tgt = theTarget;
+                this.jxPath = JXPathContext.newContext(this.tgt.getTarget());
+                this.jxPath.setLenient(true);
+            }
+
+            public String getName() { return this.tgt.getName(); }
+
+            public JSElement resolve(String thePath) {
+                Object res = jxPath.getValue(thePath);
+                if (res instanceof Map) {
+                    return new JSElement(thePath, (Map)res);
+                }
+                //??
+                return null;
+            }
+
+            public JSElement[] getInputs() {
+
+                Map<String,Map> inputs = (Map<String,Map>)jxPath.getValue("/topology_template/inputs");
+                return (inputs == null) ?
+                        new JSElement[0]
+                        : inputs.entrySet()
+                            .stream()
+                            .map(e -> new JSElement(e.getKey(), e.getValue()))
+                            .toArray(size -> new JSElement[size]);
+            }
+
+//            public JSElement[] getOutputs() {
+//            }
+
+            public JSElement getMetadata() {
+                return new JSElement("metadata", (Map)jxPath.getValue("/metadata"));
+            }
+
+            public JSElement[] getNodes() {
+                return
+                    JSCatalog.this.catalog.getTargetTemplates(this.tgt, Construct.Node)
+                        .entrySet()
+                        .stream()
+                        .map(e -> new JSElement(e.getKey(), e.getValue()))
+                        .toArray(size -> new JSElement[size]); //or toArray(JSElement[]::new)
+            }
+
+//            public JSElement[] getPolicies() {
+//            }
+
+        }
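+
+        /*
+         * Illustrative only: a script-side walk over a JSTarget, assuming a
+         * 'target' variable has been bound into the process scope (only
+         * 'catalog' is bound by default) and assuming the document defines
+         * topology_template inputs:
+         *
+         *   var meta = target.getMetadata();
+         *   var inputs = target.getInputs();
+         *   var node = target.resolve("/topology_template/node_templates/my_node");
+         */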
+
+
+        /*
+         */
+        public class JSElement extends AbstractJSObject {
+
+
+            private String name;
+            private Map def;
+
+            private JSElement(String theName, Object theDef) {
+                this.name = theName;
+                this.def = theDef == null ? Collections.emptyMap()
+                        : (theDef instanceof Map) ? (Map)theDef
+                        : Collections.singletonMap("value", theDef);
+            }
+
+            public String getName() { return this.name; }
+
+            public boolean hasMember(String theMember) {
+                return this.def.containsKey(theMember);
+            }
+
+            public Object getMember(final String theMember) {
+                Object val = this.def.get(theMember);
+                if (val != null) {
+                    if (val instanceof Map) {
+                        return new JSElement(theMember, val);
+                        /*
+                        return ((Map<String,?>)obj).entrySet()
+                                .stream()
+                                .map((Map.Entry<String,?> e) -> { return new JSElement(e.getKey(),e.getValue()); })
+                                .toArray(size -> new JSElement[size]);
+                        */
+                    }
+
+                    if (val instanceof List) {
+                        //a property value can be a list of: primitive types or maps (for a user defined type)
+                        //requirements are exposed as a list of maps
+                        List lval = (List)val;
+                        //guard against empty lists before peeking at the first element
+                        if (!lval.isEmpty() && lval.get(0) instanceof Map) {
+                            return lval
+                                    .stream()
+                                    .map((e) -> new JSElement(theMember, e))
+                                    .toArray(size -> new JSElement[size]);
+
+                            /*
+                            return val
+                                    .stream()
+                                    .map((e) -> {
+                                        Map.Entry<String,?> re = ((Map<String,?>)e).entrySet().iterator().next();
+                                        return new JSElement(re.getKey(), re.getValue());
+                                    })
+                                    .toArray(size -> new JSElement[size]);
+                            */
+                        }
+                    }
+
+                    return val;
+                }
+                else {
+                    if ("name".equals(theMember))
+                        return this.name;
+                    if ("toString".equals(theMember))
+                        return _toString;
+                    if ("hasOwnProperty".equals(theMember))
+                        return _hasOwnProperty;
+                    return super.getMember(theMember);
+                }
+            }
+
+            /* TODO: we do not expose 'name' in here */
+            public Set<String> keySet() {
+                return this.def.keySet();
+            }
+
+        }
+
+
+        static final JSObject _toString =
+            new TracerJSObject("_toString") {
+                public Object call(Object thiz, Object... args) {
+                    return ((JSElement)thiz).def.toString();
+                }
+
+                public boolean isFunction() { return true; }
+            };
+
+        static final JSObject _hasOwnProperty =
+            new TracerJSObject("_hasOwnProperty") {
+                public Object call(Object thiz, Object... args) {
+                    return ((JSElement)thiz).def.containsKey(args[0]);
+                }
+
+                public boolean isFunction() { return true; }
+            };
+
+    }//JSCatalog
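+
+    /*
+     * A sketch of what a validation script might look like (illustrative only;
+     * 'catalog', 'report' and 'stop' are the bindings set up by this class,
+     * while the actual check is an assumption):
+     *
+     *   var targets = catalog.topTargets();
+     *   for (var i = 0; i < targets.length; i++) {
+     *     var nodes = targets[i].getNodes();
+     *     for (var j = 0; j < nodes.length; j++) {
+     *       if (!nodes[j].hasMember("type"))
+     *         report("node template " + nodes[j].getName() + " has no type");
+     *     }
+     *   }
+     */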
+
+
+    private static class TracerJSObject extends AbstractJSObject {
+
+        private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+        private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+        private String mark;
+
+        TracerJSObject(String theMark) {
+            this.mark = theMark;
+        }
+
+        public Object call(Object thiz, Object... args) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:call", this.mark);
+            return super.call(thiz, args);
+        }
+
+        public Object newObject(Object... args) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:newObject", this.mark);
+            return super.newObject(args);
+        }
+
+        public Object eval(String s) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:eval", this.mark);
+            return super.eval(s);
+        }
+
+        public Object getMember(String name) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:getMember", this.mark);
+            return super.getMember(name);
+        }
+
+        public Object getSlot(int index) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:getSlot", this.mark);
+            return super.getSlot(index);
+        }
+
+        public boolean hasMember(String name) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:hasMember", this.mark);
+            return super.hasMember(name);
+        }
+
+        public boolean hasSlot(int slot) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:hasSlot", this.mark);
+            return super.hasSlot(slot);
+        }
+
+        public void removeMember(String name) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:removeMember", this.mark);
+            super.removeMember(name);
+        }
+
+        public void setMember(String name, Object value) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:setMember", this.mark);
+            super.setMember(name,value);
+        }
+
+        public void setSlot(int index, Object value) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:setSlot", this.mark);
+            super.setSlot(index,value);
+        }
+
+        public Set<String> keySet() {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:JSObject:keySet", this.mark);
+            return super.keySet();
+        }
+
+        public Collection<Object> values() {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:values", this.mark);
+            return super.values();
+        }
+
+        public boolean isInstance(Object instance) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isInstance", this.mark);
+            return super.isInstance(instance);
+        }
+
+        public boolean isInstanceOf(Object clazz) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isInstanceOf", this.mark);
+            //was super.isInstance(clazz), which delegated to the wrong method
+            return super.isInstanceOf(clazz);
+        }
+
+        public String getClassName() {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:getClassName", this.mark);
+            return super.getClassName();
+        }
+
+        public boolean isFunction() {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isFunction", this.mark);
+            return super.isFunction();
+        }
+
+        public boolean isStrictFunction() {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isStrictFunction", this.mark);
+            return super.isStrictFunction();
+        }
+
+        public boolean isArray() {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isArray", this.mark);
+            return super.isArray();
+        }
+
+        public Object getDefaultValue(Class<?> hint) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:getDefaultValue({})", this.mark, hint);
+            return super.getDefaultValue(hint);
+        }
+    }
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Process.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Process.java
new file mode 100644
index 0000000..0f529af
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Process.java
@@ -0,0 +1,29 @@
+package org.onap.sdc.dcae.checker;
+
+/**
+ *
+ */
+public interface Process<T extends Processor> {
+
+    public static final int PROCESS_SCOPE = 100;
+
+    /**
+     * the processor running this process
+     */
+    public T processor();
+
+    /* */
+    public boolean hasNext();
+
+    /* */
+    public Process runNext() throws ProcessorException;
+
+    /* execute all steps to completion
+     */
+    public Report run();
+
+    /* execution report
+     */
+    public Report report();
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessBuilder.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessBuilder.java
new file mode 100644
index 0000000..8295055
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessBuilder.java
@@ -0,0 +1,24 @@
+package org.onap.sdc.dcae.checker;
+
+
+/**
+ * Just in case you might want to do something with a template (set) once it was checked
+ */
+public interface ProcessBuilder<T extends Processor> {
+
+    /* */
+    public ProcessBuilder<T> with(String theName, Object theValue);
+
+    /* */
+    public ProcessBuilder<T> withOpt(String theName, Object theValue);
+
+    /* */
+    public Process<T> process();
+
+    /* */
+    default public Report run() {
+        return process()
+                .run();
+    }
+
+}
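+
+// Illustrative only: the default run() above collapses the build-and-run
+// sequence, so, for some Processor 'p' and Catalog 'c' (assumptions here),
+//   p.process(c).with("verbose", Boolean.TRUE).run();
+// is equivalent to building the Process first and then calling run() on it.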
"" : ("(" + getCause() + ")")); + } +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Report.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Report.java new file mode 100644 index 0000000..0f1b7c3 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Report.java @@ -0,0 +1,102 @@ +package org.onap.sdc.dcae.checker; + +import java.io.IOException; + +import java.util.LinkedList; +import java.util.Collections; + +import org.yaml.snakeyaml.error.MarkedYAMLException; +import kwalify.ValidationException; + +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; + +/** + * Represents a collection of errors that occured during one of the stages + * of the checker: yaml parsing, yaml validation (tosca syntax), tosca checking + */ +/* + * This needs some re-thinking: while it is useful to have all original errors introducing + * the custom json conversion (just to help the service) is not great either. + * I was torn between this approach or creating a custom deserializer and object mapper (which + * would have kept all the customized serialization in the service but then the error analysis + * would be duplicated there too ..). + */ +@JsonSerialize(contentUsing=org.onap.sdc.dcae.checker.Report.ReportEntrySerializer.class) +public class Report<T extends Throwable> extends LinkedList<T> { + + public Report() { + } + + public Report(T[] theErrors) { + Collections.addAll(this, theErrors); + } + + public boolean hasErrors() { + return !this.isEmpty(); + } + + public boolean addOnce(T theError) { + for (T e: this) { + if (e.getMessage().equals(theError.getMessage())) + return false; + } + return add(theError); + } + + public String toString() { + StringBuilder sb = new StringBuilder(this.size() + " errors"); + for (Throwable x: this) { + sb.append("\n") + .append("[") + .append(location(x)) + .append("] ") + .append(x.getMessage()); + if (x.getCause() != null) { + sb.append("\n\tCaused by:\n") + .append(x.getCause()); + } + } + sb.append("\n"); + return sb.toString(); + } + + private static String location(Throwable theError) { + if (theError instanceof MarkedYAMLException) { + MarkedYAMLException mx = (MarkedYAMLException)theError; + return "line " + mx.getProblemMark().getLine() + ", column " + mx.getProblemMark().getColumn(); + } + if (theError instanceof ValidationException) { + ValidationException vx = (ValidationException)theError; + return vx.getPath(); + } + if (theError instanceof TargetError) { + TargetError tx = (TargetError)theError; + return tx.getLocation(); + } + return "unknown"; + } + + + public static class ReportEntrySerializer extends StdSerializer<Throwable> { + + public ReportEntrySerializer() { + super(Throwable.class); + } + + @Override + public void serialize(Throwable theError, JsonGenerator theGenerator, SerializerProvider theProvider) + throws IOException, JsonProcessingException { + theGenerator.writeStartObject(); + theGenerator.writeStringField("location", location(theError)); + theGenerator.writeStringField("message", theError.getMessage()); + if (theError.getCause() != null) + theGenerator.writeStringField("cause", theError.getCause().toString()); + theGenerator.writeEndObject(); + } + } +} + diff --git 
a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Repository.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Repository.java new file mode 100644 index 0000000..9cb853b --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Repository.java @@ -0,0 +1,50 @@ +package org.onap.sdc.dcae.checker; + + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; + +import java.net.URI; +import java.net.URL; +import java.net.MalformedURLException; + +import java.util.Map; + +/** + * Represents a 'container' of (yaml) TOSCA documents + */ +public abstract class Repository { + + protected OnapLoggerError errLogger = OnapLoggerError.getInstance(); + protected OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private String name, + description; + protected URI rootURI; + protected Map credential; //TOSCA type tosca.datatype.Credential + + public Repository(String theName, URI theRoot) { + this.name = theName; + this.rootURI = theRoot; + } + + public String getName() { + return this.name; + } + + public URI getRoot() { + return this.rootURI; + } + + /** optional */ + public abstract Iterable<Target> targets(); + + /** */ + public abstract Target resolve(URI theURI); + + @Override + public String toString() { + return "Repository " + this.name + " at " + this.rootURI; + } +} + diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java new file mode 100644 index 0000000..b630564 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java @@ -0,0 +1,80 @@ +package org.onap.sdc.dcae.checker; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; + +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.io.BufferedReader; +import java.io.IOException; + +import java.net.URI; +import java.net.URL; +import java.net.MalformedURLException; + +/** + * Represents a yaml document to be parsed/validated/checked + */ +public class Target { + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + + private String name; //maintained mainly for logging + private URI location; + private Object target; //this is the parsed form of the target + + private Report report = new Report(); //collects the errors related to this target + + public Target(String theName, URI theLocation) { + this.name = theName; + this.location = theLocation; + } + + public String getName() { + return this.name; + } + + public URI getLocation() { + return this.location; + } + + public Report getReport() { + return this.report; + } + + public void report(Throwable theError) { + this.report.add(theError); + } + + public void report(String theErrMsg) { + this.report.add(new Exception(theErrMsg)); + } + + public void setTarget(Object theTarget) { + this.target = theTarget; + } + + public Object getTarget() { + return this.target; + } + + /* + * @return a reader for the source or null if failed + */ + public Reader open() throws IOException { + + return new BufferedReader( + new InputStreamReader( + this.location.toURL().openStream())); + } + + public String toString() { + //return String.format("Target %s (%.20s ...)", this.location, this.target == null ? 
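+
+// A minimal usage sketch (illustrative only; the file name is an assumption):
+//   Target t = new Target("example", new File("example.yaml").toURI());
+//   try (Reader r = t.open()) { /* hand the reader to a yaml parser */ }
+//   if (t.getReport().hasErrors())
+//     System.out.println(t.getReport());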
"" : this.target.toString()); + return String.format("Target %s at %s", this.name, this.location); + + } +} + diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetError.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetError.java new file mode 100644 index 0000000..0764a56 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetError.java @@ -0,0 +1,43 @@ +package org.onap.sdc.dcae.checker; + + +/** + * A target error represents an error in target the resource being checked. + * We only represent it as a Throwable because the libraries that perform parsing and syntax validation + * represent their errors as such .. + */ +public class TargetError extends Throwable { + + /* + public static enum Level { + error, + warning + } + */ + + private String location; //we might need an more detailed representation + //here: it could be a YAML document jpath or + //document location (line). + private String target; + + public TargetError(String theTarget, String theLocation, String theMessage, Throwable theCause) { + super(theMessage, theCause); + this.target = theTarget; + this.location = theLocation; + } + + public TargetError(String theTarget, String theLocation, String theMessage) { + this(theTarget, theLocation, theMessage, null); + } + + public String getTarget() { + return this.target; + } + + public String getLocation() { + return this.location; + } + + +} + diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetInfo.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetInfo.java new file mode 100644 index 0000000..480b6a8 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetInfo.java @@ -0,0 +1,20 @@ +package org.onap.sdc.dcae.checker; + +import java.util.Set; + + +/** + * Exposes target properties. How they are obtained/calculated not of importance here. 
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetLocator.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetLocator.java
new file mode 100644
index 0000000..9b82f16
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetLocator.java
@@ -0,0 +1,20 @@
+package org.onap.sdc.dcae.checker;
+
+import java.net.URI;
+
+
+public interface TargetLocator {
+
+    /** */
+    public boolean addSearchPath(URI theURI);
+
+    /** */
+    public boolean addSearchPath(String thePath);
+
+    /** */
+    public Iterable<URI> searchPaths();
+
+    /** */
+    public Target resolve(String theName);
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Workflows.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Workflows.java
new file mode 100644
index 0000000..88eb192
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Workflows.java
@@ -0,0 +1,120 @@
+package org.onap.sdc.dcae.checker;
+
+import java.util.Map;
+
+import org.onap.sdc.dcae.checker.annotations.Checks;
+
+import java.util.List;
+import java.util.Iterator;
+
+@Checks
+public class Workflows {
+
+    @Checks(path="/topology_template/workflows")
+    public void check_workflows(Map theDefinition, Checker.CheckContext theContext) {
+
+        theContext.enter("workflows");
+
+        try {
+            if (!theContext.checker().checkDefinition("workflows", theDefinition, theContext))
+                return;
+
+            for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
+                Map.Entry<String,Map> e = i.next();
+                check_workflow_definition(e.getKey(), e.getValue(), theContext);
+            }
+        }
+        finally {
+            theContext.exit();
+        }
+    }
+
+
+    public void check_workflow_definition(String theName, Map theDef, Checker.CheckContext theContext) {
+
+        theContext.enter("workflow", Construct.Workflow);
+
+        if (theDef.containsKey("inputs")) {
+            theContext
+                .checker()
+                .checkProperties((Map<String,Map>)theDef.get("inputs"), theContext);
+        }
+
+        if (theDef.containsKey("preconditions")) {
+            check_workflow_preconditions_definition((List<Map>)theDef.get("preconditions"), theContext);
+        }
+
+        if (theDef.containsKey("steps")) {
+            check_workflow_steps_definition((Map<String, Map>)theDef.get("steps"), theContext);
+        }
+
+        theContext.exit();
+    }
+
+
+    public void check_workflow_steps_definition(Map theSteps, Checker.CheckContext theContext) {
+
+        theContext.enter("steps");
+
+        try {
+            for (Iterator<Map.Entry<String,Map>> i = theSteps.entrySet().iterator(); i.hasNext(); ) {
+                Map.Entry<String,Map> e = i.next();
+                check_workflow_step_definition(e.getKey(), e.getValue(), theContext);
+            }
+        }
+        finally {
+            theContext.exit();
+        }
+
+    }
+
+    public void check_workflow_step_definition(String theName, Map theDef, Checker.CheckContext theContext) {
+
+        theContext.enter(theName);
+        try {
+            //required entry, must be a node or group template
+            String target = (String)theDef.get("target");
+            Construct targetConstruct = null;
+
+            if (theContext.catalog().hasTemplate(theContext.target(), Construct.Group, target)) {
+                targetConstruct = Construct.Group;
+            }
+            else if (theContext.catalog().hasTemplate(theContext.target(), Construct.Node, target)) {
+                targetConstruct = Construct.Node;
+            }
+            else {
+                theContext.addError("The 'target' entry must contain a reference to a node template or group template, '" + target + "' is none of those", null);
+            }
+
+            String targetRelationship = (String)theDef.get("target_relationship");
+            //null-safe comparison: targetConstruct stays null when the target could not be resolved above
+            if (Construct.Node.equals(targetConstruct)) {
+                if (targetRelationship != null) {
+                    //must be a requirement of the target Node
+                }
+            }
+
+
+        }
+        finally {
+            theContext.exit();
+        }
+    }
+
+    public void check_workflow_preconditions_definition(List<Map> thePreconditions, Checker.CheckContext theContext) {
+
+        theContext.enter("preconditions");
+
+        try {
+            for (Map precondition: thePreconditions) {
+                check_workflow_precondition_definition(precondition, theContext);
+            }
+        }
+        finally {
+            theContext.exit();
+        }
+    }
+
+    public void check_workflow_precondition_definition(Map theDef, Checker.CheckContext theContext) {
+    }
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/.Validates.java.swp b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/.Validates.java.swp
new file mode 100644
index 0000000..dae35da
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/.Validates.java.swp
Binary files differ
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Catalogs.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Catalogs.java
new file mode 100644
index 0000000..8dbe275
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Catalogs.java
@@ -0,0 +1,14 @@
+package org.onap.sdc.dcae.checker.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+
+/** */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.METHOD})
+public @interface Catalogs {
+    String path() default "/";
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Checks.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Checks.java
new file mode 100644
index 0000000..96349d7
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Checks.java
@@ -0,0 +1,19 @@
+package org.onap.sdc.dcae.checker.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+
+/** */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.METHOD})
+/* The iffy part: as a type annotation we do not need a path or a version specification,
+   as a method annotation it is mandatory (cannot be the default).
+   We could foresee that a version indication at type level would cover all check handlers within the type.
+ */
+public @interface Checks {
+    String path() default "/";
+    String[] version() default { "1.0", "1.0.0", "1.1", "1.1.0" };
+}
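+
+// A handler sketch in the style of the Workflows class above (illustrative
+// only; the path and the check performed are assumptions):
+//   @Checks
+//   public class MyChecks {
+//     @Checks(path="/topology_template/node_templates")
+//     public void check_node_templates(Map theDefinition, Checker.CheckContext theContext) {
+//       theContext.enter("node_templates");
+//       try {
+//         if (theDefinition.isEmpty())
+//           theContext.addError("no node templates defined", null);
+//       }
+//       finally {
+//         theContext.exit();
+//       }
+//     }
+//   }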
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Validates.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Validates.java
new file mode 100644
index 0000000..29e080d
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Validates.java
@@ -0,0 +1,15 @@
+package org.onap.sdc.dcae.checker.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+
+/** */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.METHOD})
+public @interface Validates {
+    String rule() default "/";
+    String[] timing() default { "post" };
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/package-info.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/package-info.java
new file mode 100644
index 0000000..da2c5ba
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/package-info.java
@@ -0,0 +1,101 @@
+/**
+ * The checker provides an api/tool for the verification of TOSCA yaml files
+ * as specified in the OASIS specification found at:
+ * http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/TOSCA-Simple-Profile-YAML-v1.0.pdf
+ *
+ * It provides a three stage processing of a tosca yaml file:
+ * - yaml verification: is the document a valid yaml document as per yaml.org/spec. In particular we're using the snakeyaml library for parsing the yaml document to a nested structure of java objects.
+ * - tosca yaml grammar validation: is the document a valid tosca yaml
+ * document, as per the TOSCA simple profile for yaml. We use a modified
+ * version of the kwalify library for this task. The grammar for TOSCA yaml
+ * is itself a yaml document (found in the package in
+ * resources/tosca-schema.yaml). There are certain limitations on how far
+ * this grammar can go.
+ * - consistency verification: we check the type hierarchies for all TOSCA
+ * constructs (data types, capability types, node types, etc), the definition
+ * of all facets of a construct (properties, attributes, etc) across the type
+ * hierarchies, the conformity of construct templates (node templates, ..) with
+ * their types, data valuations (input assignments, constants, function calls).
+ *
+ * Each stage is blocking, i.e. a stage will be performed only if the previous
+ * one completed successfully.
+ *
+ * The verification is done across all the imported documents. The common TOSCA
+ * types are by default made available to all documents being processed (the
+ * specification is in resources/tosca-common-types.yaml). Networking related
+ * types can be made available by importing resources/tosca-network-types.yaml
+ * while the tosca nfv profile definitions are available at
+ * resources/tosca-nfv-types.yaml.
+ *
+ * Besides snakeyaml and kwalify this package also has dependencies on Google's
+ * guava library and Apache's jxpath.
+ *
+ * The three java interfaces exposed by the package are the Checker, Target
+ * and Report. A Target represents a document processed by the Checker. While
+ * the Checker starts with a top Target, through import statements it can end up
+ * processing a number of Targets. The results of processing a Target are made
+ * available through a Report which currently is nothing more than a list of
+ * recorded errors.
+ *
+ * <div>
+ * {@code
+ *    Checker checker = new Checker();
+ *    checker.check("tests/example.yaml");
+ *
+ *    for (Target t: checker.targets())
+ *      System.out.println(t.getLocation() + "\n" + t.getReport());
+ * }
+ * </div>
+ *
+ * The errors are recorded as instances of Exception, mostly due to the fact
+ * that snakeyaml and kwalify do report errors as exceptions. As such there are 3
+ * basic types of errors to be expected in a report: YAMLException (from
+ * snakeyaml, related to parsing), ValidationException (from kwalify, tosca
+ * grammar validation), TargetError (from the checker itself). This might
+ * change as we're looking to unify the way errors are reported. A Report
+ * object has a user-friendly toString function.
+ *
+ * A CheckerException thrown during the checking process is an indication of a
+ * malfunction in the checker itself.
+ *
+ * The checker handles targets as URIs. The resolution of a target consists in
+ * going from a string representing some path/uri to the absolute URI. URIs can
+ * be of any java recognizable schema: file, http, etc. A TargetResolver (not
+ * currently exposed through the API) attempts in order:
+ * - if the String is an absolute URI, keep it as such
+ * - if the String is a relative URI, attempt to resolve it as relative to
+ * known search paths (pre-configured absolute URIs: current directory and the
+ * root of the main target's URI). The option of adding custom search paths will
+ * be added.
+ * - attempt to resolve as a classpath resource (a jar:file: URI)
+ *
+ * At this time there are no options for the checker (please provide
+ * requirements to be considered).
+ *
+ *
+ *
+ * Other:
+ * - the checker performs during tosca grammar validation a 'normalization'
+ * process as the tosca yaml profile allows for short forms in the
+ * specification of a number of its constructs (see spec). The checker changes
+ * the actual structure of the parsed document such that only normalized
+ * (complete) forms of specification are present before the checking phase.
+ * (the kwalify library was extended in order to be able to specify these
+ * short forms in the grammar itself and process/tolerate them at validation
+ * time).
+ *
+ * - the checker contains an internal catalog where the types and templates
+ * of different constructs are aggregated and indexed across all targets in
+ * order to facilitate the checking phase. Catalogs can be 'linked' and the
+ * resolution process delegated (the checker maintains a basic catalog with
+ * the core and common types and there is always a second catalog maintaining
+ * the information related to the current targets).
+ * The catalog is currently not exposed by the library.
+ *
+ * - imports processing: the import statements present in a target are first
+ * 'detected' during the tosca yaml grammar validation phase. At that stage all
+ * imports are (recursively) parsed and validated (first 2 phases). Checking
+ * of all imports (recursively) is done during stage 3.
+ *
+ */
+package org.onap.sdc.dcae.checker;
\ No newline at end of file
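One closing illustration for the Report type above: since its entries carry a custom Jackson serializer (Report.ReportEntrySerializer), a report can be rendered to JSON directly. A minimal sketch, assuming Jackson's ObjectMapper is on the classpath and that the class-level contentUsing annotation is honored the way the service expects:

    ObjectMapper mapper = new ObjectMapper();
    String json = mapper.writeValueAsString(someReport); //someReport: a populated Report
    //entries come out in the form {"location":"...","message":"...","cause":"..."}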