path: root/dcaedt_validator
author     Eran (ev672n), Vosk <ev672n@att.com>  2018-08-06 17:02:39 +0300
committer  Eran (ev672n), Vosk <ev672n@att.com>  2018-08-06 17:02:39 +0300
commit     86457ed120fc236b1485ad3251589aedad2401bd (patch)
tree       a473c2faf16ffbb34aad75d4fef8afc3b2541f21 /dcaedt_validator
parent     735b58119b37ead5013c40afd941d63ef28ca053 (diff)
Changing the dcae dt main code
Updating DCAE-dt-main code for Dockerizing the DCAE-CI code

Change-Id: Ia50d24e60e9ddc9bbc58dd8651d7a4f7e0dc8270
Issue-ID: SDC-1605
Signed-off-by: Eran (ev672n), Vosk <ev672n@att.com>
Diffstat (limited to 'dcaedt_validator')
-rw-r--r--  dcaedt_validator/README  9
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java  3432
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/IChecker.java  21
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java  16
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/ArtifactCommon.java  31
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/AttributesCommon.java  72
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/BaseCommon.java  9
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/CapabilityCommon.java  190
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/CheckCommon.java  79
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/ConstCommon.java  38
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/DataCommon.java  102
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/FacetCommon.java  320
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/GroupCommon.java  118
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/InputsOutputsCommon.java  66
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/InterfaceCommon.java  164
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/NodeCommon.java  197
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/PolicyCommon.java  134
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/PropertiesCommon.java  97
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/RelationshipCommon.java  112
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/RequirementCommon.java  464
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/TypeCommon.java  115
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/validation/TOSCAValidator.java  115
22 files changed, 3111 insertions, 2790 deletions
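
The refactor leaves the checker's public entry point intact; below is a minimal usage sketch of driving it from outside, mirroring the main() method further down in Checker.java (the template file name is illustrative):

    import java.io.File;

    import org.onap.sdc.dcae.checker.Catalog;
    import org.onap.sdc.dcae.checker.Checker;
    import org.onap.sdc.dcae.checker.Target;

    public class CheckerUsageSketch {
        public static void main(String[] args) throws Exception {
            // Static convenience entry point, as used by Checker.main() in the diff below
            Catalog cat = Checker.check(new File("service_template.yaml"));
            for (Target t : cat.targets()) {
                // Location, import chain and per-target validation report
                System.out.println(t.getLocation() + "\n" + cat.importString(t) + "\n" + t.getReport());
            }
        }
    }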
diff --git a/dcaedt_validator/README b/dcaedt_validator/README
new file mode 100644
index 0000000..0291fa2
--- /dev/null
+++ b/dcaedt_validator/README
@@ -0,0 +1,9 @@
+Build commons-pipeline separately; it is not a sub-project.
+It also needs to be installed in the local repository with 'mvn install'.
+
+Build the validator itself.
+
+
+If running into problems, clear the local Maven repo,
+    mvn dependency:purge-local-repository -DreResolve=false
+and start from the beginning.
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java
index bf1843e..c75dc8d 100644
--- a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java
@@ -1,7 +1,7 @@
package org.onap.sdc.dcae.checker;
-import java.lang.reflect.Method;
import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
import java.io.File;
import java.io.Reader;
@@ -11,12 +11,9 @@ import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
-import java.util.TreeMap;
import java.util.Iterator;
-import java.util.ListIterator;
import java.util.Map;
import java.util.List;
-import java.util.LinkedList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Set;
@@ -24,23 +21,22 @@ import java.util.Collection;
import java.util.Collections;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
-import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import com.google.common.reflect.Invokable;
import org.onap.sdc.common.onaplog.OnapLoggerDebug;
import org.onap.sdc.common.onaplog.OnapLoggerError;
import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.checker.common.*;
+import org.onap.sdc.dcae.checker.validation.TOSCAValidator;
import org.yaml.snakeyaml.Yaml;
-import com.google.common.collect.Maps;
-import com.google.common.collect.MapDifference;
-import com.google.common.reflect.Invokable;
import com.google.common.collect.Table;
import com.google.common.collect.HashBasedTable;
import kwalify.Validator;
import kwalify.Rule;
-import kwalify.Types;
import kwalify.ValidationException;
import kwalify.SchemaException;
@@ -53,41 +49,34 @@ import org.reflections.Reflections;
import org.reflections.util.FilterBuilder;
import org.reflections.util.ConfigurationBuilder;
import org.reflections.scanners.TypeAnnotationsScanner;
-import org.reflections.scanners.SubTypesScanner;
+import org.reflections.scanners.SubTypesScanner;
import org.reflections.scanners.MethodAnnotationsScanner;
+import static org.onap.sdc.dcae.checker.common.ConstCommon.*;
+import static org.onap.sdc.dcae.checker.common.ConstCommon.INTERFACE_TYPES;
+
/*
* To consider: model consistency checking happens now along with validation
* (is implemented as part of the validation hooks). It might be better to
* separate the 2 stages and perform all the consistency checking once
* validation is completed.
*/
-public class Checker {
- private static final String PROPERTIES = "properties";
- private static final String DEFAULT = "default";
- private static final String ATTRIBUTES = "attributes";
- private static final String DATA_TYPES = "data_types";
- private static final String CAPABILITY_TYPES = "capability_types";
- private static final String VALID_SOURCE_TYPES = "valid_source_types";
- private static final String RELATIONSHIP_TYPES = "relationship_types";
- private static final String INTERFACES = "interfaces";
- private static final String VALID_TARGET_TYPES = "valid_target_types";
- private static final String ARTIFACT_TYPES = "artifact_types";
- private static final String INTERFACE_TYPES = "interface_types";
- private static final String NODE_TYPES = "node_types";
- private static final String REQUIREMENTS = "requirements";
- private static final String CAPABILITIES = "capabilities";
- private static final String GROUP_TYPES = "group_types";
- private static final String TARGETS_CONSTANT = "targets";
- private static final String POLICY_TYPES = "policy_types";
- private static final String IS_NONE_OF_THOSE = "' is none of those";
- private static final String INPUTS = "inputs";
- private static final String CAPABILITY = "capability";
- private static final String ARTIFACTS = "artifacts";
- private static final String WAS_DEFINED_FOR_THE_NODE_TYPE = " was defined for the node type ";
- private static final String UNKNOWN = "Unknown ";
- private static final String TYPE = " type ";
- public static final String IMPORTED_FROM = "',imported from ";
+public class Checker implements IChecker {
+
+ private CheckCommon checkCommon;
+ private TypeCommon typeCommon;
+ private ArtifactCommon artifactCommon;
+ private CapabilityCommon capabilityCommon;
+ private FacetCommon facetCommon;
+ private GroupCommon groupCommon;
+ private InputsOutputsCommon inputsOutputsCommon;
+ private InterfaceCommon interfaceCommon;
+ private PropertiesCommon propertiesCommon;
+ private RelationshipCommon relationshipCommon;
+ private NodeCommon nodeCommon;
+ private PolicyCommon policyCommon;
+ private RequirementCommon requirementCommon;
+ private AttributesCommon attributesCommon;
private Target target = null; //what we're validating at the moment
@@ -104,7 +93,10 @@ public class Checker {
private static Catalog commonsCatalogInstance = null;
- private static final String[] EMPTY_STRING_ARRAY = new String[0];
+ private static Class[] validationHookArgTypes =
+ new Class[]{Object.class, Rule.class, Validator.ValidationContext.class};
+
+
/* Need a proper way to indicate where the grammars are and how they should be identified */
private static final String[] grammarFiles = new String[]{"tosca/tosca_simple_yaml_1_0.grammar",
@@ -120,14 +112,90 @@ public class Checker {
new Class[]{Map.class, CheckContext.class},
new Class[]{List.class, CheckContext.class}};
- private static Class[] validationHookArgTypes =
- new Class[]{Object.class, Rule.class, Validator.ValidationContext.class};
+
public Checker() throws CheckerException {
+ initCommons();
+
loadGrammars();
loadAnnotations();
}
+ private void initCommons() {
+ NodeCommon.init(this);
+ InterfaceCommon.init(this);
+ checkCommon = CheckCommon.getInstance();
+ typeCommon = TypeCommon.getInstance();
+ artifactCommon = ArtifactCommon.getInstance();
+ capabilityCommon = CapabilityCommon.getInstance();
+ facetCommon = FacetCommon.getInstance();
+ groupCommon = GroupCommon.getInstance();
+ inputsOutputsCommon = InputsOutputsCommon.getInstance();
+ interfaceCommon = InterfaceCommon.getInstance();
+ propertiesCommon = PropertiesCommon.getInstance();
+ relationshipCommon = RelationshipCommon.getInstance();
+ nodeCommon = NodeCommon.getInstance();
+ policyCommon = PolicyCommon.getInstance();
+ requirementCommon = RequirementCommon.getInstance();
+ attributesCommon = AttributesCommon.getInstance();
+ }
+
+ @FunctionalInterface
+ interface Function<A, B, C, D> {
+ void function(String key, Map value, Checker.CheckContext theContext, Catalog catalog);
+
+ }
+
+ @FunctionalInterface
+ interface FunctionWithoutCatalog<A, B, C> {
+ void function(String key, Map value, Checker.CheckContext theContext);
+
+ }
+ @FunctionalInterface
+ interface FunctionWithTarget<A, B, C, D, E> {
+ void function(String key, Map value, Checker.CheckContext theContext, Catalog catalog, Target target);
+ }
+
+ private void abstractCheck(Function function, Map<String, Map> stringMapMap, Checker.CheckContext theContext, String type) {
+ theContext.enter(type);
+ try {
+ if (!checkCommon.checkDefinition(type, stringMapMap, theContext)) {
+ return;
+ }
+
+ stringMapMap.forEach((key, value) -> function.function(key, value, theContext, catalog));
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ private void abstractCheck(FunctionWithoutCatalog function, Map<String, Map> stringMapMap, Checker.CheckContext theContext, String type) {
+ theContext.enter(type);
+ try {
+ if (!checkCommon.checkDefinition(type, stringMapMap, theContext)) {
+ return;
+ }
+
+ stringMapMap.forEach((key, value) -> function.function(key, value, theContext));
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ private void abstractCheck(FunctionWithTarget function, Map<String, Map> stringMapMap, Checker.CheckContext theContext, String type) {
+ theContext.enter(type);
+ try {
+ if (!checkCommon.checkDefinition(type, stringMapMap, theContext)) {
+ return;
+ }
+
+ stringMapMap.forEach((key, value) -> function.function(key, value, theContext, catalog, target));
+ } finally {
+ theContext.exit();
+ }
+ }
+
+
public static void main(String[] theArgs) {
if (theArgs.length == 0) {
errLogger.log(LogLevel.ERROR, Checker.class.getName(), "checker resource_to_validate [processor]*");
@@ -137,13 +205,9 @@ public class Checker {
try {
Catalog cat = Checker.check(new File(theArgs[0]));
- for (Target t : cat.targets()) {
- errLogger.log(LogLevel.ERROR, Checker.class.getName(), "{}\n{}\n{}", t.getLocation(), cat.importString(t), t.getReport());
- }
+ cat.targets().forEach(t -> errLogger.log(LogLevel.ERROR, Checker.class.getName(), "{}\n{}\n{}", t.getLocation(), cat.importString(t), t.getReport()));
- for (Target t : cat.sortedTargets()) {
- errLogger.log(LogLevel.ERROR, Checker.class.getName(), t.toString());
- }
+ cat.sortedTargets().forEach(t -> errLogger.log(LogLevel.ERROR, Checker.class.getName(), t.toString()));
} catch (Exception x) {
errLogger.log(LogLevel.ERROR, Checker.class.getName(),"Exception {}", x);
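
The enter/checkDefinition/iterate/exit boilerplate carried by the per-type check methods removed further down is now funnelled through abstractCheck and the small functional interfaces added above. A sketch of how such a check could delegate from inside the Checker class; the GroupCommon helper method and its signature are assumptions, not shown in this diff:

    // Hypothetical method body inside Checker; GroupCommon#checkGroupTypeDefinition is assumed.
    @Checks(path = "/group_types")
    protected void check_group_types(Map<String, Map> theDefinition, CheckContext theContext) {
        // (name, definition, context, catalog) matches the Function interface declared above
        abstractCheck((name, def, ctx, cat) -> groupCommon.checkGroupTypeDefinition(name, def, ctx, cat),
                theDefinition, theContext, GROUP_TYPES);
    }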
@@ -206,34 +270,30 @@ public class Checker {
Map<Class, Object> handlers = new HashMap<>();
Set<Method> checkHandlers = reflections.getMethodsAnnotatedWith(Checks.class);
- for (Method checkHandler : checkHandlers) {
- checks.put(checkHandler.getAnnotation(Checks.class).path(),
- checkHandler,
- handlers.computeIfAbsent(checkHandler.getDeclaringClass(),
- type -> {
- try {
- return (getClass() == type) ? this
- : type.newInstance();
- } catch (Exception x) {
- throw new RuntimeException(x);
- }
- }));
- }
+ checkHandlers.forEach(checkHandler -> checks.put(checkHandler.getAnnotation(Checks.class).path(),
+ checkHandler,
+ handlers.computeIfAbsent(checkHandler.getDeclaringClass(),
+ type -> {
+ try {
+ return (getClass() == type) ? this
+ : type.newInstance();
+ } catch (Exception x) {
+ throw new RuntimeException(x);
+ }
+ })));
Set<Method> catalogHandlers = reflections.getMethodsAnnotatedWith(Catalogs.class);
- for (Method catalogHandler : catalogHandlers) {
- catalogs.put(catalogHandler.getAnnotation(Catalogs.class).path(),
- catalogHandler,
- handlers.computeIfAbsent(catalogHandler.getDeclaringClass(),
- type -> {
- try {
- return (getClass() == type) ? this
- : type.newInstance();
- } catch (Exception x) {
- throw new RuntimeException(x);
- }
- }));
- }
+ catalogHandlers.forEach(catalogHandler -> catalogs.put(catalogHandler.getAnnotation(Catalogs.class).path(),
+ catalogHandler,
+ handlers.computeIfAbsent(catalogHandler.getDeclaringClass(),
+ type -> {
+ try {
+ return (getClass() == type) ? this
+ : type.newInstance();
+ } catch (Exception x) {
+ throw new RuntimeException(x);
+ }
+ })));
}
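
loadAnnotations() uses Reflections to find every @Checks and @Catalogs method, instantiates the declaring class once (or reuses the Checker instance itself), and files the method in the corresponding lookup table under its path. A hedged sketch of an externally supplied handler, assuming the Reflections scan covers its package and that it keeps the (definitions, context) signature used by the handlers in this file:

    import java.util.Map;

    import org.onap.sdc.dcae.checker.Checker;
    import org.onap.sdc.dcae.checker.Checks;

    public class ExtraTopologyChecks {
        // Hypothetical handler: loadAnnotations() would create it via newInstance()
        // and register this method in the 'checks' table under its path.
        @Checks(path = "/topology_template/outputs")
        public void check_outputs(Map<String, Map> theOutputs, Checker.CheckContext theContext) {
            // additional cross-checks for topology_template outputs would go here
        }
    }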
@@ -341,7 +401,7 @@ public class Checker {
throws CheckerException {
debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "parseTarget {}", theTarget);
- Reader source = null;
+ Reader source;
try {
source = theTarget.open();
} catch (IOException iox) {
@@ -375,15 +435,15 @@ public class Checker {
targets.add(theTarget);
} else {
//the target turned out to be a stream containing multiple documents
- for (int i = 0; i < yamlRoots.size(); i++) {
-/*
-!!We're changing the target below, i.e. we're changing the target implementation hence caching implementation will suffer!!
-*/
+ /*
+ !!We're changing the target below, i.e. we're changing the target implementation hence caching implementation will suffer!!
+ */
+ IntStream.range(0, yamlRoots.size()).forEach(i -> {
Target newTarget = new Target(theTarget.getName(),
fragmentTargetURI(theTarget.getLocation(), String.valueOf(i)));
newTarget.setTarget(yamlRoots.get(i));
targets.add(newTarget);
- }
+ });
}
debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), " exiting parseTarget {}", theTarget);
@@ -418,7 +478,7 @@ public class Checker {
TOSCAValidator validator = null;
try {
- validator = new TOSCAValidator(theTarget, grammar.getTarget());
+ validator = new TOSCAValidator(theTarget, grammar.getTarget(), this);
} catch (SchemaException sx) {
throw new CheckerException("Grammar error at: " + sx.getPath(), sx);
}
@@ -427,7 +487,7 @@ public class Checker {
validator.validate(theTarget.getTarget()));
if (!theTarget.getReport().hasErrors()) {
- applyCanonicals(theTarget.getTarget(), validator.canonicals);
+ applyCanonicals(theTarget.getTarget(), validator.getCanonicals());
}
debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), " exiting validateTarget {}", theTarget);
@@ -447,74 +507,6 @@ public class Checker {
return theTarget;
}
- public void checkProperties(
- Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter(PROPERTIES);
- try {
- if (!checkDefinition(PROPERTIES, theDefinitions, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkPropertyDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkPropertyDefinition(
- String theName, Map theDefinition, CheckContext theContext) {
- theContext.enter(theName);
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
- //check the type
- if (!checkDataType(theDefinition, theContext)) {
- return;
- }
- //check default value is compatible with type
- Object defaultValue = theDefinition.get(DEFAULT);
- if (defaultValue != null) {
- checkDataValuation(defaultValue, theDefinition, theContext);
- }
-
- theContext.exit();
- }
-
- private void checkAttributes(
- Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter(ATTRIBUTES);
- try {
- if (!checkDefinition(ATTRIBUTES, theDefinitions, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkAttributeDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkAttributeDefinition(
- String theName, Map theDefinition, CheckContext theContext) {
- theContext.enter(theName);
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
- if (!checkDataType(theDefinition, theContext)) {
- return;
- }
- } finally {
- theContext.exit();
- }
- }
-
/* top level rule, we collected the whole information set.
* this is where checking starts
*/
@@ -530,760 +522,12 @@ public class Checker {
//!!! imports need to be processed first now that catalogging takes place at check time!!
//first catalog whatever it is there to be cataloged so that the checks can perform cross-checking
- for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator();
- ri.hasNext(); ) {
- Map.Entry<String, Object> e = ri.next();
- catalogs(e.getKey(), e.getValue(), theContext);
- }
+ theDef.forEach((key, value) -> catalogs(key, value, theContext));
- for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator();
- ri.hasNext(); ) {
- Map.Entry<String, Object> e = ri.next();
- checks(e.getKey(), e.getValue(), theContext);
- }
+ theDef.forEach((key, value) -> checks(key, value, theContext));
theContext.exit();
}
- @Catalogs(path = "/data_types")
- protected void catalog_data_types(
- Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter(DATA_TYPES);
- try {
- catalogTypes(Construct.Data, theDefinitions, theContext);
- } finally {
- theContext.exit();
- }
- }
-
- @Checks(path = "/data_types")
- protected void check_data_types(
- Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter(DATA_TYPES);
-
- try {
- if (!checkDefinition(DATA_TYPES, theDefinitions, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkDataTypeDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkDataTypeDefinition(String theName,
- Map theDefinition,
- CheckContext theContext) {
- theContext.enter(theName, Construct.Data);
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
-
- if (theDefinition.containsKey(PROPERTIES)) {
- checkProperties(
- (Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
- checkTypeConstructFacet(Construct.Data, theName, theDefinition,
- Facet.properties, theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- @Catalogs(path = "/capability_types")
- protected void catalog_capability_types(
- Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter(CAPABILITY_TYPES);
- try {
- catalogTypes(Construct.Capability, theDefinitions, theContext);
- } finally {
- theContext.exit();
- }
- }
-
- /* */
- @Checks(path = "/capability_types")
- protected void check_capability_types(
- Map<String, Map> theTypes, CheckContext theContext) {
- theContext.enter(CAPABILITY_TYPES);
- try {
- if (!checkDefinition(CAPABILITY_TYPES, theTypes, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theTypes.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkCapabilityTypeDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkCapabilityTypeDefinition(String theName,
- Map theDefinition,
- CheckContext theContext) {
- theContext.enter(theName, Construct.Capability);
-
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
-
- if (theDefinition.containsKey(PROPERTIES)) {
- checkProperties(
- (Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
- checkTypeConstructFacet(Construct.Capability, theName, theDefinition,
- Facet.properties, theContext);
- }
-
- if (theDefinition.containsKey(ATTRIBUTES)) {
- checkAttributes(
- (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext);
- checkTypeConstructFacet(Construct.Capability, theName, theDefinition,
- Facet.attributes, theContext);
- }
-
- //valid_source_types: see capability_type_definition
- //unclear: how is the valid_source_types list definition eveolving across
- //the type hierarchy: additive, overwriting, ??
- if (theDefinition.containsKey(VALID_SOURCE_TYPES)) {
- checkTypeReference(Construct.Node, theContext,
- ((List<String>) theDefinition.get(VALID_SOURCE_TYPES)).toArray(EMPTY_STRING_ARRAY));
- }
- } finally {
- theContext.exit();
- }
- }
-
- @Catalogs(path = "/relationship_types")
- protected void catalog_relationship_types(
- Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter(RELATIONSHIP_TYPES);
- try {
- catalogTypes(Construct.Relationship, theDefinitions, theContext);
- } finally {
- theContext.exit();
- }
- }
-
- /* */
- @Checks(path = "/relationship_types")
- protected void check_relationship_types(
- Map<String, Map> theDefinition, CheckContext theContext) {
- theContext.enter(RELATIONSHIP_TYPES);
- try {
- if (!checkDefinition(RELATIONSHIP_TYPES, theDefinition, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkRelationshipTypeDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkRelationshipTypeDefinition(String theName,
- Map theDefinition,
- CheckContext theContext) {
- theContext.enter(theName, Construct.Relationship);
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
-
- if (theDefinition.containsKey(PROPERTIES)) {
- checkProperties(
- (Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
- checkTypeConstructFacet(Construct.Relationship, theName, theDefinition,
- Facet.properties, theContext);
- }
-
- if (theDefinition.containsKey(ATTRIBUTES)) {
- checkProperties(
- (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext);
- checkTypeConstructFacet(Construct.Relationship, theName, theDefinition,
- Facet.attributes, theContext);
- }
-
- Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get(INTERFACES);
- if (interfaces != null) {
- theContext.enter(INTERFACES);
- for (Iterator<Map.Entry<String, Map>> i =
- interfaces.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- check_type_interface_definition(
- e.getKey(), e.getValue(), theContext);
- }
- theContext.exit();
- }
-
- if (theDefinition.containsKey(VALID_TARGET_TYPES)) {
- checkTypeReference(Construct.Capability, theContext,
- ((List<String>) theDefinition.get(VALID_TARGET_TYPES)).toArray(EMPTY_STRING_ARRAY));
- }
- } finally {
- theContext.exit();
- }
- }
-
- @Catalogs(path = "/artifact_types")
- protected void catalog_artifact_types(
- Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter(ARTIFACT_TYPES);
- try {
- catalogTypes(Construct.Artifact, theDefinitions, theContext);
- } finally {
- theContext.exit();
- }
- }
-
- /* */
- @Checks(path = "/artifact_types")
- protected void check_artifact_types(
- Map<String, Map> theDefinition, CheckContext theContext) {
- theContext.enter(ARTIFACT_TYPES);
- try {
- if (!checkDefinition(ARTIFACT_TYPES, theDefinition, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkArtifactTypeDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkArtifactTypeDefinition(String theName,
- Map theDefinition,
- CheckContext theContext) {
- theContext.enter(theName, Construct.Artifact);
- try {
- checkDefinition(theName, theDefinition, theContext);
- } finally {
- theContext.exit();
- }
- }
-
- @Catalogs(path = "/interface_types")
- protected void catalog_interface_types(
- Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter(INTERFACE_TYPES);
- try {
- catalogTypes(Construct.Interface, theDefinitions, theContext);
- } finally {
- theContext.exit();
- }
- }
-
- @Checks(path = "/interface_types")
- protected void check_interface_types(
- Map<String, Map> theDefinition, CheckContext theContext) {
- theContext.enter(INTERFACE_TYPES);
- try {
- if (!checkDefinition(INTERFACE_TYPES, theDefinition, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkInterfaceTypeDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkInterfaceTypeDefinition(String theName,
- Map theDefinition,
- CheckContext theContext) {
- theContext.enter(theName, Construct.Interface);
- try {
- checkDefinition(theName, theDefinition, theContext);
- } finally {
- theContext.exit();
- }
- }
-
- @Catalogs(path = "/node_types")
- protected void catalog_node_types(
- Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter(NODE_TYPES);
- try {
- catalogTypes(Construct.Node, theDefinitions, theContext);
- } finally {
- theContext.exit();
- }
- }
-
- /* */
- @Checks(path = "/node_types")
- protected void check_node_types(
- Map<String, Map> theDefinition, CheckContext theContext) {
- theContext.enter(NODE_TYPES);
- try {
- if (!checkDefinition(NODE_TYPES, theDefinition, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkNodeTypeDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkNodeTypeDefinition(String theName,
- Map theDefinition,
- CheckContext theContext) {
- theContext.enter(theName, Construct.Node);
-
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
-
- if (theDefinition.containsKey(PROPERTIES)) {
- checkProperties(
- (Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
- checkTypeConstructFacet(Construct.Node, theName, theDefinition,
- Facet.properties, theContext);
- }
-
- if (theDefinition.containsKey(ATTRIBUTES)) {
- checkProperties(
- (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext);
- checkTypeConstructFacet(Construct.Node, theName, theDefinition,
- Facet.attributes, theContext);
- }
-
- //requirements
- if (theDefinition.containsKey(REQUIREMENTS)) {
- check_requirements(
- (List<Map>) theDefinition.get(REQUIREMENTS), theContext);
- }
-
- //capabilities
- if (theDefinition.containsKey(CAPABILITIES)) {
- check_capabilities(
- (Map<String, Map>) theDefinition.get(CAPABILITIES), theContext);
- }
-
- //interfaces:
- Map<String, Map> interfaces =
- (Map<String, Map>) theDefinition.get(INTERFACES);
- checkMapTypeInterfaceDefinition(theContext, interfaces);
- } finally {
- theContext.exit();
- }
- }
-
- private void checkMapTypeInterfaceDefinition(CheckContext theContext, Map<String, Map> interfaces) {
- if (interfaces != null) {
- try {
- theContext.enter(INTERFACES);
- for (Iterator<Map.Entry<String, Map>> i =
- interfaces.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- check_type_interface_definition(
- e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
- }
-
- @Catalogs(path = "/group_types")
- protected void catalog_group_types(
- Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter(GROUP_TYPES);
- try {
- catalogTypes(Construct.Group, theDefinitions, theContext);
- } finally {
- theContext.exit();
- }
- }
-
- @Checks(path = "/group_types")
- protected void check_group_types(
- Map<String, Map> theDefinition, CheckContext theContext) {
- theContext.enter(GROUP_TYPES);
- try {
- if (!checkDefinition(GROUP_TYPES, theDefinition, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkGroupTypeDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkGroupTypeDefinition(String theName,
- Map theDefinition,
- CheckContext theContext) {
- theContext.enter(theName, Construct.Group);
-
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
-
- if (theDefinition.containsKey(PROPERTIES)) {
- checkProperties(
- (Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
- checkTypeConstructFacet(Construct.Group, theName, theDefinition,
- Facet.properties, theContext);
- }
-
- if (theDefinition.containsKey(TARGETS_CONSTANT)) {
- checkTypeReference(Construct.Node, theContext,
- ((List<String>) theDefinition.get(TARGETS_CONSTANT)).toArray(EMPTY_STRING_ARRAY));
- }
-
- //interfaces
- Map<String, Map> interfaces =
- (Map<String, Map>) theDefinition.get(INTERFACES);
- checkMapTypeInterfaceDefinition(theContext, interfaces);
-
- } finally {
- theContext.exit();
- }
- }
-
- @Catalogs(path = "/policy_types")
- protected void catalog_policy_types(
- Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter(POLICY_TYPES);
- try {
- catalogTypes(Construct.Policy, theDefinitions, theContext);
- } finally {
- theContext.exit();
- }
- }
-
- /* */
- @Checks(path = "/policy_types")
- protected void check_policy_types(
- Map<String, Map> theDefinition, CheckContext theContext) {
- theContext.enter(POLICY_TYPES);
- try {
- if (!checkDefinition(POLICY_TYPES, theDefinition, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkPolicyTypeDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkPolicyTypeDefinition(String theName,
- Map theDefinition,
- CheckContext theContext) {
- theContext.enter(theName, Construct.Policy);
-
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
-
- if (theDefinition.containsKey(PROPERTIES)) {
- checkProperties(
- (Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
- checkTypeConstructFacet(Construct.Policy, theName, theDefinition,
- Facet.properties, theContext);
- }
-
- //the targets can be known node types or group types
- List<String> targets = (List<String>) theDefinition.get(TARGETS_CONSTANT);
- if ((targets != null) && (checkDefinition(TARGETS_CONSTANT, targets, theContext))) {
- for (String targetItr : targets) {
- if (!(this.catalog.hasType(Construct.Node, targetItr) ||
- this.catalog.hasType(Construct.Group, targetItr))) {
- theContext.addError("The 'targets' entry must contain a reference to a node type or group type, '" + target + IS_NONE_OF_THOSE, null);
- }
- }
- }
- } finally {
- theContext.exit();
- }
- }
-
- //checking of actual constructs (capability, ..)
-
- /* First, interface types do not have a hierarchical organization (no
- * 'derived_from' in a interface type definition).
- * So, when interfaces (with a certain type) are defined in a node
- * or relationship type (and they can define new? operations), what
- * is there to check:
- * Can operations here re-define their declaration from the interface
- * type spec?? From A.5.11.3 we are to understand indicates override to be
- * the default interpretation .. but they talk about sub-classing so it
- * probably intended as a reference to the node or relationship type
- * hierarchy and not the interface type (no hierarchy there).
- * Or is this a a case of augmentation where new operations can be added??
- */
- private void check_type_interface_definition(
- String theName, Map theDef, CheckContext theContext) {
- theContext.enter(theName);
- try {
- if (!checkDefinition(theName, theDef, theContext)) {
- return;
- }
-
- if (!checkType(Construct.Interface, theDef, theContext)) {
- return;
- }
-
- if (theDef.containsKey(INPUTS)) {
- check_inputs((Map<String, Map>) theDef.get(INPUTS), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void check_capabilities(Map<String, Map> theDefinition,
- CheckContext theContext) {
- theContext.enter(CAPABILITIES);
- try {
- if (!checkDefinition(CAPABILITIES, theDefinition, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkCapabilityDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- /* A capability definition appears within the context ot a node type */
- private void checkCapabilityDefinition(String theName,
- Map theDef,
- CheckContext theContext) {
- theContext.enter(theName, Construct.Capability);
-
- try {
- if (!checkDefinition(theName, theDef, theContext)) {
- return;
- }
-
- //check capability type
- if (!checkType(Construct.Capability, theDef, theContext)) {
- return;
- }
-
- //check properties
- if (!checkFacetAugmentation(
- Construct.Capability, theDef, Facet.properties, theContext)) {
- return;
- }
-
- //check attributes
- if (!checkFacetAugmentation(
- Construct.Capability, theDef, Facet.attributes, theContext)) {
- return;
- }
-
- //valid_source_types: should point to valid template nodes
- if (theDef.containsKey(VALID_SOURCE_TYPES)) {
- checkTypeReference(Construct.Node, theContext,
- ((List<String>) theDef.get(VALID_SOURCE_TYPES)).toArray(EMPTY_STRING_ARRAY));
- //per A.6.1.4 there is an additinal check to be performed here:
- //"Any Node Type (names) provides as values for the valid_source_types keyname SHALL be type-compatible (i.e., derived from the same parent Node Type) with any Node Types defined using the same keyname in the parent Capability Type."
- }
- //occurences: were verified in range_definition
-
- } finally {
- theContext.exit();
- }
- }
-
- private void check_requirements(List<Map> theDefinition,
- CheckContext theContext) {
- theContext.enter(REQUIREMENTS);
- try {
- if (!checkDefinition(REQUIREMENTS, theDefinition, theContext)) {
- return;
- }
-
- for (Iterator<Map> i = theDefinition.iterator(); i.hasNext(); ) {
- Map e = i.next();
- Iterator<Map.Entry<String, Map>> ei =
- (Iterator<Map.Entry<String, Map>>) e.entrySet().iterator();
- Map.Entry<String, Map> eie = ei.next();
- checkRequirementDefinition(eie.getKey(), eie.getValue(), theContext);
- assert !ei.hasNext();
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkRequirementDefinition(String theName,
- Map theDef,
- CheckContext theContext) {
- theContext.enter(theName, Construct.Requirement);
-
- try {
- if (!checkDefinition(theName, theDef, theContext)) {
- return;
- }
- //check capability type
- String capabilityType = (String) theDef.get(CAPABILITY);
- if (null != capabilityType) {
- checkTypeReference(Construct.Capability, theContext, capabilityType);
- }
-
- //check node type
- String nodeType = (String) theDef.get("node");
- if (null != nodeType) {
- checkTypeReference(Construct.Node, theContext, nodeType);
- }
-
- //check relationship type
- Map relationshipSpec = (Map) theDef.get("relationship");
- String relationshipType = null;
- if (null != relationshipSpec) {
- relationshipType = (String) relationshipSpec.get("type");
- if (relationshipType != null) { //should always be the case
- checkTypeReference(Construct.Relationship, theContext, relationshipType);
- }
-
- Map<String, Map> interfaces = (Map<String, Map>)
- relationshipSpec.get(INTERFACES);
- if (interfaces != null) {
- //augmentation (additional properties or operations) of the interfaces
- //defined by the above relationship types
-
- //check that the interface types are known
- for (Map interfaceDef : interfaces.values()) {
- checkType(Construct.Interface, interfaceDef, theContext);
- }
- }
- }
-
- //cross checks
-
- //the capability definition might come from the capability type or from the capability definition
- //within the node type. We might have more than one as a node might specify multiple capabilities of the
- //same type.
- //the goal here is to cross check the compatibility of the valid_source_types specification in the
- //target capability definition (if that definition contains a valid_source_types entry).
- List<Map> capabilityDefs = new LinkedList<>();
- //nodeType exposes capabilityType
- if (nodeType != null) {
- Map<String, Map> capabilities =
- findTypeFacetByType(Construct.Node, nodeType,
- Facet.capabilities, capabilityType);
- if (capabilities.isEmpty()) {
- theContext.addError("The node type " + nodeType + " does not appear to expose a capability of a type compatible with " + capabilityType, null);
- } else {
- for (Map.Entry<String, Map> capability : capabilities.entrySet()) {
- //this is the capability as it was defined in the node type
- Map capabilityDef = capability.getValue();
- //if it defines a valid_source_types then we're working with it,
- //otherwise we're working with the capability type it points to.
- //The spec does not make it clear if the valid_source_types in a capability definition augments or
- //overwrites the one from the capabilityType (it just says they must be compatible).
- if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) {
- capabilityDefs.add(capabilityDef);
- } else {
- capabilityDef =
- catalog.getTypeDefinition(Construct.Capability, (String) capabilityDef.get("type"));
- if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) {
- capabilityDefs.add(capabilityDef);
- } else {
- //!!if there is a capability that does not have a valid_source_type than there is no reason to
- //make any further verification (as there is a valid node_type/capability target for this requirement)
- capabilityDefs.clear();
- break;
- }
- }
- }
- }
- } else {
- Map capabilityDef = catalog.getTypeDefinition(Construct.Capability, capabilityType);
- if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) {
- capabilityDefs.add(capabilityDef);
- }
- }
-
- //check that the node type enclosing this requirement definition
- //is in the list of valid_source_types
- if (!capabilityDefs.isEmpty()) {
- String enclosingNodeType =
- theContext.enclosingConstruct(Construct.Node);
- assert enclosingNodeType != null;
-
- if (!capabilityDefs.stream().anyMatch(
- (Map capabilityDef) -> {
- List<String> valid_source_types =
- (List<String>) capabilityDef.get(VALID_SOURCE_TYPES);
- return valid_source_types.stream().anyMatch(
- (String source_type) -> catalog.isDerivedFrom(
- Construct.Node, enclosingNodeType, source_type));
- })) {
- theContext.addError("Node type: " + enclosingNodeType + " not compatible with any of the valid_source_types provided in the definition of compatible capabilities", null);
- }
- }
-
- //if we have a relationship type, check if it has a valid_target_types
- //if it does, make sure that the capability type is compatible with one
- //of them
- if (relationshipType != null) { //should always be the case
- Map relationshipTypeDef = catalog.getTypeDefinition(
- Construct.Relationship, relationshipType);
- if (relationshipTypeDef != null) {
- List<String> valid_target_types =
- (List<String>) relationshipTypeDef.get(VALID_TARGET_TYPES);
- if (valid_target_types != null) {
- boolean found = false;
- for (String target_type : valid_target_types) {
- if (catalog.isDerivedFrom(
- Construct.Capability, capabilityType, target_type)) {
- found = true;
- break;
- }
- }
- if (!found) {
- theContext.addError("Capability type: " + capabilityType + " not compatible with any of the valid_target_types " + valid_target_types + " provided in the definition of relationship type " + relationshipType, null);
- }
- }
- }
- }
-
- //relationship declares the capabilityType in its valid_target_type set
- //in A.6.9 'Relationship Type' the spec does not indicate how inheritance
- //is to be applied to the valid_target_type spec: cumulative, overwrites,
- //so we treat it as an overwrite.
- } finally {
- theContext.exit();
- }
- }
//topology_template_definition and sub-rules
/* */
@@ -1308,8 +552,7 @@ public class Checker {
protected void check_imports(List theImports, CheckContext theContext) {
theContext.enter("imports");
- for (ListIterator li = theImports.listIterator(); li.hasNext(); ) {
- Object importEntry = li.next();
+ for (Object importEntry : theImports) {
Object importFile = ((Map) mapEntry(importEntry).getValue()).get("file");
Target tgt = null;
try {
@@ -1331,1360 +574,11 @@ public class Checker {
theContext.exit();
}
- /* */
- @Checks(path = "/topology_template/substitution_mappings")
- protected void check_substitution_mappings(Map<String, Object> theSub,
- CheckContext theContext) {
- theContext.enter("substitution_mappings");
- try {
- //type is mandatory
- String type = (String) theSub.get("node_type");
- if (!checkTypeReference(Construct.Node, theContext, type)) {
- theContext.addError("Unknown node type: " + type + "", null);
- return; //not much to go on with
- }
-
- Map<String, List> capabilities = (Map<String, List>) theSub.get(CAPABILITIES);
- if (null != capabilities) {
- for (Map.Entry<String, List> ce : capabilities.entrySet()) {
- //the key must be a capability of the type
- if (null == findTypeFacetByName(Construct.Node, type,
- Facet.capabilities, ce.getKey())) {
- theContext.addError("Unknown node type capability: " + ce.getKey() + ", type " + type, null);
- }
- //the value is a 2 element list: first is a local node,
- //second is the name of one of its capabilities
- List targetList = ce.getValue();
- if (targetList.size() != 2) {
- theContext.addError("Invalid capability mapping: " + target + ", expecting 2 elements", null);
- continue;
- }
-
- String targetNode = (String) targetList.get(0);
- String targetCapability = (String) targetList.get(1);
-
- Map<String, Object> targetNodeDef = (Map<String, Object>)
- this.catalog.getTemplate(theContext.target(), Construct.Node, targetNode);
- if (null == targetNodeDef) {
- theContext.addError("Invalid capability mapping node template: " + targetNode, null);
- continue;
- }
-
- String targetNodeType = (String) targetNodeDef.get("type");
- if (null == findTypeFacetByName(Construct.Node, targetNodeType,
- Facet.capabilities, targetCapability)) {
- theContext.addError("Invalid capability mapping capability: " + targetCapability + ". No such capability found for node template " + targetNode + ", of type " + targetNodeType, null);
- }
- }
- }
-
- Map<String, List> requirements = (Map<String, List>) theSub.get(REQUIREMENTS);
- if (null != requirements) {
- for (Map.Entry<String, List> re : requirements.entrySet()) {
- //the key must be a requirement of the type
- if (null == findNodeTypeRequirementByName(type, re.getKey())) {
- theContext.addError("Unknown node type requirement: " + re.getKey() + ", type " + type, null);
- }
-
- List targetList = re.getValue();
- if (targetList.size() != 2) {
- theContext.addError("Invalid requirement mapping: " + targetList + ", expecting 2 elements", null);
- continue;
- }
-
- String targetNode = (String) targetList.get(0);
- String targetRequirement = (String) targetList.get(1);
-
- Map<String, Object> targetNodeDef = (Map<String, Object>)
- this.catalog.getTemplate(theContext.target(), Construct.Node, targetNode);
- if (null == targetNodeDef) {
- theContext.addError("Invalid requirement mapping node template: " + targetNode, null);
- continue;
- }
-
- String targetNodeType = (String) targetNodeDef.get("type");
- if (null == findNodeTypeRequirementByName(targetNodeType, targetRequirement)) {
- theContext.addError("Invalid requirement mapping requirement: " + targetRequirement + ". No such requirement found for node template " + targetNode + ", of type " + targetNodeType, null);
- }
- }
- }
- } finally {
- theContext.exit();
- }
- }
-
-
- /* */
- @Checks(path = "/topology_template/inputs")
- protected void check_inputs(Map<String, Map> theInputs,
- CheckContext theContext) {
- theContext.enter(INPUTS);
-
- try {
- if (!checkDefinition(INPUTS, theInputs, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theInputs.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkInputDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkInputDefinition(String theName,
- Map theDef,
- CheckContext theContext) {
- theContext.enter(theName);
- try {
- if (!checkDefinition(theName, theDef, theContext)) {
- return;
- }
- //
- if (!checkDataType(theDef, theContext)) {
- return;
- }
- //check default value
- Object defaultValue = theDef.get(DEFAULT);
- if (defaultValue != null) {
- checkDataValuation(defaultValue, theDef, theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- @Checks(path = "topology_template/outputs")
- protected void check_outputs(Map<String, Map> theOutputs,
- CheckContext theContext) {
- theContext.enter("outputs");
-
- try {
- if (!checkDefinition("outputs", theOutputs, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theOutputs.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkOutputDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkOutputDefinition(String theName,
- Map theDef,
- CheckContext theContext) {
- theContext.enter(theName);
- try {
- checkDefinition(theName, theDef, theContext);
- //check the expression
- } finally {
- theContext.exit();
- }
- }
-
- @Checks(path = "/topology_template/groups")
- protected void check_groups(Map<String, Map> theGroups,
- CheckContext theContext) {
- theContext.enter("groups");
-
- try {
- if (!checkDefinition("groups", theGroups, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theGroups.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkGroupDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkGroupDefinition(String theName,
- Map theDef,
- CheckContext theContext) {
- theContext.enter(theName);
- try {
- if (!checkDefinition(theName, theDef, theContext)) {
- return;
- }
-
- if (!checkType(Construct.Group, theDef, theContext)) {
- return;
- }
-
- if (!checkFacet(
- Construct.Group, theDef, Facet.properties, theContext)) {
- return;
- }
-
- if (theDef.containsKey(TARGETS_CONSTANT)) {
-
- List<String> targetsTypes = (List<String>)
- this.catalog.getTypeDefinition(Construct.Group,
- (String) theDef.get("type"))
- .get(TARGETS_CONSTANT);
-
- List<String> targets = (List<String>) theDef.get(TARGETS_CONSTANT);
- for (String targetItr : targets) {
- if (!this.catalog.hasTemplate(theContext.target(), Construct.Node, targetItr)) {
- theContext.addError("The 'targets' entry must contain a reference to a node template, '" + targetItr + "' is not one", null);
- } else {
- if (targetsTypes != null) {
- String targetType = (String)
- this.catalog.getTemplate(theContext.target(), Construct.Node, targetItr).get("type");
-
- boolean found = false;
- for (String type : targetsTypes) {
- found = this.catalog
- .isDerivedFrom(Construct.Node, targetType, type);
- if (found) {
- break;
- }
- }
-
- if (!found) {
- theContext.addError("The 'targets' entry '" + targetItr + "' is not type compatible with any of types specified in policy type targets", null);
- }
- }
- }
- }
- }
- } finally {
- theContext.exit();
- }
- }
-
- @Checks(path = "/topology_template/policies")
- protected void check_policies(List<Map<String, Map>> thePolicies,
- CheckContext theContext) {
- theContext.enter("policies");
-
- try {
- if (!checkDefinition("policies", thePolicies, theContext)) {
- return;
- }
-
- for (Map<String, Map> policy : thePolicies) {
- assert policy.size() == 1;
- Map.Entry<String, Map> e = policy.entrySet().iterator().next();
- checkPolicyDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkPolicyDefinition(String theName,
- Map theDef,
- CheckContext theContext) {
- theContext.enter(theName);
- try {
- if (!checkDefinition(theName, theDef, theContext)) {
- return;
- }
-
- if (!checkType(Construct.Policy, theDef, theContext)) {
- return;
- }
-
- if (!checkFacet(
- Construct.Policy, theDef, Facet.properties, theContext)) {
- return;
- }
-
- //targets: must point to node or group templates (that are of a type
- //specified in the policy type definition, if targets were specified
- //there).
- if (theDef.containsKey(TARGETS_CONSTANT)) {
- List<String> targetsTypes = (List<String>)
- this.catalog.getTypeDefinition(Construct.Policy,
- (String) theDef.get("type"))
- .get(TARGETS_CONSTANT);
-
- List<String> targets = (List<String>) theDef.get(TARGETS_CONSTANT);
- for (String targetItr : targets) {
- Construct targetConstruct = null;
-
- if (this.catalog.hasTemplate(theContext.target(), Construct.Group, targetItr)) {
- targetConstruct = Construct.Group;
- } else if (this.catalog.hasTemplate(theContext.target(), Construct.Node, targetItr)) {
- targetConstruct = Construct.Node;
- } else {
- theContext.addError("The 'targets' entry must contain a reference to a node template or group template, '" + target + IS_NONE_OF_THOSE, null);
- }
-
- if (targetConstruct != null &&
- targetsTypes != null) {
- //get the target type and make sure is compatible with the types
- //indicated in the type spec
- String targetType = (String)
- this.catalog.getTemplate(theContext.target(), targetConstruct, targetItr).get("type");
-
- boolean found = false;
- for (String type : targetsTypes) {
- found = this.catalog
- .isDerivedFrom(targetConstruct, targetType, type);
- if (found) {
- break;
- }
- }
-
- if (!found) {
- theContext.addError("The 'targets' " + targetConstruct + " entry '" + targetItr + "' is not type compatible with any of types specified in policy type targets", null);
- }
- }
- }
- }
-
- } finally {
- theContext.exit();
- }
- }
-
- /* */
- @Checks(path = "/topology_template/node_templates")
- protected void check_node_templates(Map<String, Map> theTemplates,
- CheckContext theContext) {
- theContext.enter("node_templates");
- try {
- if (!checkDefinition("node_templates", theTemplates, theContext)) {
- return;
- }
-
- for (Iterator<Map.Entry<String, Map>> i = theTemplates.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkNodeTemplateDefinition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- /* */
- private void checkNodeTemplateDefinition(String theName,
- Map theNode,
- CheckContext theContext) {
- theContext.enter(theName, Construct.Node);
-
- try {
- if (!checkDefinition(theName, theNode, theContext)) {
- return;
- }
-
- if (!checkType(Construct.Node, theNode, theContext)) {
- return;
- }
-
- //copy
- String copy = (String) theNode.get("copy");
- if (copy != null) {
- if (!checkTemplateReference(Construct.Node, theContext, copy)) {
- theContext.addError("The 'copy' reference " + copy + " does not point to a known node template", null);
- } else {
- //the 'copy' node specification should be used to provide 'defaults'
- //for this specification
- }
- }
-
- /* check that we operate on properties and attributes within the scope of
- the specified node type */
- if (!checkFacet(
- Construct.Node, /*theName,*/theNode, Facet.properties, theContext)) {
- return;
- }
-
- if (!checkFacet(
- Construct.Node, /*theName,*/theNode, Facet.attributes, theContext)) {
- return;
- }
-
- //requirement assignment seq
- if (theNode.containsKey(REQUIREMENTS)) {
- checkRequirementsAssignmentDefinition(
- (List<Map>) theNode.get(REQUIREMENTS), theContext);
- }
-
- //capability assignment map: subject to augmentation
- if (theNode.containsKey(CAPABILITIES)) {
- checkCapabilitiesAssignmentDefinition(
- (Map<String, Map>) theNode.get(CAPABILITIES), theContext);
- }
-
- //interfaces
- if (theNode.containsKey(INTERFACES)) {
- checkTemplateInterfacesDefinition(
- (Map<String, Map>) theNode.get(INTERFACES), theContext);
- }
-
- //artifacts: artifacts do not have different definition forms/syntax
- //depending on the context (type or template) but they are still subject
- //to 'augmentation'
- if (theNode.containsKey(ARTIFACTS)) {
- check_template_artifacts_definition(
- (Map<String, Object>) theNode.get(ARTIFACTS), theContext);
- }
-
- /* node_filter: the context to which the node filter is applied is very
- * wide here as opposed to the node filter specification in a requirement
- * assignment which has a more strict context (target node/capability are
- * specified).
- * We could check that there are nodes in this template having the
- * properties/capabilities specified in this filter, i.e. the filter has
- * a chance to succeed.
- */
- } finally {
- theContext.exit();
- }
- }
-
- @Checks(path = "/topology_template/relationship_templates")
- protected void check_relationship_templates(Map theTemplates,
- CheckContext theContext) {
- theContext.enter("relationship_templates");
-
- for (Iterator<Map.Entry<String, Map>> i = theTemplates.entrySet().iterator(); i.hasNext(); ) {
- Map.Entry<String, Map> e = i.next();
- checkRelationshipTemplateDefinition(e.getKey(), e.getValue(), theContext);
- }
- theContext.exit();
- }
-
- private void checkRelationshipTemplateDefinition(
- String theName,
- Map theRelationship,
- CheckContext theContext) {
- theContext.enter(theName, Construct.Relationship);
- try {
- if (!checkDefinition(theName, theRelationship, theContext)) {
- return;
- }
-
- if (!checkType(Construct.Relationship, theRelationship, theContext)) {
- return;
- }
-
- /* check that we operate on properties and attributes within the scope of
- the specified relationship type */
- if (!checkFacet(Construct.Relationship, theRelationship,
- Facet.properties, theContext)) {
- return;
- }
-
- if (!checkFacet(Construct.Relationship, theRelationship,
- Facet.attributes, theContext)) {
- return;
- }
-
- /* interface definitions
- note: augmentation is allowed here so not clear what to check ..
- maybe report augmentations if so configured .. */
-
- } finally {
- theContext.exit();
- }
- }
-
- //requirements and capabilities assignment appear in a node templates
- private void checkRequirementsAssignmentDefinition(
- List<Map> theRequirements, CheckContext theContext) {
- theContext.enter(REQUIREMENTS);
- try {
- if (!checkDefinition(REQUIREMENTS, theRequirements, theContext)) {
- return;
- }
-
- //the node type for the node template enclosing these requirements
- String nodeType = (String) catalog.getTemplate(
- theContext.target(),
- Construct.Node,
- theContext.enclosingConstruct(Construct.Node))
- .get("type");
-
- for (Iterator<Map> ri = theRequirements.iterator(); ri.hasNext(); ) {
- Map<String, Map> requirement = (Map<String, Map>) ri.next();
-
- Iterator<Map.Entry<String, Map>> rai = requirement.entrySet().iterator();
-
- Map.Entry<String, Map> requirementEntry = rai.next();
- assert !rai.hasNext();
-
- String requirementName = requirementEntry.getKey();
- Map requirementDef = findNodeTypeRequirementByName(
- nodeType, requirementName);
-
- if (requirementDef == null) {
- theContext.addError("No requirement " + requirementName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null);
- continue;
- }
-
- checkRequirementAssignmentDefinition(
- requirementName, requirementEntry.getValue(), requirementDef, theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkRequirementAssignmentDefinition(
- String theRequirementName,
- Map theAssignment,
- Map theDefinition,
- CheckContext theContext) {
- theContext//.enter("requirement_assignment")
- .enter(theRequirementName, Construct.Requirement);
-
- //grab the node type definition to verify compatibility
-
- try {
- //node assignment
- boolean targetNodeIsTemplate = false;
- String targetNode = (String) theAssignment.get("node");
- if (targetNode == null) {
- targetNode = (String) theDefinition.get("node");
- //targetNodeIsTemplate stays false, targetNode must be a type
- } else {
- //the value must be a node template or a node type
- targetNodeIsTemplate = isTemplateReference(
- Construct.Node, theContext, targetNode);
- if ((!targetNodeIsTemplate) && (!isTypeReference(Construct.Node, targetNode))){
- theContext.addError("The 'node' entry must contain a reference to a node template or node type, '" + targetNode + IS_NONE_OF_THOSE, null);
- return;
- }
-
- //additional checks
- String targetNodeDef = (String) theDefinition.get("node");
- if (targetNodeDef != null && targetNode != null) {
- if (targetNodeIsTemplate) {
-            //if the target is a node template, it must be compatible with the
-            //node type specification in the requirement definition
- String targetNodeType = (String)
- catalog.getTemplate(theContext.target(), Construct.Node, targetNode).get("type");
- if (!catalog.isDerivedFrom(
- Construct.Node, targetNodeType, targetNodeDef)) {
- theContext.addError("The required target node type '" + targetNodeType + "' of target node " + targetNode + " is not compatible with the target node type found in the requirement definition: " + targetNodeDef, null);
- return;
- }
- } else {
-            //if the target is a node type it must be compatible (equal to or derived
-            //from) with the node type specification in the requirement definition
- if (!catalog.isDerivedFrom(
- Construct.Node, targetNode, targetNodeDef)) {
- theContext.addError("The required target node type '" + targetNode + "' is not compatible with the target node type found in the requirement definition: " + targetNodeDef, null);
- return;
- }
- }
- }
- }
-
- String targetNodeType = targetNodeIsTemplate ?
- (String) catalog.getTemplate(theContext.target(), Construct.Node, targetNode).get("type") :
- targetNode;
-
- //capability assignment
- boolean targetCapabilityIsType = false;
- String targetCapability = (String) theAssignment.get(CAPABILITY);
- if (targetCapability == null) {
- targetCapability = (String) theDefinition.get(CAPABILITY);
- //in a requirement definition the target capability can only be a
- //capability type (and not a capability name within some target node
- //type)
- targetCapabilityIsType = true;
- } else {
- targetCapabilityIsType = isTypeReference(Construct.Capability, targetCapability);
-
- //check compatibility with the target compatibility type specified
- //in the requirement definition, if any
- String targetCapabilityDef = (String) theDefinition.get(CAPABILITY);
- if (targetCapabilityDef != null && targetCapability != null) {
- if (targetCapabilityIsType) {
- if (!catalog.isDerivedFrom(
- Construct.Capability, targetCapability, targetCapabilityDef)) {
- theContext.addError("The required target capability type '" + targetCapability + "' is not compatible with the target capability type found in the requirement definition: " + targetCapabilityDef, null);
- return;
- }
- } else {
- //the capability is from a target node. Find its definition and
- //check that its type is compatible with the capability type
- //from the requirement definition
-
- //check target capability compatibility with target node
- if (targetNode == null) {
- theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', which was not specified", null);
- return;
- }
- if (!targetNodeIsTemplate) {
- theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', but there you specified a node type", null);
- return;
- }
- //check that the targetNode (its type) indeed has the
- //targetCapability
-
- Map<String, Object> targetNodeCapabilityDef =
- findTypeFacetByName(
- Construct.Node, targetNodeType,
- Facet.capabilities, targetCapability);
- if (targetNodeCapabilityDef == null) {
- theContext.addError("No capability '" + targetCapability + "' was specified in the node " + targetNode + " of type " + targetNodeType, null);
- return;
- }
-
- String targetNodeCapabilityType = (String) targetNodeCapabilityDef.get("type");
-
- if (!catalog.isDerivedFrom(Construct.Capability,
- targetNodeCapabilityType,
- targetCapabilityDef)) {
- theContext.addError("The required target capability type '" + targetCapabilityDef + "' is not compatible with the target capability type found in the target node type capability definition : " + targetNodeCapabilityType + ", targetNode " + targetNode + ", capability name " + targetCapability, null);
- return;
- }
- }
- }
- }
-
- //relationship assignment
- Map targetRelationship = (Map) theAssignment.get("relationship");
- if (targetRelationship != null) {
- //this has to be compatible with the relationship with the same name
- //from the node type
- //check the type
- }
-
-      //node_filter: we use JXPath to simplify the navigation somewhat,
-      //though the resulting paths are rather cryptic
- JXPathContext jxPath = JXPathContext.newContext(theAssignment);
- jxPath.setLenient(true);
-
- List<Map> propertiesFilter =
- (List<Map>) jxPath.getValue("/node_filter/properties");
- if (propertiesFilter != null) {
- for (Map propertyFilter : propertiesFilter) {
- if (targetNode != null) {
-          //if we have a target node or node template then it must
-          //have these properties
- for (Object propertyName : propertyFilter.keySet()) {
- if (null == findTypeFacetByName(Construct.Node,
- targetNodeType,
- Facet.properties,
- propertyName.toString())) {
- theContext.addError("The node_filter property " + propertyName + " is invalid: requirement target node " + targetNode + " does not have such a property", null);
- }
- }
- }
- }
- }
-
- List<Map> capabilitiesFilter =
- (List<Map>) jxPath.getValue("node_filter/capabilities");
- if (capabilitiesFilter != null) {
- for (Map capabilityFilterDef : capabilitiesFilter) {
- assert capabilityFilterDef.size() == 1;
- Map.Entry<String, Map> capabilityFilterEntry =
- (Map.Entry<String, Map>) capabilityFilterDef.entrySet().iterator().next();
- String targetFilterCapability = capabilityFilterEntry.getKey();
- Map<String, Object> targetFilterCapabilityDef = null;
-
-          //if we have a targetNode, capabilityName must be a capability of
-          //that node (type); or it can simply be a capability type (but the node
-          //must have a capability of that type)
-
- String targetFilterCapabilityType = null;
- if (targetNode != null) {
- targetFilterCapabilityDef =
- findTypeFacetByName(Construct.Node, targetNodeType,
- Facet.capabilities, targetFilterCapability);
- if (targetFilterCapabilityDef != null) {
- targetFilterCapabilityType =
- (String) targetFilterCapabilityDef/*.values().iterator().next()*/.get("type");
- } else {
- Map<String, Map> targetFilterCapabilities =
- findTypeFacetByType(Construct.Node, targetNodeType,
- Facet.capabilities, targetFilterCapability);
-
- if (!targetFilterCapabilities.isEmpty()) {
- if (targetFilterCapabilities.size() > 1) {
- errLogger.log(LogLevel.WARN, this.getClass().getName(), "checkRequirementAssignmentDefinition: filter check, target node type '{}' has more than one capability of type '{}', not supported", targetNodeType, targetFilterCapability);
- }
- //pick the first entry, it represents a capability of the required type
- Map.Entry<String, Map> capabilityEntry = targetFilterCapabilities.entrySet().iterator().next();
- targetFilterCapabilityDef = Collections.singletonMap(capabilityEntry.getKey(),
- capabilityEntry.getValue());
- targetFilterCapabilityType = targetFilterCapability;
- }
- }
- } else {
- //no node (type) specified, it can be a straight capability type
- targetFilterCapabilityDef = catalog.getTypeDefinition(
- Construct.Capability, targetFilterCapability);
-            //here comes the odd part: it can still be just a name, in which
- //case we should look at the requirement definition, see which
- //capability (type) it indicates
- assert targetCapabilityIsType; //cannot be otherwise, we'd need a node
- targetFilterCapabilityDef = catalog.getTypeDefinition(
- Construct.Capability, targetCapability);
- targetFilterCapabilityType = targetCapability;
- }
-
- if (targetFilterCapabilityDef == null) {
- theContext.addError("Capability (name or type) " + targetFilterCapability + " is invalid: not a known capability (type) " +
-                ((targetNodeType != null) ? (" of node type " + targetNodeType) : ""), null);
- continue;
- }
-
- for (Map propertyFilter :
- (List<Map>) jxPath.getValue("/node_filter/capabilities/" + targetFilterCapability + "/properties")) {
- //check that the properties are in the scope of the
- //capability definition
- for (Object propertyName : propertyFilter.keySet()) {
- if (null == findTypeFacetByName(Construct.Capability,
- targetCapability,
- Facet.properties,
- propertyName.toString())) {
- theContext.addError("The capability filter " + targetFilterCapability + " property " + propertyName + " is invalid: target capability " + targetFilterCapabilityType + " does not have such a property", null);
- }
- }
- }
- }
- }
-
- } finally {
- theContext//.exit()
- .exit();
- }
- }
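  /* Illustrative sketch (names are hypothetical, not from this codebase) of the
   * requirement assignment shape the method above walks:
   *
   *   requirements:
   *     - host:
   *         node: web_server_node                     # node template or node type
   *         capability: tosca.capabilities.Container  # capability type or capability name
   *         node_filter:
   *           properties:
   *             - num_cpus: { in_range: [2, 8] }
   *
   * The 'node' entry is resolved first (template vs. type), 'capability' is then
   * checked for compatibility against the requirement definition, and finally the
   * node_filter properties/capabilities are verified to exist on the target.
   */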
-
- private void checkCapabilitiesAssignmentDefinition(
- Map<String, Map> theCapabilities, CheckContext theContext) {
- theContext.enter(CAPABILITIES);
- try {
- if (!checkDefinition(CAPABILITIES, theCapabilities, theContext)) {
- return;
- }
-
-      //the node type for the node template enclosing these capabilities
- String nodeType = (String) catalog.getTemplate(
- theContext.target(),
- Construct.Node,
- theContext.enclosingConstruct(Construct.Node))
- .get("type");
-
- for (Iterator<Map.Entry<String, Map>> ci =
- theCapabilities.entrySet().iterator();
- ci.hasNext(); ) {
-
- Map.Entry<String, Map> ce = ci.next();
-
- String capabilityName = ce.getKey();
- Map capabilityDef = findTypeFacetByName(Construct.Node, nodeType,
- Facet.capabilities, capabilityName);
- if (capabilityDef == null) {
- theContext.addError("No capability " + capabilityName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null);
- continue;
- }
-
- checkCapabilityAssignmentDefinition(
- capabilityName, ce.getValue(), capabilityDef, theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkCapabilityAssignmentDefinition(
- String theCapabilityName,
- Map theAssignment,
- Map theDefinition,
- CheckContext theContext) {
-
- theContext.enter(theCapabilityName, Construct.Capability);
- try {
- String capabilityType = (String) theDefinition.get("type");
- //list of property and attributes assignments
- checkFacet(Construct.Capability, theAssignment, capabilityType,
- Facet.properties, theContext);
- checkFacet(Construct.Capability, theAssignment, capabilityType,
- Facet.attributes, theContext);
- } finally {
- theContext.exit();
- }
- }
-
- private void checkTemplateInterfacesDefinition(
- Map<String, Map> theInterfaces,
- CheckContext theContext) {
- theContext.enter(INTERFACES);
- try {
- if (!checkDefinition(INTERFACES, theInterfaces, theContext)) {
- return;
- }
-
-      //the node type for the node template enclosing these interfaces
- String nodeType = (String) catalog.getTemplate(
- theContext.target(),
- Construct.Node,
- theContext.enclosingConstruct(Construct.Node))
- .get("type");
-
- for (Iterator<Map.Entry<String, Map>> ii =
- theInterfaces.entrySet().iterator();
- ii.hasNext(); ) {
-
- Map.Entry<String, Map> ie = ii.next();
-
- String interfaceName = ie.getKey();
- Map interfaceDef = findTypeFacetByName(Construct.Node, nodeType,
- Facet.interfaces, interfaceName);
-
- if (interfaceDef == null) {
- /* this is subject to augmentation: this could be a warning but not an error */
- theContext.addError("No interface " + interfaceName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null);
- continue;
- }
-
- checkTemplateInterfaceDefinition(
- interfaceName, ie.getValue(), interfaceDef, theContext);
- }
- } finally {
- theContext.exit();
- }
- }
-
- private void checkTemplateInterfaceDefinition(
- String theInterfaceName,
- Map theAssignment,
- Map theDefinition,
- CheckContext theContext) {
-
- theContext.enter(theInterfaceName, Construct.Interface);
- try {
- //check the assignment of the common inputs
- checkFacet(Construct.Interface,
- theAssignment,
- (String) theDefinition.get("type"),
- Facet.inputs,
- theContext);
- } finally {
- theContext.exit();
- }
- }
-
-
- @Checks(path = "/topology_template/artifacts")
- protected void check_template_artifacts_definition(
- Map<String, Object> theDefinition,
- CheckContext theContext) {
- theContext.enter(ARTIFACTS);
- theContext.exit();
- }
-
- //generic checking actions, not related to validation rules
-
- /* will check the validity of the type specification for any construct containing a 'type' entry */
- private boolean checkType(Construct theCategory, Map theSpec, CheckContext theContext) {
- String type = (String) theSpec.get("type");
- if (type == null) {
- theContext.addError("Missing type specification", null);
- return false;
- }
-
- if (!catalog.hasType(theCategory, type)) {
- theContext.addError(UNKNOWN + theCategory + " type: " + type, null);
- return false;
- }
-
- return true;
- }
-
- /* the type can be:
- * a known type: predefined or user-defined
- * a collection (list or map) and then check that the entry_schema points to one of the first two cases (is that it?)
- */
- private boolean checkDataType(Map theSpec, CheckContext theContext) {
-
- if (!checkType(Construct.Data, theSpec, theContext)) {
- return false;
- }
-
- String type = (String) theSpec.get("type");
- if (/*isCollectionType(type)*/
- "list".equals(type) || "map".equals(type)) {
- Map entrySchema = (Map) theSpec.get("entry_schema");
- if (entrySchema == null) {
- //maybe issue a warning ?? or is 'string' the default??
- return true;
- }
-
- if (!catalog.hasType(Construct.Data, (String) entrySchema.get("type"))) {
- theContext.addError("Unknown entry_schema type: " + entrySchema, null);
- return false;
- }
- }
- return true;
- }
-
- /* Check that a particular facet (properties, attributes) of a construct type
-   * (node type, capability type, etc.) is correctly (consistently) defined
- * across a type hierarchy
- */
- private boolean checkTypeConstructFacet(Construct theConstruct,
- String theTypeName,
- Map theTypeSpec,
- Facet theFacet,
- CheckContext theContext) {
- Map<String, Map> defs =
- (Map<String, Map>) theTypeSpec.get(theFacet.name());
- if (null == defs) {
- return true;
- }
-
- boolean res = true;
-
- //given that the type was cataloged there will be at least one entry
- Iterator<Map.Entry<String, Map>> i =
- catalog.hierarchy(theConstruct, theTypeName);
- if (!i.hasNext()) {
- theContext.addError(
- "The type " + theTypeName + " needs to be cataloged before attempting 'checkTypeConstruct'", null);
- return false;
- }
- i.next(); //skip self
- while (i.hasNext()) {
- Map.Entry<String, Map> e = i.next();
- Map<String, Map> superDefs = (Map<String, Map>) e.getValue()
- .get(theFacet.name());
- if (null == superDefs) {
- continue;
- }
-      //this computes entries that appear in both collections but with different values, i.e. the re-defined properties
- Map<String, MapDifference.ValueDifference<Map>> diff = Maps.difference(defs, superDefs).entriesDiffering();
-
- for (Iterator<Map.Entry<String, MapDifference.ValueDifference<Map>>> di = diff.entrySet().iterator(); di.hasNext(); ) {
- Map.Entry<String, MapDifference.ValueDifference<Map>> de = di.next();
- MapDifference.ValueDifference<Map> dediff = de.getValue();
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} type {}: {} has been re-defined between the {} types {} and {}", theConstruct, theFacet, de.getKey(), theConstruct, e.getKey(), theTypeName);
-        //for now we just check that the type is consistently re-declared
- if (!this.catalog.isDerivedFrom(theFacet.construct(),
- (String) dediff.leftValue().get("type"),
- (String) dediff.rightValue().get("type"))) {
- theContext.addError(
-              theConstruct + TYPE + theFacet + ", redefinition changed its type: " + de.getKey() + " has been re-defined between the " + theConstruct + " types " + e.getKey() + " and " + theTypeName + " in an incompatible manner", null);
- res = false;
- }
- }
- }
-
- return res;
- }
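  /* A minimal sketch (hypothetical type names, not from this codebase) of the
   * redefinition rule enforced above: a subtype may redeclare a facet entry only
   * with the same type or one derived from it.
   *
   *   my.base.Node:
   *     properties:
   *       size: { type: integer }
   *   my.derived.Node:
   *     derived_from: my.base.Node
   *     properties:
   *       size: { type: string }   # reported: string is not derived from integer
   */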
-
- /*
- * Checks the validity of a certain facet of a construct
- * (properties of a node) across a type hierarchy.
-   * For now the check is limited to verifying that a facet was declared
- * somewhere in the construct type hierarchy (a node template property has
- * been declared in the node type hierarchy).
- *
-   * There are two versions, the more generic one allowing the type to be
-   * specified explicitly.
- */
- private boolean checkFacet(Construct theConstruct,
- Map theSpec,
- Facet theFacet,
- CheckContext theContext) {
- return checkFacet(theConstruct, theSpec, null, theFacet, theContext);
- }
-
- /**
- * We walk the hierarchy and verify the assignment of a property with respect to its definition.
- * We also collect the names of those properties defined as required but for which no assignment was provided.
- */
- private boolean checkFacet(Construct theConstruct,
- Map theSpec,
- String theSpecType,
- Facet theFacet,
- CheckContext theContext) {
-
- Map<String, Map> defs = (Map<String, Map>) theSpec.get(theFacet.name());
- if (null == defs) {
- return true;
- }
-    defs = Maps.newHashMap(defs); //make a mutable copy we can trim as we walk the hierarchy
-
- boolean res = true;
- if (theSpecType == null) {
- theSpecType = (String) theSpec.get("type");
- }
- if (theSpecType == null) {
- theContext.addError("No specification type available", null);
- return false;
- }
-
-    Map<String, Byte> missed = new HashMap<>(); //keeps track of the missing required properties; the value is
-    //0 if a default was found along the hierarchy, 1 otherwise
- Iterator<Map.Entry<String, Map>> i =
- catalog.hierarchy(theConstruct, theSpecType);
- while (i.hasNext() && !defs.isEmpty()) {
- Map.Entry<String, Map> type = i.next();
-
- Map<String, Map> typeDefs = (Map<String, Map>) type.getValue()
- .get(theFacet.name());
- if (null == typeDefs) {
- continue;
- }
-
- MapDifference<String, Map> diff = Maps.difference(defs, typeDefs);
-
-      //these are the ones this type and the spec have in common (same key,
- //different values)
- Map<String, MapDifference.ValueDifference<Map>> facetDefs =
- diff.entriesDiffering();
-      //TODO: this assumes the definition of the facet is not cumulative, i.e. that
-      //subtypes do not merely 'add' to the definition provided by the super-types;
-      //it considers the most specialized definition to stand on its own
- for (MapDifference.ValueDifference<Map> valdef : facetDefs.values()) {
- checkDataValuation(valdef.leftValue(), valdef.rightValue(), theContext);
- }
-
- //remove from properties all those that appear in this type: unfortunately this returns an unmodifiable map ..
- defs = Maps.newHashMap(diff.entriesOnlyOnLeft());
- }
-
- if (!defs.isEmpty()) {
- theContext.addError(UNKNOWN + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") were used: " + defs, null);
- res = false;
- }
-
- if (!missed.isEmpty()) {
- List missedNames =
- missed.entrySet()
- .stream()
- .filter(e -> e.getValue().byteValue() == (byte) 1)
- .map(e -> e.getKey())
- .collect(Collectors.toList());
- if (!missedNames.isEmpty()) {
- theContext.addError(theConstruct + " " + theFacet + " missing required values for: " + missedNames, null);
- res = false;
- }
- }
-
- return res;
- }
-
- /* Augmentation occurs in cases such as the declaration of capabilities within a node type.
-   * In such cases the construct facets (the capability's properties) can redefine (augment) the
- * specification found in the construct type.
- */
- private boolean checkFacetAugmentation(Construct theConstruct,
- Map theSpec,
- Facet theFacet,
- CheckContext theContext) {
- return checkFacetAugmentation(theConstruct, theSpec, null, theFacet, theContext);
- }
-
- private boolean checkFacetAugmentation(Construct theConstruct,
- Map theSpec,
- String theSpecType,
- Facet theFacet,
- CheckContext theContext) {
-
- Map<String, Map> augs = (Map<String, Map>) theSpec.get(theFacet.name());
- if (null == augs) {
- return true;
- }
-
- boolean res = true;
- if (theSpecType == null) {
- theSpecType = (String) theSpec.get("type");
- }
- if (theSpecType == null) {
- theContext.addError("No specification type available", null);
- return false;
- }
-
- for (Map.Entry<String, Map> ae : augs.entrySet()) {
- //make sure it was declared by the type
- Map facetDef = catalog.getFacetDefinition(theConstruct, theSpecType, theFacet, ae.getKey());
- if (facetDef == null) {
- theContext.addError(UNKNOWN + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") were used: " + ae.getKey(), null);
- res = false;
- continue;
- }
-
- //check the compatibility of the augmentation: only the type cannot be changed
- //can the type be changed in a compatible manner ??
- if (!facetDef.get("type").equals(ae.getValue().get("type"))) {
- theContext.addError(theConstruct + " " + theFacet + " " + ae.getKey() + " has a different type than its definition: " + ae.getValue().get("type") + " instead of " + facetDef.get("type"), null);
- res = false;
- continue;
- }
-
- //check any valuation (here just defaults)
- Object defaultValue = ae.getValue().get(DEFAULT);
- if (defaultValue != null) {
- checkDataValuation(defaultValue, ae.getValue(), theContext);
- }
- }
-
- return res;
- }
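  /* Augmentation sketch (hypothetical node type, assuming the standard
   * tosca.capabilities.Scalable declares max_instances as an integer): a node
   * type may re-declare a capability property with the same type and supply a
   * default, which is then value-checked.
   *
   *   capabilities:
   *     scalable:
   *       type: tosca.capabilities.Scalable
   *       properties:
   *         max_instances: { type: integer, default: 10 }   # accepted
   *
   * Re-declaring the property with a different type (e.g. string) would be reported.
   */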
-
- private boolean catalogTypes(Construct theConstruct, Map<String, Map> theTypes, CheckContext theContext) {
-
- boolean res = true;
- for (Map.Entry<String, Map> typeEntry : theTypes.entrySet()) {
- res &= catalogType(theConstruct, typeEntry.getKey(), typeEntry.getValue(), theContext);
- }
-
- return res;
- }
-
- private boolean catalogType(Construct theConstruct,
- String theName,
- Map theDef,
- CheckContext theContext) {
-
- if (!catalog.addType(theConstruct, theName, theDef)) {
- theContext.addError(theConstruct + TYPE + theName + " re-declaration", null);
- return false;
- }
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} type {} has been cataloged", theConstruct, theName);
-
- String parentType = (String) theDef.get("derived_from");
- if (parentType != null && !catalog.hasType(theConstruct, parentType)) {
- theContext.addError(
- theConstruct + TYPE + theName + " indicates a supertype that has not (yet) been declared: " + parentType, null);
- return false;
- }
- return true;
- }
-
- private boolean checkTypeReference(Construct theConstruct,
- CheckContext theContext,
- String... theTypeNames) {
- boolean res = true;
- for (String typeName : theTypeNames) {
- if (!isTypeReference(theConstruct, typeName)) {
- theContext.addError("Reference to " + theConstruct + " type '" + typeName + "' points to unknown type", null);
- res = false;
- }
- }
- return res;
- }
-
- private boolean isTypeReference(Construct theConstruct,
- String theTypeName) {
- return this.catalog.hasType(theConstruct, theTypeName);
- }
-
- /* node or relationship templates */
- private boolean checkTemplateReference(Construct theConstruct,
- CheckContext theContext,
- String... theTemplateNames) {
- boolean res = true;
- for (String templateName : theTemplateNames) {
- if (!isTemplateReference(theConstruct, theContext, templateName)) {
- theContext.addError("Reference to " + theConstruct + " template '" + templateName + "' points to unknown template", null);
- res = false;
- }
- }
- return res;
- }
-
- private boolean isTemplateReference(Construct theConstruct,
- CheckContext theContext,
- String theTemplateName) {
- return this.catalog.hasTemplate(theContext.target(), theConstruct, theTemplateName);
- }
-
- /*
- * For inputs/properties/attributes/(parameters). It is the caller's
-   * responsibility to provide the value (from a 'default', inlined, ..)
-   *
-   * @param theDef the definition of the given construct/facet as it appears in
-   *               its enclosing type definition.
- */
- private boolean checkDataValuation(Object theExpr,
- Map<String, ?> theDef,
- CheckContext theContext) {
- //first check if the expression is a function, if not handle it as a value assignment
- Data.Function f = Data.function(theExpr);
- if (f != null) {
- return f.evaluator()
- .eval(theExpr, theDef, theContext);
- } else {
- Data.Type type = Data.typeByName((String) theDef.get("type"));
- if (type != null) {
- Data.Evaluator evaluator;
-
- evaluator = type.evaluator();
- if (evaluator == null) {
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No value evaluator available for type {}", type);
- } else {
- if ((theExpr != null) && (!evaluator.eval(theExpr, theDef, theContext))) {
- return false;
- }
- }
-
-
- evaluator = type.constraintsEvaluator();
- if (evaluator == null) {
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No constraints evaluator available for type {}", type);
- } else {
- if (theExpr != null) {
- if (!evaluator.eval(theExpr, theDef, theContext)) {
- return false;
- }
- } else {
-          //should have a null value validator
- }
- }
-
- return true;
- } else {
- theContext.addError("Expression " + theExpr + " of " + theDef + " could not be evaluated", null);
- return false;
- }
- }
- }
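  /* The valuation above distinguishes function expressions from plain values; a
   * rough sketch of both forms (property names are hypothetical):
   *
   *   properties:
   *     port: 8080                      # plain value: type and constraints evaluators run
   *     host: { get_input: db_host }    # function: delegated to the function's evaluator
   */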
-
- /**
- * Given the type of a certain construct (node type for example), look up
-   * in one of its facets (properties, capabilities, ..) for entries of the given
-   * facet type (if looking in properties, of the given data type).
- *
- * @return a map of all facets of the given type, will be empty to signal
- * none found
- * <p>
- * Should we look for a facet construct of a compatible type: any type derived
- * from the given facet's construct type??
- */
- private Map<String, Map>
- findTypeFacetByType(Construct theTypeConstruct,
- String theTypeName,
- Facet theFacet,
- String theFacetType) {
-
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType {}, {}: {} {}", theTypeName, theTypeConstruct, theFacetType, theFacet);
- Map<String, Map> res = new HashMap<>();
- Iterator<Map.Entry<String, Map>> i =
- catalog.hierarchy(theTypeConstruct, theTypeName);
- while (i.hasNext()) {
- Map.Entry<String, Map> typeSpec = i.next();
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, Checking {} type {}", theTypeConstruct, typeSpec.getKey());
- Map<String, Map> typeFacet =
- (Map<String, Map>) typeSpec.getValue().get(theFacet.name());
- if (typeFacet == null) {
- continue;
- }
- Iterator<Map.Entry<String, Map>> fi = typeFacet.entrySet().iterator();
- while (fi.hasNext()) {
- Map.Entry<String, Map> facet = fi.next();
- String facetType = (String) facet.getValue().get("type");
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, Checking {} type {}", facet.getKey(), facetType);
-
- //here is the question: do we look for an exact match or ..
- //now we check that the type has a capability of a type compatible
- //(equal or derived from) the given capability type.
- if (catalog.isDerivedFrom(
- theFacet.construct(), facetType, theFacetType)) {
- res.putIfAbsent(facet.getKey(), facet.getValue());
- }
- }
- }
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, found {}", res);
-
- return res;
- }
-
- private Map<String, Object>
- findTypeFacetByName(Construct theTypeConstruct,
- String theTypeName,
- Facet theFacet,
- String theFacetName) {
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByName {} {}", theTypeConstruct, theTypeName);
- Iterator<Map.Entry<String, Map>> i =
- catalog.hierarchy(theTypeConstruct, theTypeName);
- while (i.hasNext()) {
- Map.Entry<String, Map> typeSpec = i.next();
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByName, Checking {} type {}", theTypeConstruct, typeSpec.getKey());
- Map<String, Map> typeFacet =
- (Map<String, Map>) typeSpec.getValue().get(theFacet.name());
- if (typeFacet == null) {
- continue;
- }
- Map<String, Object> facet = typeFacet.get(theFacetName);
- if (facet != null) {
- return facet;
- }
- }
- return null;
- }
-
- /* Requirements are the odd ball as they are structured as a sequence .. */
- private Map<String, Map> findNodeTypeRequirementByName(
- String theNodeType, String theRequirementName) {
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findNodeTypeRequirementByName {}/{}", theNodeType, theRequirementName);
- Iterator<Map.Entry<String, Map>> i =
- catalog.hierarchy(Construct.Node, theNodeType);
- while (i.hasNext()) {
- Map.Entry<String, Map> nodeType = i.next();
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findNodeTypeRequirementByName, Checking node type {}", nodeType.getKey());
- List<Map<String, Map>> nodeTypeRequirements =
- (List<Map<String, Map>>) nodeType.getValue().get(REQUIREMENTS);
- if (nodeTypeRequirements == null) {
- continue;
- }
-
- for (Map<String, Map> requirement : nodeTypeRequirements) {
- Map requirementDef = requirement.get(theRequirementName);
- if (requirementDef != null) {
- return requirementDef;
- }
- }
- }
- return null;
- }
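  /* As noted above, requirements are a sequence of single-entry maps rather than
   * a map keyed by name, hence the linear scan; sketch using standard TOSCA names:
   *
   *   requirements:
   *     - host:       { capability: tosca.capabilities.Container, node: tosca.nodes.Compute }
   *     - dependency: { capability: tosca.capabilities.Node }
   */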
-
/*
   * Additional generic checks to be performed on any definition: construct,
* construct types, etc ..
*/
- public boolean checkDefinition(String theName,
- Map theDefinition,
- CheckContext theContext) {
- if (theDefinition == null) {
- theContext.addError("Missing definition for " + theName, null);
- return false;
- }
-
- if (theDefinition.isEmpty()) {
- theContext.addError("Empty definition for " + theName, null);
- return false;
- }
-
- return true;
- }
- private boolean checkDefinition(String theName,
- List theDefinition,
- CheckContext theContext) {
- if (theDefinition == null) {
- theContext.addError("Missing definition for " + theName, null);
- return false;
- }
-
- if (theDefinition.isEmpty()) {
- theContext.addError("Empty definition for " + theName, null);
- return false;
- }
-
- return true;
- }
/* plenty of one entry maps around */
private Map.Entry mapEntry(Object theMap) {
@@ -2715,16 +609,16 @@ public class Checker {
private String patchWhitespaces(String thePath) {
String[] elems = thePath.split("/");
StringBuilder path = new StringBuilder();
- for (int i = 0; i < elems.length; i++) {
- if (spacePattern.matcher(elems[i]).find()) {
+ Arrays.stream(elems).forEach(elem -> {
+ if (spacePattern.matcher(elem).find()) {
path.append("[@name='")
- .append(elems[i])
+ .append(elem)
.append("']");
} else {
path.append("/")
- .append(elems[i]);
+ .append(elem);
}
- }
+ });
return path.toString();
}
@@ -2816,66 +710,23 @@ public class Checker {
Catalog catalog = new Catalog(doCommons ? commonsCatalog() : null);
if (!doCommons) {
//add core TOSCA types
- for (Data.CoreType type : Data.CoreType.class.getEnumConstants()) {
- catalog.addType(Construct.Data, type.toString(), Collections.emptyMap());
- }
+ Arrays.stream(Data.CoreType.class.getEnumConstants()).forEach(type -> catalog.addType(Construct.Data, type.toString(), Collections.emptyMap()));
}
return catalog;
}
- private boolean invokeHook(String theHookName,
- Class[] theArgTypes,
- Object... theArgs) {
-
- Invokable hookHandler = null;
- try {
- Method m = Checker.class.getDeclaredMethod(
- theHookName, theArgTypes);
- m.setAccessible(true);
- hookHandler = Invokable.from(m);
- } catch (NoSuchMethodException nsmx) {
- //that's ok, not every rule has to have a handler
-      debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "That's ok, not every rule has to have a handler. Method name is:{}. Exception:{}", theHookName, nsmx);
- }
-
- if (hookHandler != null) {
- try {
- hookHandler.invoke(this, theArgs);
- } catch (InvocationTargetException | IllegalAccessException itx) {
- errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invocation failed for hook handler {} {}", theHookName, itx);
- } catch (Exception x) {
- errLogger.log(LogLevel.WARN, this.getClass().getName(), "Hook handler failed {} {}", theHookName, x);
- }
- }
-
- return hookHandler != null;
- }
-
- private void validationHook(String theTiming,
- Object theTarget,
- Rule theRule,
- Validator.ValidationContext theContext) {
-
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "looking up validation handler for {}, {} {}", theRule.getName(), theTiming, theContext.getPath());
- if (!invokeHook(theRule.getName() + "_" + theTiming + "_validation_handler",
- validationHookArgTypes,
- theTarget, theRule, theContext)) {
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "no validation handler for {}", theRule.getName() + "_" + theTiming);
- }
- }
-
private void checks(String theName,
Object theTarget,
CheckContext theContext) {
Map<Method, Object> handlers = checks.row(/*theName*/theContext.getPath(theName));
if (handlers != null) {
- for (Map.Entry<Method, Object> handler : handlers.entrySet()) {
+ handlers.entrySet().forEach(handler -> {
try {
handler.getKey().invoke(handler.getValue(), new Object[]{theTarget, theContext});
} catch (Exception x) {
errLogger.log(LogLevel.WARN, this.getClass().getName(), "Check {} with {} failed {}", theName, handler.getKey(), x);
}
- }
+ });
} else {
boolean hasHook = false;
for (Class[] argTypes : checkHookArgTypes) {
@@ -2897,112 +748,25 @@ public class Checker {
Map<Method, Object> handlers = catalogs.row(/*theName*/theContext.getPath(theName));
if (handlers != null) {
- for (Map.Entry<Method, Object> handler : handlers.entrySet()) {
+ handlers.forEach((key, value) -> {
try {
- handler.getKey().invoke(handler.getValue(), new Object[]{theTarget, theContext});
+ key.invoke(value, theTarget, theContext);
} catch (Exception x) {
- errLogger.log(LogLevel.WARN, this.getClass().getName(), "Cataloging {} with {} failed {}", theName, handler.getKey(), x);
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "Cataloging {} with {} failed {}", theName, key, x);
}
- }
+ });
}
}
- private class TOSCAValidator extends Validator {
-
-    //what we're validating
- private Target target;
-
- /* Some of the TOSCA entries accept a 'short form/notation' instead of the canonical map representation.
- * kwalify cannot easily express these alternatives and as such we handle them here. In the pre-validation phase we detect the presence of a short notation
-and compute the canonical form and validate it. In the post-validation phase we
-substitute the canonical form for the short form so that checking does not have to deal with it.
- */
-
- private Map<String, Object> canonicals = new TreeMap<>();
-
- TOSCAValidator(Target theTarget, Object theSchema) {
- super(theSchema);
- this.target = theTarget;
- }
-
- public Target getTarget() {
- return this.target;
- }
-
- /* hook method called by Validator#validate()
- */
- @Override
- protected boolean preValidationHook(Object value, Rule rule, ValidationContext context) {
-
- validationHook("pre", value, rule, context);
- //short form handling
- String hint = rule.getShort();
- if (value != null &&
- hint != null) {
-
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Attempting canonical at {}, rule {}", context.getPath(), rule.getName());
-
- Object canonical = null;
- //if the canonical form requires a collection
- if (Types.isCollectionType(rule.getType())) {
- //and the actual value isn't one
- if (!(value instanceof Map || value instanceof List)) {
- //used to use singleton map/list here (was good for catching errors)
-          //but there is the possibility of short forms within short forms, so
-          //the created canonicals need to accommodate other values.
- if (Types.isMapType(rule.getType())) {
- canonical = new HashMap();
- ((Map) canonical).put(hint, value);
- } else {
- //the hint is irrelevant here but we should impose a value when the target is a list
- canonical = new LinkedList();
- ((List) canonical).add(value);
- }
- } else {
-          //we can accommodate:
- // map to list of map transformation
- if (!Types.isMapType(rule.getType()) /* a seq */ &&
- value instanceof Map) {
- canonical = new LinkedList();
- ((List) canonical).add(value);
- } else {
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Grammar for rule {} (at {}) would require unsupported short form transformation: {} to {}", rule.getName(), context.getPath(), value.getClass(), rule.getType());
- return false;
- }
- }
-
- int errc = context.errorCount();
- validateRule(canonical, rule, context);
- if (errc != context.errorCount()) {
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Short notation for {} through {} at {} failed validation", rule.getName(), hint, context.getPath());
- } else {
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Short notation for {} through {} at {} passed validation. Canonical form is {}", rule.getName(), hint, context.getPath(), canonical);
-          //replace the short notation with the canonical one so we don't
-          //have to deal with it again during checking
- this.canonicals.put(context.getPath(), canonical);
- return true;
- }
- } else {
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Grammar for rule {} (at {}) would require unsupported short form transformation: {} to {}", rule.getName(), context.getPath(), value.getClass(), rule.getType());
- }
- }
-
- //perform default validation process
- return false;
- }
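  /* Short-form handling sketch (hypothetical rule and hint, not from this change):
   * if a rule's canonical type is a map and its short-form hint is 'type', the
   * scalar notation
   *
   *   some_entry: my.type.Name
   *
   * is wrapped into the canonical
   *
   *   some_entry: { type: my.type.Name }
   *
   * re-validated against the same rule, and recorded in 'canonicals' so that the
   * checking phase only ever sees the canonical form.
   */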
-
- /*
-   * Only gets invoked once the value was successfully verified against the syntax indicated by the given rule.
- */
- @Override
- protected void postValidationHook(Object value,
- Rule rule,
- ValidationContext context) {
- validationHook("post", value, rule, context);
- }
+ public boolean checkDefinition(String workflows, Map theDefinition, CheckContext theContext) {
+ return checkCommon.checkDefinition(workflows, theDefinition, theContext);
+ }
+ public void checkProperties(Map<String, Map> inputs, CheckContext theContext) {
+ propertiesCommon.checkProperties(inputs, theContext, catalog);
}
+
/**
* Maintains state across the checking process.
*/
@@ -3093,23 +857,20 @@ substitute the canonical form for the short form so that checking does not have
private String errorReport(List<Throwable> theErrors) {
StringBuilder sb = new StringBuilder(theErrors.size() + " errors");
- for (Throwable x : theErrors) {
- sb.append("\n");
- if (x instanceof ValidationException) {
- ValidationException vx = (ValidationException) x;
- // .apend("at ")
- // .append(error.getLineNumber())
- // .append(" : ")
- sb.append("[").append(vx.getPath()).append("] ");
- } else if (x instanceof TargetError) {
- TargetError tx = (TargetError) x;
- sb.append("[").append(tx.getLocation()).append("] ");
- }
- sb.append(x.getMessage());
- if (x.getCause() != null) {
- sb.append("\n\tCaused by:\n").append(x.getCause());
- }
- }
+ theErrors.forEach(x -> {
+ sb.append("\n");
+ if (x instanceof ValidationException) {
+ ValidationException vx = (ValidationException) x;
+ sb.append("[").append(vx.getPath()).append("] ");
+ } else if (x instanceof TargetError) {
+ TargetError tx = (TargetError) x;
+ sb.append("[").append(tx.getLocation()).append("] ");
+ }
+ sb.append(x.getMessage());
+ if (x.getCause() != null) {
+ sb.append("\n\tCaused by:\n").append(x.getCause());
+ }
+ });
sb.append("\n");
return sb.toString();
}
@@ -3154,42 +915,42 @@ substitute the canonical form for the short form so that checking does not have
Target tgt = ((TOSCAValidator) theContext.getValidator()).getTarget();
- applyCanonicals(tgt.getTarget(), ((TOSCAValidator) theContext.getValidator()).canonicals, "/imports", true);
+ applyCanonicals(tgt.getTarget(), ((TOSCAValidator) theContext.getValidator()).getCanonicals(), "/imports", true);
- for (ListIterator li = ((List) theValue).listIterator(); li.hasNext();) {
+ for (Object o : ((List) theValue)) {
- Map.Entry importEntry = mapEntry(li.next());
+ Map.Entry importEntry = mapEntry(o);
- Map def = (Map) importEntry.getValue();
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Processing import {}", def);
+ Map def = (Map) importEntry.getValue();
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Processing import {}", def);
- String tfile = (String) def.get("file");
- Target tgti = this.locator.resolve(tfile);
- if (tgti == null) {
- theContext.addError("Failure to resolve import '" + def + "', imported from " + tgt, theRule, null,
- null);
- continue;
- }
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Import {} located at {}", def,
- tgti.getLocation());
+ String tfile = (String) def.get("file");
+ Target tgti = this.locator.resolve(tfile);
+ if (tgti == null) {
+ theContext.addError("Failure to resolve import '" + def + "', " + IMPORTED_FROM + " " + tgt, theRule, null,
+ null);
+ continue;
+ }
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Import {} located at {}", def,
+ tgti.getLocation());
- if (this.catalog.addTarget(tgti, tgt)) {
- // we've never seen this import (location) before
- try {
+ if (this.catalog.addTarget(tgti, tgt)) {
+ // we've never seen this import (location) before
+ try {
List<Target> tgtis = parseTarget(tgti);
if (tgtis.isEmpty()) {
continue;
}
- if (tgtis.size() > 1) {
- theContext.addError(
- "Import '" + tgti + "', imported from " + tgt + ", contains multiple yaml documents",
- theRule, null, null);
- continue;
- }
+ if (tgtis.size() > 1) {
+ theContext.addError(
+ "Import '" + tgti + "', " + IMPORTED_FROM + " " + tgt + ", contains multiple yaml documents",
+ theRule, null, null);
+ continue;
+ }
- tgti = tgtis.get(0);
+ tgti = tgtis.get(0);
if (tgt.getReport().hasErrors()) {
theContext.addError("Failure parsing import '" + tgti + IMPORTED_FROM + tgt, theRule, null,
@@ -3209,59 +970,33 @@ substitute the canonical form for the short form so that checking does not have
}
}
- // replace with the actual location (also because this is what they
- // get
- // index by .. bad, this exposed catalog inner workings)
+					// replace with the actual location (also because this is what they
+					// get
+					// indexed by .. bad, this exposes catalog inner workings)
- def.put("file", tgti.getLocation());
- }
+ def.put("file", tgti.getLocation());
+ }
}
protected void node_templates_post_validation_handler(Object theValue, Rule theRule,
Validator.ValidationContext theContext) {
debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering node_templates_post_validation_handler {}",
theContext.getPath());
- assert "map".equals(theRule.getType());
+ assert MAP.equals(theRule.getType());
Map<String, Map> nodeTemplates = (Map<String, Map>) theValue;
- for (Iterator<Map.Entry<String, Map>> i = nodeTemplates.entrySet().iterator(); i.hasNext();) {
- Map.Entry<String, Map> node = i.next();
- try {
- catalog.addTemplate(((TOSCAValidator) theContext.getValidator()).getTarget(), Construct.Node,
- node.getKey(), node.getValue());
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Node template {} has been cataloged",
- node.getKey());
- } catch (CatalogException cx) {
- theContext.addError(cx.toString(), theRule, node, null);
- }
- }
+ nodeTemplates.entrySet().forEach(node -> {
+ try {
+ catalog.addTemplate(((TOSCAValidator) theContext.getValidator()).getTarget(), Construct.Node,
+ node.getKey(), node.getValue());
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Node template {} has been cataloged",
+ node.getKey());
+ } catch (CatalogException cx) {
+ theContext.addError(cx.toString(), theRule, node, null);
+ }
+ });
}
- protected void inputs_post_validation_handler(Object theValue, Rule theRule,
- Validator.ValidationContext theContext) {
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering inputs_post_validation_handler {}",
- theContext.getPath());
- assert theRule.getType().equals("map");
- // we'll repeat this test during checking but because we index inputs
- // early
- // we need it here too
- if (theValue == null) {
- return;
- }
-
- Map<String, Map> inputs = (Map<String, Map>) theValue;
- for (Iterator<Map.Entry<String, Map>> i = inputs.entrySet().iterator(); i.hasNext();) {
- Map.Entry<String, Map> input = i.next();
- try {
- catalog.addTemplate(((TOSCAValidator) theContext.getValidator()).getTarget(), Construct.Data,
- input.getKey(), input.getValue());
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Input {} has been cataloged",
- input.getKey());
- } catch (CatalogException cx) {
- theContext.addError(cx.toString(), theRule, input, null);
- }
- }
- }
private void process(String theProcessorSpec) throws CheckerException {
@@ -3288,359 +1023,504 @@ substitute the canonical form for the short form so that checking does not have
process(proc);
}
- protected void check_artifact_definition(String theName, Map theDef, CheckContext theContext) {
- theContext.enter(theName, Construct.Artifact);
+ @Catalogs(path = "/artifact_types")
+ protected void catalog_artifact_types(
+ Map<String, Map> theDefinitions, Checker.CheckContext theContext) {
+ theContext.enter(ARTIFACT_TYPES);
+ try {
+ typeCommon.catalogTypes(Construct.Artifact, theDefinitions, theContext, catalog);
+ } finally {
+ theContext.exit();
+ }
+ }
- try {
- if (!checkDefinition(theName, theDef, theContext)) {
- return;
- }
- // check artifact type
- if (!checkType(Construct.Artifact, theDef, theContext)) {
- return;
- }
- } finally {
- theContext.exit();
- }
- }
+ /* */
+ @Checks(path = "/artifact_types")
+ protected void check_artifact_types(
+ Map<String, Map> theDefinition, Checker.CheckContext theContext) {
+ abstractCheck(artifactCommon::checkArtifactTypeDefinition, theDefinition, theContext, ARTIFACT_TYPES);
+ }
- /* */
- protected void check_policy_type_definition(String theName, Map theDefinition, CheckContext theContext) {
- theContext.enter(theName, Construct.Policy);
+ @Catalogs(path = "/capability_types")
+ protected void catalog_capability_types(
+ Map<String, Map> theDefinitions, Checker.CheckContext theContext) {
+ theContext.enter(ConstCommon.CAPABILITY_TYPES);
+ try {
+ typeCommon.catalogTypes(Construct.Capability, theDefinitions, theContext, catalog);
+ } finally {
+ theContext.exit();
+ }
+ }
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
+ /* */
+ @Checks(path = "/capability_types")
+ protected void check_capability_types(
+ Map<String, Map> theTypes, Checker.CheckContext theContext) {
+ abstractCheck(capabilityCommon::checkCapabilityTypeDefinition, theTypes, theContext, CAPABILITY_TYPES);
+ }
- if (theDefinition.containsKey(PROPERTIES)) {
- check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
- checkTypeConstructFacet(Construct.Policy, theName, theDefinition, Facet.properties, theContext);
- }
+ @Catalogs(path = "/data_types")
+ protected void catalog_data_types(
+ Map<String, Map> theDefinitions, Checker.CheckContext theContext) {
+ theContext.enter(DATA_TYPES);
+ try {
+ typeCommon.catalogTypes(Construct.Data, theDefinitions, theContext, catalog);
+ } finally {
+ theContext.exit();
+ }
+ }
- // the targets can be known node types or group types
- List<String> targets = (List<String>) theDefinition.get("targets");
- if (targets != null) {
- if (checkDefinition("targets", targets, theContext)) {
- for (String target : targets) {
- if (!(this.catalog.hasType(Construct.Node, target)
- || this.catalog.hasType(Construct.Group, target))) {
- theContext.addError(
- "The 'targets' entry must contain a reference to a node type or group type, '"
- + target + "' is none of those",
- null);
- }
- }
- }
- }
+ @Checks(path = "/data_types")
+ protected void check_data_types(
+ Map<String, Map> theDefinitions, Checker.CheckContext theContext) {
+ abstractCheck(checkCommon::checkDataTypeDefinition, theDefinitions, theContext, DATA_TYPES);
- } finally {
- theContext.exit();
- }
- }
+ }
- /* */
- protected void check_group_type_definition(String theName, Map theDefinition, CheckContext theContext) {
- theContext.enter(theName, Construct.Group);
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
+ @Catalogs(path = "/group_types")
+ protected void catalog_group_types(
+ Map<String, Map> theDefinitions, Checker.CheckContext theContext) {
+ theContext.enter(GROUP_TYPES);
+ try {
+ typeCommon.catalogTypes(Construct.Group, theDefinitions, theContext, catalog);
+ } finally {
+ theContext.exit();
+ }
+ }
- if (theDefinition.containsKey(PROPERTIES)) {
- check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
- checkTypeConstructFacet(Construct.Group, theName, theDefinition, Facet.properties, theContext);
- }
+ @Checks(path = "/group_types")
+ protected void check_group_types(
+ Map<String, Map> theDefinition, Checker.CheckContext theContext) {
+ abstractCheck(groupCommon::checkGroupTypeDefinition, theDefinition, theContext, GROUP_TYPES);
+ }
- if (theDefinition.containsKey("targets")) {
- checkTypeReference(Construct.Node, theContext,
- ((List<String>) theDefinition.get("targets")).toArray(EMPTY_STRING_ARRAY));
- }
+ @Catalogs(path = "/interface_types")
+ protected void catalog_interface_types(
+ Map<String, Map> theDefinitions, Checker.CheckContext theContext) {
+ theContext.enter(INTERFACE_TYPES);
+ try {
+ typeCommon.catalogTypes(Construct.Interface, theDefinitions, theContext, catalog);
+ } finally {
+ theContext.exit();
+ }
+ }
- // interfaces
- Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get("interfaces");
- if (interfaces != null) {
- try {
- theContext.enter("interfaces");
- for (Iterator<Map.Entry<String, Map>> i = interfaces.entrySet().iterator(); i.hasNext();) {
- Map.Entry<String, Map> e = i.next();
- check_type_interface_definition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
+ @Checks(path = "/interface_types")
+ protected void check_interface_types(
+ Map<String, Map> theDefinition, Checker.CheckContext theContext) {
+ abstractCheck(interfaceCommon::checkInterfaceTypeDefinition, theDefinition, theContext, INTERFACE_TYPES);
+ }
- } finally {
- theContext.exit();
- }
- }
+ @Catalogs(path = "/node_types")
+ protected void catalog_node_types(
+ Map<String, Map> theDefinitions, Checker.CheckContext theContext) {
+ theContext.enter(NODE_TYPES);
+ try {
+ typeCommon.catalogTypes(Construct.Node, theDefinitions, theContext, catalog);
+ } finally {
+ theContext.exit();
+ }
+ }
- /* */
- protected void check_node_type_definition(String theName, Map theDefinition, CheckContext theContext) {
- theContext.enter(theName, Construct.Node);
+ /* */
+ @Checks(path = "/node_types")
+ protected void check_node_types(
+ Map<String, Map> theDefinition, Checker.CheckContext theContext) {
+ abstractCheck(nodeCommon::checkNodeTypeDefinition, theDefinition, theContext, NODE_TYPES);
+ }
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
+ @Catalogs(path = "/policy_types")
+ protected void catalog_policy_types(
+ Map<String, Map> theDefinitions, Checker.CheckContext theContext) {
+ theContext.enter(POLICY_TYPES);
+ try {
+ typeCommon.catalogTypes(Construct.Policy, theDefinitions, theContext, catalog);
+ } finally {
+ theContext.exit();
+ }
+ }
- if (theDefinition.containsKey(PROPERTIES)) {
- check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
- checkTypeConstructFacet(Construct.Node, theName, theDefinition, Facet.properties, theContext);
- }
+ /* */
+ @Checks(path = "/policy_types")
+ protected void check_policy_types(
+ Map<String, Map> theDefinition, Checker.CheckContext theContext) {
+ abstractCheck(policyCommon::checkPolicyTypeDefinition, theDefinition, theContext, POLICY_TYPES);
+ }
- if (theDefinition.containsKey("attributes")) {
- check_properties((Map<String, Map>) theDefinition.get("attributes"), theContext);
- checkTypeConstructFacet(Construct.Node, theName, theDefinition, Facet.attributes, theContext);
- }
+ @Catalogs(path = "/relationship_types")
+ protected void catalog_relationship_types(
+ Map<String, Map> theDefinitions, Checker.CheckContext theContext) {
+ theContext.enter(RELATIONSHIP_TYPES);
+ try {
+ typeCommon.catalogTypes(Construct.Relationship, theDefinitions, theContext, catalog);
+ } finally {
+ theContext.exit();
+ }
+ }
- // requirements
- if (theDefinition.containsKey("requirements")) {
- check_requirements((List<Map>) theDefinition.get("requirements"), theContext);
- }
+ /* */
+ @Checks(path = "/relationship_types")
+ protected void check_relationship_types(
+ Map<String, Map> theDefinition, Checker.CheckContext theContext) {
+ abstractCheck(relationshipCommon::checkRelationshipTypeDefinition, theDefinition, theContext, RELATIONSHIP_TYPES);
+ }
- // capabilities
- if (theDefinition.containsKey(CAPABILITIES)) {
- check_capabilities((Map<String, Map>) theDefinition.get(CAPABILITIES), theContext);
- }
+ @Checks(path = "/topology_template/groups")
+ protected void check_groups(Map<String, Map> theGroups,
+ Checker.CheckContext theContext) {
+ abstractCheck(groupCommon::checkGroupDefinition, theGroups, theContext, GROUPS);
+ }
- // interfaces:
- Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get("interfaces");
- if (interfaces != null) {
- try {
- theContext.enter("interfaces");
- for (Iterator<Map.Entry<String, Map>> i = interfaces.entrySet().iterator(); i.hasNext();) {
- Map.Entry<String, Map> e = i.next();
- check_type_interface_definition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
+ @Checks(path = "/topology_template/policies")
+ protected void check_policies(List<Map<String, Map>> thePolicies,
+ Checker.CheckContext theContext) {
+ theContext.enter(POLICIES);
- // artifacts
+ try {
+ if (!checkCommon.checkDefinition(POLICIES, thePolicies, theContext)) {
+ return;
+ }
- } finally {
- theContext.exit();
- }
- }
+ thePolicies.forEach(policy -> {
+ assert policy.size() == 1;
+ Map.Entry<String, Map> e = policy.entrySet().iterator().next();
+ policyCommon.checkPolicyDefinition(e.getKey(), e.getValue(), theContext, catalog, target);
+ });
+ } finally {
+ theContext.exit();
+ }
+ }
- /* */
- protected void check_interface_type_definition(String theName, Map theDefinition, CheckContext theContext) {
- theContext.enter(theName, Construct.Interface);
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
+ /* */
+ @Checks(path = "/topology_template/substitution_mappings")
+ protected void check_substitution_mappings(Map<String, Object> theSub,
+ Checker.CheckContext theContext) {
+ theContext.enter("substitution_mappings");
+ try {
+ //type is mandatory
+ String type = (String) theSub.get("node_type");
+ if (!typeCommon.checkTypeReference(Construct.Node, theContext, catalog, type)) {
+ theContext.addError("Unknown node type: " + type + "", null);
+ return; //not much to go on with
+ }
- // not much else here: a list of operation_definitions, each with
- // its
- // implementation and inputs
+ Map<String, List> capabilities = (Map<String, List>) theSub.get(ConstCommon.CAPABILITIES);
+ if (null != capabilities) {
+ //the key must be a capability of the type
+ //the value is a 2 element list: first is a local node,
+ //second is the name of one of its capabilities
+ capabilities.entrySet().forEach(ce -> {
+ if (null == facetCommon.findTypeFacetByName(Construct.Node, type,
+ Facet.capabilities, ce.getKey(), catalog)) {
+ theContext.addError("Unknown node type capability: " + ce.getKey() + ", type " + type, null);
+ }
+ if (!checkValidationOnCatalog(ce, "capability", theContext, Construct.Node)) {
+ return;
+ }
+ String targetNode = (String) ce.getValue().get(0);
+ Map<String, Object> targetNodeDef = (Map<String, Object>)
+ catalog.getTemplate(theContext.target(), Construct.Node, targetNode);
+ String targetCapability = (String) ce.getValue().get(1);
+ String targetNodeType = (String) targetNodeDef.get("type");
+ if (null == facetCommon.findTypeFacetByName(Construct.Node, targetNodeType,
+ Facet.capabilities, targetCapability, catalog)) {
+ theContext.addError("Invalid capability mapping capability: " + targetCapability + ". No such capability found for node template " + targetNode + ", of type " + targetNodeType, null);
+ }
+ });
+ }
- // check that common inputs are re-defined in a compatible manner
+ Map<String, List> requirements = (Map<String, List>) theSub.get(ConstCommon.REQUIREMENTS);
+ if (null != requirements) {
+ //the key must be a requirement of the type
+ requirements.entrySet().forEach(re -> {
+ if (null == nodeCommon.findNodeTypeRequirementByName(type, re.getKey(), catalog)) {
+ theContext.addError("Unknown node type requirement: " + re.getKey() + ", type " + type, null);
+ }
+ if (!checkValidationOnCatalog(re, "requirement", theContext, Construct.Node)) {
+ return;
+ }
+ String targetNode = (String) re.getValue().get(0);
+ Map<String, Object> targetNodeDef = (Map<String, Object>)
+ catalog.getTemplate(theContext.target(), Construct.Node, targetNode);
+ String targetRequirement = (String) re.getValue().get(1);
+ String targetNodeType = (String) targetNodeDef.get("type");
+ if (null == nodeCommon.findNodeTypeRequirementByName(targetNodeType, targetRequirement, catalog)) {
+ theContext.addError("Invalid requirement mapping requirement: " + targetRequirement + ". No such requirement found for node template " + targetNode + ", of type " + targetNodeType, null);
+ }
+ });
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
- // check that the interface operations are overwritten in a
- // compatible manner
- // for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet()
+ private boolean checkValidationOnCatalog(Map.Entry<String, List> target, String name, Checker.CheckContext theContext, Construct construct) {
+ List targetList = target.getValue();
+ if (targetList.size() != 2) {
+ theContext.addError("Invalid " + name + " mapping: " + target + ", expecting 2 elements", null);
+ return false;
+ }
- } finally {
- theContext.exit();
- }
- }
+ String targetNode = (String) targetList.get(0);
- /* */
- protected void check_artifact_type_definition(String theName, Map theDefinition, CheckContext theContext) {
- theContext.enter(theName, Construct.Artifact);
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
+ Map<String, Object> targetNodeDef = (Map<String, Object>)
+ catalog.getTemplate(theContext.target(), construct, targetNode);
+ if (null == targetNodeDef) {
+ theContext.addError("Invalid " + name + " mapping node template: " + targetNode, null);
+ return false;
+ }
+ return true;
+ }
- } finally {
- theContext.exit();
- }
- }
+ @Override
+ @Checks(path = "/topology_template/artifacts")
+ public void check_template_artifacts_definition(
+ Map<String, Object> theDefinition,
+ Checker.CheckContext theContext) {
+ theContext.enter(ARTIFACTS);
+ theContext.exit();
+ }
- /* */
- protected void check_relationship_type_definition(String theName, Map theDefinition, CheckContext theContext) {
- theContext.enter(theName, Construct.Relationship);
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
+ @Checks(path = "/topology_template/relationship_templates")
+ protected void check_relationship_templates(Map theTemplates,
+ Checker.CheckContext theContext) {
+ abstractCheck(relationshipCommon::checkRelationshipTemplateDefinition, theTemplates, theContext, RELATIONSHIP_TEMPLATES);
+ }
- if (theDefinition.containsKey(PROPERTIES)) {
- check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
- checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, Facet.properties, theContext);
- }
+ @Checks(path = "topology_template/outputs")
+ protected void check_outputs(Map<String, Map> theOutputs,
+ Checker.CheckContext theContext) {
+ abstractCheck(inputsOutputsCommon::checkOutputDefinition, theOutputs, theContext, OUTPUTS);
+ }
- if (theDefinition.containsKey("attributes")) {
- check_properties((Map<String, Map>) theDefinition.get("attributes"), theContext);
- checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, Facet.attributes, theContext);
- }
+ /* */
+ @Checks(path = "/topology_template/node_templates")
+ protected void check_node_templates(Map<String, Map> theTemplates,
+ Checker.CheckContext theContext) {
+ abstractCheck(nodeCommon::checkNodeTemplateDefinition, theTemplates, theContext, NODE_TEMPLATES);
+ }
- Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get("interfaces");
- if (interfaces != null) {
- theContext.enter("interfaces");
- for (Iterator<Map.Entry<String, Map>> i = interfaces.entrySet().iterator(); i.hasNext();) {
- Map.Entry<String, Map> e = i.next();
- check_type_interface_definition(e.getKey(), e.getValue(), theContext);
- }
- theContext.exit();
- }
+ /* */
+ @Override
+ @Checks(path = "/topology_template/inputs")
+ public void check_inputs(Map<String, Map> theInputs,
+ Checker.CheckContext theContext) {
+ abstractCheck(inputsOutputsCommon::checkInputDefinition, theInputs, theContext, INPUTS);
+ }
- if (theDefinition.containsKey(VALID_TARGET_TYPES)) {
- checkTypeReference(Construct.Capability, theContext,
- ((List<String>) theDefinition.get(VALID_TARGET_TYPES)).toArray(EMPTY_STRING_ARRAY));
- }
- } finally {
- theContext.exit();
- }
- }
+ @Override
+ public void validationHook(String theTiming,
+ Object theTarget,
+ Rule theRule,
+ Validator.ValidationContext theContext) {
- /* */
- protected void check_capability_type_definition(String theName, Map theDefinition, CheckContext theContext) {
- theContext.enter(theName, Construct.Capability);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "looking up validation handler for {}, {} {}", theRule.getName(), theTiming, theContext.getPath());
+ if (!invokeHook(theRule.getName() + "_" + theTiming + "_validation_handler",
+ validationHookArgTypes,
+ theTarget, theRule, theContext)) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "no validation handler for {}", theRule.getName() + "_" + theTiming);
+ }
+ }
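+ // e.g. for the "inputs" rule at timing "post" the name built above resolves to
+ // inputs_post_validation_handler (defined further below), invoked via reflection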
+ private boolean invokeHook(String theHookName,
+ Class[] theArgTypes,
+ Object... theArgs) {
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
+ Invokable hookHandler = null;
+ try {
+ Method m = Checker.class.getDeclaredMethod(
+ theHookName, theArgTypes);
+ m.setAccessible(true);
+ hookHandler = Invokable.from(m);
+ } catch (NoSuchMethodException nsmx) {
+ //that's ok, not every rule has to have a handler
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "That's ok, not every rule has to have a handler. Method name = {}", theHookName);
+ }
- if (theDefinition.containsKey(PROPERTIES)) {
- check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
- checkTypeConstructFacet(Construct.Capability, theName, theDefinition, Facet.properties, theContext);
- }
+ if (hookHandler != null) {
+ try {
+ hookHandler.invoke(this, theArgs);
+ } catch (InvocationTargetException | IllegalAccessException itx) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invocation failed for hook handler {} {}", theHookName, itx);
+ } catch (Exception x) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "Hook handler failed {} {}", theHookName, x);
+ }
+ }
- if (theDefinition.containsKey("attributes")) {
- check_attributes((Map<String, Map>) theDefinition.get("attributes"), theContext);
- checkTypeConstructFacet(Construct.Capability, theName, theDefinition, Facet.attributes, theContext);
- }
+ return hookHandler != null;
+ }
- // valid_source_types: see capability_type_definition
- // unclear: how is the valid_source_types list definition eveolving
- // across
- // the type hierarchy: additive, overwriting, ??
- if (theDefinition.containsKey("valid_source_types")) {
- checkTypeReference(Construct.Node, theContext,
- ((List<String>) theDefinition.get("valid_source_types")).toArray(EMPTY_STRING_ARRAY));
- }
- } finally {
- theContext.exit();
- }
- }
+ public void inputs_post_validation_handler(Object theValue, Rule theRule,
+ Validator.ValidationContext theContext) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering inputs_post_validation_handler {}",
+ theContext.getPath());
+ assert MAP.equals(theRule.getType());
- /* */
- protected void check_data_type_definition(String theName, Map theDefinition, CheckContext theContext) {
- theContext.enter(theName, Construct.Data);
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
+ // we'll repeat this test during checking, but because we index inputs early
+ // we need it here too
+ if (theValue == null) {
+ return;
+ }
- if (theDefinition.containsKey(PROPERTIES)) {
- check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
- checkTypeConstructFacet(Construct.Data, theName, theDefinition, Facet.properties, theContext);
- }
- } finally {
- theContext.exit();
- }
- }
+ Map<String, Map> inputs = (Map<String, Map>) theValue;
+ inputs.entrySet().forEach(input -> {
+ try {
+ catalog.addTemplate(((TOSCAValidator) theContext.getValidator()).getTarget(), Construct.Data,
+ input.getKey(), input.getValue());
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Input {} has been cataloged",
+ input.getKey());
+ } catch (CatalogException cx) {
+ theContext.addError(cx.toString(), theRule, input, null);
+ }
+ });
+ }
- /*
- * top level rule, we collected the whole information set. this is where
- * checking starts
- */
- protected void check_service_template_definition(Map<String, Object> theDef, CheckContext theContext) {
- theContext.enter("");
+ /* */
+ public void check_node_type_definition(String theName, Map theDefinition, Checker.CheckContext theContext) {
+ theContext.enter(theName, Construct.Node);
- if (theDef == null) {
- theContext.addError("Empty template", null);
- return;
- }
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
- // !!! imports need to be processed first now that catalogging takes
- // place at check time!!
+ checkProperties(theName, theDefinition, theContext, PROPERTIES, Construct.Node, Facet.properties);
+ checkProperties(theName, theDefinition, theContext, ATTRIBUTES, Construct.Node, Facet.attributes);
+ // requirements
+ checkRequirements(theDefinition, theContext, requirementCommon);
+ // capabilities
+ checkCapabilities(theDefinition, theContext, capabilityCommon);
+ // interfaces:
+ checkInterfaces(theDefinition, theContext);
- // first catalog whatever it is there to be cataloged so that the checks
- // can perform cross-checking
- for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator(); ri.hasNext();) {
- Map.Entry<String, Object> e = ri.next();
- catalogs(e.getKey(), e.getValue(), theContext);
- }
+ // artifacts
- for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator(); ri.hasNext();) {
- Map.Entry<String, Object> e = ri.next();
- checks(e.getKey(), e.getValue(), theContext);
- }
- theContext.exit();
- }
+ } finally {
+ theContext.exit();
+ }
+ }
- protected void check_attribute_definition(String theName, Map theDefinition, CheckContext theContext) {
- theContext.enter(theName);
- try {
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
- if (!checkDataType(theDefinition, theContext)) {
- return;
- }
- } finally {
- theContext.exit();
- }
- }
+ private void checkCapabilities(Map theDefinition, CheckContext theContext, CapabilityCommon capabilityCommon) {
+ if (theDefinition.containsKey(CAPABILITIES)) {
+ capabilityCommon.check_capabilities((Map<String, Map>) theDefinition.get(CAPABILITIES), theContext, catalog);
+ }
+ }
- public void check_attributes(Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter("attributes");
- try {
- if (!checkDefinition("attributes", theDefinitions, theContext)) {
+ private void checkRequirements(Map theDefinition, CheckContext theContext, RequirementCommon requirementCommon) {
+ if (theDefinition.containsKey(REQUIREMENTS)) {
+ requirementCommon.check_requirements((List<Map>) theDefinition.get(REQUIREMENTS), theContext, catalog);
+ }
+ }
+
+ private void checkProperties(String theName, Map theDefinition, CheckContext theContext, String definition, Construct node, Facet facet) {
+ if (theDefinition.containsKey(definition)) {
+ propertiesCommon.check_properties((Map<String, Map>) theDefinition.get(definition), theContext, catalog);
+ facetCommon.checkTypeConstructFacet(node, theName, theDefinition, facet, theContext, catalog);
+ }
+ }
+
+ /* */
+ public void check_data_type_definition(String theName, Map theDefinition, Checker.CheckContext theContext) {
+ theContext.enter(theName, Construct.Data);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext)) {
return;
}
- for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext();) {
- Map.Entry<String, Map> e = i.next();
- check_attribute_definition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
+ checkProperties(theName, theDefinition, theContext, PROPERTIES, Construct.Data, Facet.properties);
+ } finally {
+ theContext.exit();
+ }
+ }
- protected void check_property_definition(String theName, Map theDefinition, CheckContext theContext) {
- theContext.enter(theName);
- if (!checkDefinition(theName, theDefinition, theContext)) {
- return;
- }
- // check the type
- if (!checkDataType(theDefinition, theContext)) {
- return;
- }
- // check default value is compatible with type
- Object defaultValue = theDefinition.get("default");
- if (defaultValue != null) {
- checkDataValuation(defaultValue, theDefinition, theContext);
- }
+ /* */
+ public void check_capability_type_definition(String theName, Map theDefinition, Checker.CheckContext theContext) {
+ theContext.enter(theName, Construct.Capability);
- theContext.exit();
- }
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
- public void check_properties(Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter(PROPERTIES);
- try {
- if (!checkDefinition(PROPERTIES, theDefinitions, theContext)) {
+ checkProperties(theName, theDefinition, theContext, PROPERTIES, Construct.Capability, Facet.properties);
+
+ if (theDefinition.containsKey(ATTRIBUTES)) {
+ attributesCommon.check_attributes((Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext, catalog);
+ facetCommon.checkTypeConstructFacet(Construct.Capability, theName, theDefinition, Facet.attributes, theContext, catalog);
+ }
+
+ // valid_source_types: see capability_type_definition
+ // unclear: how does the valid_source_types list definition evolve across
+ // the type hierarchy: additive, overwriting, ??
+ if (theDefinition.containsKey("valid_source_types")) {
+ typeCommon.checkTypeReference(Construct.Node, theContext, catalog,
+ ((List<String>) theDefinition.get("valid_source_types")).toArray(EMPTY_STRING_ARRAY));
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+
+ /* */
+ public void check_group_type_definition(String theName, Map theDefinition, Checker.CheckContext theContext) {
+ theContext.enter(theName, Construct.Group);
+
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext)) {
return;
}
- for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext();) {
- Map.Entry<String, Map> e = i.next();
- check_property_definition(e.getKey(), e.getValue(), theContext);
- }
- } finally {
- theContext.exit();
- }
- }
+ checkProperties(theName, theDefinition, theContext, PROPERTIES, Construct.Group, Facet.properties);
+
+ if (theDefinition.containsKey(TARGETS)) {
+ typeCommon.checkTypeReference(Construct.Node, theContext, catalog,
+ ((List<String>) theDefinition.get(TARGETS)).toArray(EMPTY_STRING_ARRAY));
+ }
+
+ // interfaces
+ checkInterfaces(theDefinition, theContext);
+
+ } finally {
+ theContext.exit();
+ }
+ }
+ private void checkInterfaces(Map theDefinition, CheckContext theContext) {
+ Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get(INTERFACES);
+ if (interfaces != null) {
+ try {
+ theContext.enter(INTERFACES);
+ interfaces.forEach((key, value) -> interfaceCommon.check_type_interface_definition(key, value, theContext, catalog));
+ } finally {
+ theContext.exit();
+ }
+ }
+ }
+
+ /* */
+ public void check_interface_type_definition(String theName, Map theDefinition, Checker.CheckContext theContext) {
+ theContext.enter(theName, Construct.Interface);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ // not much else here: a list of operation_definitions, each with
+ // its
+ // implementation and inputs
+
+ // check that common inputs are re-defined in a compatible manner
+
+ // check that the interface operations are overwritten in a
+ // compatible manner
+ // for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet()
+
+ } finally {
+ theContext.exit();
+ }
+ }
}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/IChecker.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/IChecker.java
new file mode 100644
index 0000000..be77500
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/IChecker.java
@@ -0,0 +1,21 @@
+package org.onap.sdc.dcae.checker;
+
+import kwalify.Rule;
+import kwalify.Validator;
+
+import java.util.Map;
+
+public interface IChecker {
+ void check_template_artifacts_definition(
+ Map<String, Object> theDefinition,
+ Checker.CheckContext theContext);
+
+ /* */
+ void check_inputs(Map<String, Map> theInputs,
+ Checker.CheckContext theContext);
+
+ void validationHook(String theTiming,
+ Object theTarget,
+ Rule theRule,
+ Validator.ValidationContext theContext);
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java
index b630564..9dd5db4 100644
--- a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java
@@ -1,27 +1,16 @@
package org.onap.sdc.dcae.checker;
-import org.onap.sdc.common.onaplog.OnapLoggerDebug;
-import org.onap.sdc.common.onaplog.OnapLoggerError;
-
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
import java.io.BufferedReader;
import java.io.IOException;
-
+import java.io.InputStreamReader;
+import java.io.Reader;
import java.net.URI;
-import java.net.URL;
-import java.net.MalformedURLException;
/**
* Represents a yaml document to be parsed/validated/checked
*/
public class Target {
- private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
- private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
-
-
private String name; //maintained mainly for logging
private URI location;
private Object target; //this is the parsed form of the target
@@ -72,7 +61,6 @@ public class Target {
}
public String toString() {
- //return String.format("Target %s (%.20s ...)", this.location, this.target == null ? "" : this.target.toString());
return String.format("Target %s at %s", this.name, this.location);
}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/ArtifactCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/ArtifactCommon.java
new file mode 100644
index 0000000..32a78c1
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/ArtifactCommon.java
@@ -0,0 +1,31 @@
+package org.onap.sdc.dcae.checker.common;
+
+import org.onap.sdc.dcae.checker.Checker;
+import org.onap.sdc.dcae.checker.Construct;
+
+import java.util.Map;
+
+public class ArtifactCommon extends BaseCommon {
+
+ private static ArtifactCommon instance;
+
+ public synchronized static ArtifactCommon getInstance() {
+ if (instance == null)
+ {
+ instance = new ArtifactCommon();
+ }
+ return instance;
+ }
+
+ private ArtifactCommon() {}
+ public void checkArtifactTypeDefinition(String theName,
+ Map theDefinition,
+ Checker.CheckContext theContext) {
+ theContext.enter(theName, Construct.Artifact);
+ try {
+ CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext);
+ } finally {
+ theContext.exit();
+ }
+ }
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/AttributesCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/AttributesCommon.java
new file mode 100644
index 0000000..afe8ada
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/AttributesCommon.java
@@ -0,0 +1,72 @@
+package org.onap.sdc.dcae.checker.common;
+
+import org.onap.sdc.dcae.checker.Catalog;
+import org.onap.sdc.dcae.checker.Checker;
+
+import java.util.Iterator;
+import java.util.Map;
+
+import static org.onap.sdc.dcae.checker.common.ConstCommon.ATTRIBUTES;
+
+public class AttributesCommon extends BaseCommon {
+ private static AttributesCommon instance;
+
+ public synchronized static AttributesCommon getInstance() {
+ if (instance == null)
+ {
+ instance = new AttributesCommon();
+ }
+ return instance;
+ }
+
+ private AttributesCommon() {}
+
+ public void checkAttributes(
+ Map<String, Map> theDefinitions, Checker.CheckContext theContext, Catalog catalog) {
+ theContext.enter(ATTRIBUTES);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(ATTRIBUTES, theDefinitions, theContext)) {
+ return;
+ }
+
+ for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String, Map> e = i.next();
+ checkAttributeDefinition(e.getKey(), e.getValue(), theContext, catalog);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ public void checkAttributeDefinition(
+ String theName, Map theDefinition, Checker.CheckContext theContext, Catalog catalog) {
+ DataCommon dataCommon = DataCommon.getInstance();
+ theContext.enter(theName);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+ if (!dataCommon.checkDataType(theDefinition, theContext, catalog)) {
+ return;
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ public void check_attributes(Map<String, Map> theDefinitions, Checker.CheckContext theContext, Catalog catalog) {
+ theContext.enter("attributes");
+ try {
+ if (!CheckCommon.getInstance().checkDefinition("attributes", theDefinitions, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext();) {
+ Map.Entry<String, Map> e = i.next();
+ checkAttributeDefinition(e.getKey(), e.getValue(), theContext, catalog);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/BaseCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/BaseCommon.java
new file mode 100644
index 0000000..43bda09
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/BaseCommon.java
@@ -0,0 +1,9 @@
+package org.onap.sdc.dcae.checker.common;
+
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+
+public class BaseCommon {
+ protected static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+ protected static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/CapabilityCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/CapabilityCommon.java
new file mode 100644
index 0000000..08a1dd2
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/CapabilityCommon.java
@@ -0,0 +1,190 @@
+package org.onap.sdc.dcae.checker.common;
+
+import org.onap.sdc.dcae.checker.Catalog;
+import org.onap.sdc.dcae.checker.Checker;
+import org.onap.sdc.dcae.checker.Construct;
+import org.onap.sdc.dcae.checker.Facet;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import static org.onap.sdc.dcae.checker.common.ConstCommon.*;
+
+public class CapabilityCommon extends BaseCommon {
+
+ private static CapabilityCommon instance;
+
+ public synchronized static CapabilityCommon getInstance() {
+ if (instance == null)
+ {
+ instance = new CapabilityCommon();
+ }
+ return instance;
+ }
+
+ private CapabilityCommon() {}
+
+
+
+ public void check_capabilities(Map<String, Map> theDefinition,
+ Checker.CheckContext theContext, Catalog catalog) {
+ theContext.enter(CAPABILITIES);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(CAPABILITIES, theDefinition, theContext)) {
+ return;
+ }
+
+ for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String, Map> e = i.next();
+ checkCapabilityDefinition(e.getKey(), e.getValue(), theContext, catalog);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ /* A capability definition appears within the context of a node type */
+ public void checkCapabilityDefinition(String theName,
+ Map theDef,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ TypeCommon typeCommon = TypeCommon.getInstance();
+ theContext.enter(theName, Construct.Capability);
+
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+
+ //check capability type
+ if (!typeCommon.checkType(Construct.Capability, theDef, theContext, catalog)) {
+ return;
+ }
+
+ //check properties
+ if (!facetCommon.checkFacetAugmentation(
+ Construct.Capability, theDef, Facet.properties, theContext, catalog)) {
+ return;
+ }
+
+ //check attributes
+ if (!facetCommon.checkFacetAugmentation(
+ Construct.Capability, theDef, Facet.attributes, theContext, catalog)) {
+ return;
+ }
+
+ //valid_source_types: should point to valid template nodes
+ if (theDef.containsKey(VALID_SOURCE_TYPES)) {
+ typeCommon.checkTypeReference(Construct.Node, theContext, catalog,
+ ((List<String>) theDef.get(VALID_SOURCE_TYPES)).toArray(EMPTY_STRING_ARRAY));
+ //per A.6.1.4 there is an additional check to be performed here:
+ //"Any Node Type (names) provides as values for the valid_source_types keyname SHALL be type-compatible (i.e., derived from the same parent Node Type) with any Node Types defined using the same keyname in the parent Capability Type."
+ }
+ //occurrences: were verified in range_definition
+
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ public void checkCapabilityTypeDefinition(String theName,
+ Map theDefinition,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ PropertiesCommon propertiesCommon = PropertiesCommon.getInstance();
+ AttributesCommon attributesCommon = AttributesCommon.getInstance();
+ TypeCommon typeCommon = TypeCommon.getInstance();
+ theContext.enter(theName, Construct.Capability);
+
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ if (theDefinition.containsKey(PROPERTIES)) {
+ propertiesCommon.checkProperties(
+ (Map<String, Map>) theDefinition.get(PROPERTIES), theContext, catalog);
+ facetCommon.checkTypeConstructFacet(Construct.Capability, theName, theDefinition,
+ Facet.properties, theContext, catalog);
+ }
+
+ if (theDefinition.containsKey(ATTRIBUTES)) {
+ attributesCommon.checkAttributes(
+ (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext, catalog);
+ facetCommon.checkTypeConstructFacet(Construct.Capability, theName, theDefinition,
+ Facet.attributes, theContext, catalog);
+ }
+
+ //valid_source_types: see capability_type_definition
+ //unclear: how does the valid_source_types list definition evolve across
+ //the type hierarchy: additive, overwriting, ??
+ if (theDefinition.containsKey(VALID_SOURCE_TYPES)) {
+ typeCommon.checkTypeReference(Construct.Node, theContext, catalog,
+ ((List<String>) theDefinition.get(VALID_SOURCE_TYPES)).toArray(EMPTY_STRING_ARRAY));
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ public void checkCapabilitiesAssignmentDefinition(
+ Map<String, Map> theCapabilities, Checker.CheckContext theContext, Catalog catalog) {
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ theContext.enter(CAPABILITIES);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(CAPABILITIES, theCapabilities, theContext)) {
+ return;
+ }
+
+ //the node type for the node template enclosing these capabilities
+ String nodeType = (String) catalog.getTemplate(
+ theContext.target(),
+ Construct.Node,
+ theContext.enclosingConstruct(Construct.Node))
+ .get("type");
+
+ for (Iterator<Map.Entry<String, Map>> ci =
+ theCapabilities.entrySet().iterator();
+ ci.hasNext(); ) {
+
+ Map.Entry<String, Map> ce = ci.next();
+
+ String capabilityName = ce.getKey();
+ Map capabilityDef = facetCommon.findTypeFacetByName(Construct.Node, nodeType,
+ Facet.capabilities, capabilityName, catalog);
+ if (capabilityDef == null) {
+ theContext.addError("No capability " + capabilityName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null);
+ continue;
+ }
+
+ checkCapabilityAssignmentDefinition(
+ capabilityName, ce.getValue(), capabilityDef, theContext, catalog);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ public void checkCapabilityAssignmentDefinition(
+ String theCapabilityName,
+ Map theAssignment,
+ Map theDefinition,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ theContext.enter(theCapabilityName, Construct.Capability);
+ try {
+ String capabilityType = (String) theDefinition.get("type");
+ //list of property and attributes assignments
+ facetCommon.checkFacet(Construct.Capability, theAssignment, capabilityType,
+ Facet.properties, theContext, catalog);
+ facetCommon.checkFacet(Construct.Capability, theAssignment, capabilityType,
+ Facet.attributes, theContext, catalog);
+ } finally {
+ theContext.exit();
+ }
+ }
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/CheckCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/CheckCommon.java
new file mode 100644
index 0000000..300420f
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/CheckCommon.java
@@ -0,0 +1,79 @@
+package org.onap.sdc.dcae.checker.common;
+
+import org.onap.sdc.dcae.checker.*;
+
+import java.util.List;
+import java.util.Map;
+
+import static org.onap.sdc.dcae.checker.common.ConstCommon.PROPERTIES;
+
+public class CheckCommon extends BaseCommon {
+ private static CheckCommon instance = null;
+
+ public synchronized static CheckCommon getInstance() {
+ if (instance == null)
+ {
+ instance = new CheckCommon();
+ }
+ return instance;
+ }
+
+ private CheckCommon() {}
+
+ public boolean checkDefinition(String theName,
+ Map theDefinition,
+ Checker.CheckContext theContext) {
+ if (theDefinition == null) {
+ theContext.addError("Missing definition for " + theName, null);
+ return false;
+ }
+
+ if (theDefinition.isEmpty()) {
+ theContext.addError("Empty definition for " + theName, null);
+ return false;
+ }
+
+ return true;
+ }
+
+ public boolean checkDefinition(String theName,
+ List theDefinition,
+ Checker.CheckContext theContext) {
+ if (theDefinition == null) {
+ theContext.addError("Missing definition for " + theName, null);
+ return false;
+ }
+
+ if (theDefinition.isEmpty()) {
+ theContext.addError("Empty definition for " + theName, null);
+ return false;
+ }
+
+ return true;
+ }
+
+ public void checkDataTypeDefinition(String theName,
+ Map theDefinition,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ PropertiesCommon propertiesCommon = PropertiesCommon.getInstance();
+ theContext.enter(theName, Construct.Data);
+ try {
+ if (!checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ if (theDefinition.containsKey(PROPERTIES)) {
+ propertiesCommon.checkProperties(
+ (Map<String, Map>) theDefinition.get(PROPERTIES), theContext, catalog);
+ facetCommon.checkTypeConstructFacet(Construct.Data, theName, theDefinition,
+ Facet.properties, theContext, catalog);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/ConstCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/ConstCommon.java
new file mode 100644
index 0000000..2a57e77
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/ConstCommon.java
@@ -0,0 +1,38 @@
+package org.onap.sdc.dcae.checker.common;
+
+public class ConstCommon {
+ public static final String PROPERTIES = "properties";
+ public static final String TYPE = " type ";
+ public static final String DEFAULT = "default";
+ public static final String ATTRIBUTES = "attributes";
+ public static final String DATA_TYPES = "data_types";
+ public static final String VALID_SOURCE_TYPES = "valid_source_types";
+ public static final String RELATIONSHIP_TYPES = "relationship_types";
+ public static final String INTERFACES = "interfaces";
+ public static final String VALID_TARGET_TYPES = "valid_target_types";
+ public static final String ARTIFACT_TYPES = "artifact_types";
+ public static final String INTERFACE_TYPES = "interface_types";
+ public static final String NODE_TYPES = "node_types";
+ public static final String REQUIREMENTS = "requirements";
+ public static final String CAPABILITIES = "capabilities";
+ public static final String GROUP_TYPES = "group_types";
+ public static final String TARGETS_CONSTANT = "targets";
+ public static final String POLICY_TYPES = "policy_types";
+ public static final String IS_NONE_OF_THOSE = "' is none of those";
+ public static final String INPUTS = "inputs";
+ public static final String CAPABILITY = "capability";
+ public static final String ARTIFACTS = "artifacts";
+ public static final String WAS_DEFINED_FOR_THE_NODE_TYPE = " was defined for the node type ";
+ public static final String UNKNOWN = "Unknown ";
+ public static final String CAPABILITY_TYPES = "capability_types";
+ public static final String GROUPS = "groups";
+ public static final String RELATIONSHIP_TEMPLATES = "relationship_templates";
+ public static final String OUTPUTS = "outputs";
+ public static final String NODE_TEMPLATES = "node_templates";
+ public static final String POLICIES = "policies";
+ public static final String IMPORTED_FROM = "imported from";
+ public static final String MAP = "map";
+ public static final String TARGETS = "targets";
+ public static final String[] EMPTY_STRING_ARRAY = new String[0];
+ private ConstCommon() {}
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/DataCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/DataCommon.java
new file mode 100644
index 0000000..52ceef3
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/DataCommon.java
@@ -0,0 +1,102 @@
+package org.onap.sdc.dcae.checker.common;
+
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.checker.*;
+
+import java.util.Map;
+
+public class DataCommon extends BaseCommon {
+
+ private static DataCommon instance;
+
+ public synchronized static DataCommon getInstance() {
+ if (instance == null)
+ {
+ instance = new DataCommon();
+ }
+ return instance;
+ }
+
+ private DataCommon() {}
+
+ /* the type can be:
+ * a known type: predefined or user-defined
+ * a collection (list or map), in which case we check that the entry_schema points to one of the first two cases (is that it?)
+ */
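+ // e.g. (illustrative only) a spec of { type: map, entry_schema: { type: string } } is
+ // accepted, while an entry_schema whose type is not cataloged is reported as an error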
+ public boolean checkDataType(Map theSpec, Checker.CheckContext theContext, Catalog catalog) {
+ TypeCommon typeCommon = TypeCommon.getInstance();
+ if (!typeCommon.checkType(Construct.Data, theSpec, theContext, catalog)) {
+ return false;
+ }
+
+ String type = (String) theSpec.get("type");
+ if (/*isCollectionType(type)*/
+ "list".equals(type) || "map".equals(type)) {
+ Map entrySchema = (Map) theSpec.get("entry_schema");
+ if (entrySchema == null) {
+ //maybe issue a warning ?? or is 'string' the default??
+ return true;
+ }
+
+ if (!catalog.hasType(Construct.Data, (String) entrySchema.get("type"))) {
+ theContext.addError("Unknown entry_schema type: " + entrySchema, null);
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /*
+ * For inputs/properties/attributes/(parameters). It is the caller's
+ * responsibility to provide the value (from a 'default', inlined, ..)
+ *
+ * @param theDef the definition of the given construct/facet as it appears in
+ * its enclosing type definition.
+ * @param
+ */
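+ // e.g. (illustrative only) a 'default' of 3 declared for a definition of type integer is
+ // checked by the integer value evaluator, and any declared constraints by the constraints evaluator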
+ public boolean checkDataValuation(Object theExpr,
+ Map<String, ?> theDef,
+ Checker.CheckContext theContext) {
+ //first check if the expression is a function, if not handle it as a value assignment
+ Data.Function f = Data.function(theExpr);
+ if (f != null) {
+ return f.evaluator()
+ .eval(theExpr, theDef, theContext);
+ } else {
+ Data.Type type = Data.typeByName((String) theDef.get("type"));
+ if (type != null) {
+ Data.Evaluator evaluator;
+
+ evaluator = type.evaluator();
+ if (evaluator == null) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No value evaluator available for type {}", type);
+ } else {
+ if ((theExpr != null) && (!evaluator.eval(theExpr, theDef, theContext))) {
+ return false;
+ }
+ }
+
+
+ evaluator = type.constraintsEvaluator();
+ if (evaluator == null) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No constraints evaluator available for type {}", type);
+ } else {
+ if (theExpr != null) {
+ if (!evaluator.eval(theExpr, theDef, theContext)) {
+ return false;
+ }
+ } else {
+ //should have a null value validator
+ }
+ }
+
+ return true;
+ } else {
+ theContext.addError("Expression " + theExpr + " of " + theDef + " could not be evaluated", null);
+ return false;
+ }
+ }
+ }
+
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/FacetCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/FacetCommon.java
new file mode 100644
index 0000000..93c8416
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/FacetCommon.java
@@ -0,0 +1,320 @@
+package org.onap.sdc.dcae.checker.common;
+
+import com.google.common.collect.MapDifference;
+import com.google.common.collect.Maps;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.checker.*;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import static org.onap.sdc.dcae.checker.common.ConstCommon.DEFAULT;
+import static org.onap.sdc.dcae.checker.common.ConstCommon.TYPE;
+import static org.onap.sdc.dcae.checker.common.ConstCommon.UNKNOWN;
+
+public class FacetCommon extends BaseCommon {
+
+ private static FacetCommon instance;
+
+ public synchronized static FacetCommon getInstance() {
+ if (instance == null)
+ {
+ instance = new FacetCommon();
+ }
+ return instance;
+ }
+
+ private FacetCommon() {}
+ /**
+ * Given the type of a certain construct (node type for example), look up
+ * in one of its facets (properties, capabilities, ..) for entries of the given
+ * facet type (when looking in properties, of the given data type).
+ *
+ * @return a map of all facets of the given type, will be empty to signal
+ * none found
+ * <p>
+ * Should we look for a facet construct of a compatible type: any type derived
+ * from the given facet's construct type??
+ */
+ public Map<String, Map>
+ findTypeFacetByType(Construct theTypeConstruct,
+ String theTypeName,
+ Facet theFacet,
+ String theFacetType,
+ Catalog catalog) {
+
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType {}, {}: {} {}", theTypeName, theTypeConstruct, theFacetType, theFacet);
+ Map<String, Map> res = new HashMap<>();
+ Iterator<Map.Entry<String, Map>> i =
+ catalog.hierarchy(theTypeConstruct, theTypeName);
+ while (i.hasNext()) {
+ Map.Entry<String, Map> typeSpec = i.next();
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, Checking {} type {}", theTypeConstruct, typeSpec.getKey());
+ Map<String, Map> typeFacet =
+ (Map<String, Map>) typeSpec.getValue().get(theFacet.name());
+ if (typeFacet == null) {
+ continue;
+ }
+ Iterator<Map.Entry<String, Map>> fi = typeFacet.entrySet().iterator();
+ while (fi.hasNext()) {
+ Map.Entry<String, Map> facet = fi.next();
+ String facetType = (String) facet.getValue().get("type");
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, Checking {} type {}", facet.getKey(), facetType);
+
+ //here is the question: do we look for an exact match or ..
+ //now we check that the type has a capability of a type compatible
+ //(equal or derived from) the given capability type.
+ if (catalog.isDerivedFrom(
+ theFacet.construct(), facetType, theFacetType)) {
+ res.putIfAbsent(facet.getKey(), facet.getValue());
+ }
+ }
+ }
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, found {}", res);
+
+ return res;
+ }
+
+ public Map<String, Object>
+ findTypeFacetByName(Construct theTypeConstruct,
+ String theTypeName,
+ Facet theFacet,
+ String theFacetName,
+ Catalog catalog) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByName {} {}", theTypeConstruct, theTypeName);
+ Iterator<Map.Entry<String, Map>> i =
+ catalog.hierarchy(theTypeConstruct, theTypeName);
+ while (i.hasNext()) {
+ Map.Entry<String, Map> typeSpec = i.next();
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByName, Checking {} type {}", theTypeConstruct, typeSpec.getKey());
+ Map<String, Map> typeFacet =
+ (Map<String, Map>) typeSpec.getValue().get(theFacet.name());
+ if (typeFacet == null) {
+ continue;
+ }
+ Map<String, Object> facet = typeFacet.get(theFacetName);
+ if (facet != null) {
+ return facet;
+ }
+ }
+ return null;
+ }
+
+ /* Check that a particular facet (properties, attributes) of a construct type
+ * (node type, capability type, etc) is correctly (consistently) defined
+ * across a type hierarchy
+ */
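+ // e.g. (illustrative only) if a type re-declares a property of one of its super-types,
+ // the re-declared 'type' must be equal to or derived from the one in the super-type,
+ // otherwise an error is reported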
+ public boolean checkTypeConstructFacet(Construct theConstruct,
+ String theTypeName,
+ Map theTypeSpec,
+ Facet theFacet,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ Map<String, Map> defs =
+ (Map<String, Map>) theTypeSpec.get(theFacet.name());
+ if (null == defs) {
+ return true;
+ }
+
+ boolean res = true;
+
+ //given that the type was cataloged there will be at least one entry
+ Iterator<Map.Entry<String, Map>> i =
+ catalog.hierarchy(theConstruct, theTypeName);
+ if (!i.hasNext()) {
+ theContext.addError(
+ "The type " + theTypeName + " needs to be cataloged before attempting 'checkTypeConstruct'", null);
+ return false;
+ }
+ i.next(); //skip self
+ while (i.hasNext()) {
+ Map.Entry<String, Map> e = i.next();
+ Map<String, Map> superDefs = (Map<String, Map>) e.getValue()
+ .get(theFacet.name());
+ if (null == superDefs) {
+ continue;
+ }
+ //this computes entries that appear on both collections but with different values, i.e. the re-defined properties
+ Map<String, MapDifference.ValueDifference<Map>> diff = Maps.difference(defs, superDefs).entriesDiffering();
+
+ for (Iterator<Map.Entry<String, MapDifference.ValueDifference<Map>>> di = diff.entrySet().iterator(); di.hasNext(); ) {
+ Map.Entry<String, MapDifference.ValueDifference<Map>> de = di.next();
+ MapDifference.ValueDifference<Map> dediff = de.getValue();
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} type {}: {} has been re-defined between the {} types {} and {}", theConstruct, theFacet, de.getKey(), theConstruct, e.getKey(), theTypeName);
+ //for now we just check that the type is consistently re-declared
+ if (!catalog.isDerivedFrom(theFacet.construct(),
+ (String) dediff.leftValue().get("type"),
+ (String) dediff.rightValue().get("type"))) {
+ theContext.addError(
+ theConstruct + TYPE + theFacet + ", redefiniton changed its type: " + de.getKey() + " has been re-defined between the " + theConstruct + " types " + e.getKey() + " and " + theTypeName + " in an incompatible manner", null);
+ res = false;
+ }
+ }
+ }
+
+ return res;
+ }
+
+ /* Augmentation occurs in cases such as the declaration of capabilities within a node type.
+ * In such cases the construct facets (the capability's properties) can redefine (augment) the
+ * specification found in the construct type.
+ */
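+ // e.g. (illustrative only) a capability declared within a node type may re-state a property
+ // of its capability type to supply a new 'default', but it may not change the property's 'type'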
+ public boolean checkFacetAugmentation(Construct theConstruct,
+ Map theSpec,
+ Facet theFacet,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ return checkFacetAugmentation(theConstruct, theSpec, null, theFacet, theContext, catalog);
+ }
+
+ public boolean checkFacetAugmentation(Construct theConstruct,
+ Map theSpec,
+ String theSpecType,
+ Facet theFacet,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+
+ Map<String, Map> augs = (Map<String, Map>) theSpec.get(theFacet.name());
+ if (null == augs) {
+ return true;
+ }
+
+ boolean res = true;
+ if (theSpecType == null) {
+ theSpecType = (String) theSpec.get("type");
+ }
+ if (theSpecType == null) {
+ theContext.addError("No specification type available", null);
+ return false;
+ }
+
+ for (Iterator<Map.Entry<String, Map>> ai = augs.entrySet().iterator(); ai.hasNext(); ) {
+ Map.Entry<String, Map> ae = ai.next();
+
+ //make sure it was declared by the type
+ Map facetDef = catalog.getFacetDefinition(theConstruct, theSpecType, theFacet, ae.getKey());
+ if (facetDef == null) {
+ theContext.addError(UNKNOWN + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") were used: " + ae.getKey(), null);
+ res = false;
+ continue;
+ }
+
+ //check the compatibility of the augmentation: only the type cannot be changed
+ //can the type be changed in a compatible manner ??
+ if (!facetDef.get("type").equals(ae.getValue().get("type"))) {
+ theContext.addError(theConstruct + " " + theFacet + " " + ae.getKey() + " has a different type than its definition: " + ae.getValue().get("type") + " instead of " + facetDef.get("type"), null);
+ res = false;
+ continue;
+ }
+ DataCommon dataCommon = DataCommon.getInstance();
+ //check any valuation (here just defaults)
+ Object defaultValue = ae.getValue().get(DEFAULT);
+ if (defaultValue != null) {
+ dataCommon.checkDataValuation(defaultValue, ae.getValue(), theContext);
+ }
+ }
+
+ return res;
+ }
+
+ /*
+ * Checks the validity of a certain facet of a construct
+ * (properties of a node) across a type hierarchy.
+ * For now the check is limited to verifying that a facet was declared
+ * somewhere in the construct type hierarchy (a node template property has
+ * been declared in the node type hierarchy).
+ *
+ * Two versions are provided; the more generic one allows the type
+ * to be specified explicitly.
+ */
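+ // e.g. (illustrative only) every property assigned in a node template must have been declared
+ // somewhere in its node type hierarchy; anything left unmatched is reported as unknown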
+ public boolean checkFacet(Construct theConstruct,
+ Map theSpec,
+ Facet theFacet,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ return checkFacet(theConstruct, theSpec, null, theFacet, theContext, catalog);
+ }
+
+ /**
+ * We walk the hierarchy and verify the assignment of a property with respect to its definition.
+ * We also collect the names of those properties defined as required but for which no assignment was provided.
+ */
+ public boolean checkFacet(Construct theConstruct,
+ Map theSpec,
+ String theSpecType,
+ Facet theFacet,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+
+ Map<String, Map> defs = (Map<String, Map>) theSpec.get(theFacet.name());
+ if (null == defs) {
+ return true;
+ }
+ defs = Maps.newHashMap(defs); //
+
+ boolean res = true;
+ if (theSpecType == null) {
+ theSpecType = (String) theSpec.get("type");
+ }
+ if (theSpecType == null) {
+ theContext.addError("No specification type available", null);
+ return false;
+ }
+
+ Map<String, Byte> missed = new HashMap<>(); //keeps track of the missing required properties; the value is
+ //0 (rather than 1) if a default was found along the hierarchy
+ Iterator<Map.Entry<String, Map>> i =
+ catalog.hierarchy(theConstruct, theSpecType);
+ while (i.hasNext() && !defs.isEmpty()) {
+ Map.Entry<String, Map> type = i.next();
+
+ Map<String, Map> typeDefs = (Map<String, Map>) type.getValue()
+ .get(theFacet.name());
+ if (null == typeDefs) {
+ continue;
+ }
+
+ MapDifference<String, Map> diff = Maps.difference(defs, typeDefs);
+
+ //these are the ones this type and the spec have in common (same key,
+ //different values)
+ Map<String, MapDifference.ValueDifference<Map>> facetDefs =
+ diff.entriesDiffering();
+ //TODO: this assumes the definition of the facet is not cumulative (i.e.
+ //subtypes 'adding' something to the definition provided by the super-types);
+ //it considers the most specialized definition to stand on its own
+ for (MapDifference.ValueDifference<Map> valdef : facetDefs.values()) {
+ DataCommon dataCommon = DataCommon.getInstance();
+ dataCommon.checkDataValuation(valdef.leftValue(), valdef.rightValue(), theContext);
+ }
+
+ //remove from properties all those that appear in this type: unfortunately this returns an unmodifiable map ..
+ defs = Maps.newHashMap(diff.entriesOnlyOnLeft());
+ }
+
+ if (!defs.isEmpty()) {
+ theContext.addError(UNKNOWN + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") were used: " + defs, null);
+ res = false;
+ }
+
+ if (!missed.isEmpty()) {
+ List missedNames =
+ missed.entrySet()
+ .stream()
+ .filter(e -> e.getValue().byteValue() == (byte) 1)
+ .map(e -> e.getKey())
+ .collect(Collectors.toList());
+ if (!missedNames.isEmpty()) {
+ theContext.addError(theConstruct + " " + theFacet + " missing required values for: " + missedNames, null);
+ res = false;
+ }
+ }
+
+ return res;
+ }
+
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/GroupCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/GroupCommon.java
new file mode 100644
index 0000000..13ee692
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/GroupCommon.java
@@ -0,0 +1,118 @@
+package org.onap.sdc.dcae.checker.common;
+
+import org.onap.sdc.dcae.checker.*;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import static org.onap.sdc.dcae.checker.common.ConstCommon.*;
+
+public class GroupCommon extends BaseCommon{
+ private static GroupCommon instance;
+
+ public synchronized static GroupCommon getInstance() {
+ if (instance == null)
+ {
+ instance = new GroupCommon();
+ }
+ return instance;
+ }
+
+ private GroupCommon() {}
+
+ public void checkGroupTypeDefinition(String theName,
+ Map theDefinition,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ PropertiesCommon propertiesCommon = PropertiesCommon.getInstance();
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ TypeCommon typeCommon = TypeCommon.getInstance();
+ theContext.enter(theName, Construct.Group);
+
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ if (theDefinition.containsKey(PROPERTIES)) {
+ propertiesCommon.checkProperties(
+ (Map<String, Map>) theDefinition.get(PROPERTIES), theContext, catalog);
+ facetCommon.checkTypeConstructFacet(Construct.Group, theName, theDefinition,
+ Facet.properties, theContext, catalog);
+ }
+
+ if (theDefinition.containsKey(TARGETS_CONSTANT)) {
+ typeCommon.checkTypeReference(Construct.Node, theContext, catalog,
+ ((List<String>) theDefinition.get(TARGETS_CONSTANT)).toArray(EMPTY_STRING_ARRAY));
+ }
+ InterfaceCommon interfaceCommon = InterfaceCommon.getInstance();
+ //interfaces
+ Map<String, Map> interfaces =
+ (Map<String, Map>) theDefinition.get(INTERFACES);
+ interfaceCommon.checkMapTypeInterfaceDefinition(theContext, interfaces, catalog);
+
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ public void checkGroupDefinition(String theName,
+ Map theDef,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ TypeCommon typeCommon = TypeCommon.getInstance();
+ theContext.enter(theName);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+
+ if (!typeCommon.checkType(Construct.Group, theDef, theContext, catalog)) {
+ return;
+ }
+
+ if (!facetCommon.checkFacet(
+ Construct.Group, theDef, Facet.properties, theContext, catalog)) {
+ return;
+ }
+
+ if (theDef.containsKey(TARGETS_CONSTANT)) {
+
+ List<String> targetsTypes = (List<String>)
+ catalog.getTypeDefinition(Construct.Group,
+ (String) theDef.get("type"))
+ .get(TARGETS_CONSTANT);
+
+ List<String> targets = (List<String>) theDef.get(TARGETS_CONSTANT);
+ for (String targetItr : targets) {
+ if (!catalog.hasTemplate(theContext.target(), Construct.Node, targetItr)) {
+ theContext.addError("The 'targets' entry must contain a reference to a node template, '" + targetItr + "' is not one", null);
+ } else {
+ if (targetsTypes != null) {
+ String targetType = (String)
+ catalog.getTemplate(theContext.target(), Construct.Node, targetItr).get("type");
+
+ boolean found = false;
+ for (String type : targetsTypes) {
+ found = catalog
+ .isDerivedFrom(Construct.Node, targetType, type);
+ if (found) {
+ break;
+ }
+ }
+
+ if (!found) {
+ theContext.addError("The 'targets' entry '" + targetItr + "' is not type compatible with any of types specified in policy type targets", null);
+ }
+ }
+ }
+ }
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/InputsOutputsCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/InputsOutputsCommon.java
new file mode 100644
index 0000000..04fd4b1
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/InputsOutputsCommon.java
@@ -0,0 +1,66 @@
+package org.onap.sdc.dcae.checker.common;
+
+import kwalify.Rule;
+import kwalify.Validator;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.checker.*;
+import org.onap.sdc.dcae.checker.validation.TOSCAValidator;
+
+import java.util.Iterator;
+import java.util.Map;
+
+import static org.onap.sdc.dcae.checker.common.ConstCommon.DEFAULT;
+
+public class InputsOutputsCommon extends BaseCommon {
+
+ private static InputsOutputsCommon instance;
+
+ public synchronized static InputsOutputsCommon getInstance() {
+ if (instance == null)
+ {
+ instance = new InputsOutputsCommon();
+ }
+ return instance;
+ }
+
+ private InputsOutputsCommon() {}
+
+ public void checkInputDefinition(String theName,
+ Map theDef,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ DataCommon dataCommon = DataCommon.getInstance();
+ theContext.enter(theName);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+ //
+ if (!dataCommon.checkDataType(theDef, theContext, catalog)) {
+ return;
+ }
+ //check default value
+ Object defaultValue = theDef.get(DEFAULT);
+ if (defaultValue != null) {
+ dataCommon.checkDataValuation(defaultValue, theDef, theContext);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+
+
+ public void checkOutputDefinition(String theName,
+ Map theDef,
+ Checker.CheckContext theContext) {
+ theContext.enter(theName);
+ try {
+ CheckCommon.getInstance().checkDefinition(theName, theDef, theContext);
+ //check the expression
+ } finally {
+ theContext.exit();
+ }
+ }
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/InterfaceCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/InterfaceCommon.java
new file mode 100644
index 0000000..3d7dfef
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/InterfaceCommon.java
@@ -0,0 +1,164 @@
+package org.onap.sdc.dcae.checker.common;
+
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.checker.*;
+
+import java.util.Iterator;
+import java.util.Map;
+
+import static org.onap.sdc.dcae.checker.common.ConstCommon.INPUTS;
+import static org.onap.sdc.dcae.checker.common.ConstCommon.INTERFACES;
+import static org.onap.sdc.dcae.checker.common.ConstCommon.WAS_DEFINED_FOR_THE_NODE_TYPE;
+
+public class InterfaceCommon extends BaseCommon {
+ private static InterfaceCommon instance;
+
+ public synchronized static void init(IChecker checker) {
+ if (instance == null)
+ {
+ instance = new InterfaceCommon();
+ instance.setChecker(checker);
+ }
+ }
+
+ public synchronized static InterfaceCommon getInstance() {
+ if (instance == null)
+ {
+ errLogger.log(LogLevel.ERROR, InterfaceCommon.class.getSimpleName(),"Need to call init before");
+ }
+ return instance;
+ }
+
+ private InterfaceCommon() {}
+
+ private IChecker checker;
+
+ //checking of actual constructs (capability, ..)
+
+ /* First, interface types do not have a hierarchical organization (no
+ * 'derived_from' in an interface type definition).
+ * So, when interfaces (with a certain type) are defined in a node
+ * or relationship type (and they can define new? operations), what
+ * is there to check:
+ * Can operations here re-define their declaration from the interface
+ * type spec?? From A.5.11.3 we are to understand that override is
+ * the default interpretation .. but they talk about sub-classing, so it
+ * is probably intended as a reference to the node or relationship type
+ * hierarchy and not the interface type (no hierarchy there).
+ * Or is this a case of augmentation where new operations can be added??
+ */
+ public void check_type_interface_definition(
+ String theName, Map theDef, Checker.CheckContext theContext, Catalog catalog) {
+ TypeCommon typeCommon = TypeCommon.getInstance();
+ theContext.enter(theName);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+
+ if (!typeCommon.checkType(Construct.Interface, theDef, theContext, catalog)) {
+ return;
+ }
+
+ if (theDef.containsKey(INPUTS)) {
+ checker.check_inputs((Map<String, Map>) theDef.get(INPUTS), theContext);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
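+ // Validates the interface assignments of a node template against the interfaces declared by its node type; unknown interface names are reported as errors.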
+ public void checkTemplateInterfacesDefinition(
+ Map<String, Map> theInterfaces,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ theContext.enter(INTERFACES);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(INTERFACES, theInterfaces, theContext)) {
+ return;
+ }
+
+ //the node type for the node template enclosing these interfaces
+ String nodeType = (String) catalog.getTemplate(
+ theContext.target(),
+ Construct.Node,
+ theContext.enclosingConstruct(Construct.Node))
+ .get("type");
+
+ for (Iterator<Map.Entry<String, Map>> ii =
+ theInterfaces.entrySet().iterator();
+ ii.hasNext(); ) {
+
+ Map.Entry<String, Map> ie = ii.next();
+
+ String interfaceName = ie.getKey();
+ Map interfaceDef = facetCommon.findTypeFacetByName(Construct.Node, nodeType,
+ Facet.interfaces, interfaceName, catalog);
+
+ if (interfaceDef == null) {
+ /* this is subject to augmentation: arguably this should be a warning rather than an error */
+ theContext.addError("No interface " + interfaceName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null);
+ continue;
+ }
+
+ checkTemplateInterfaceDefinition(
+ interfaceName, ie.getValue(), interfaceDef, theContext, catalog);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ public void checkTemplateInterfaceDefinition(
+ String theInterfaceName,
+ Map theAssignment,
+ Map theDefinition,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ theContext.enter(theInterfaceName, Construct.Interface);
+ try {
+ //check the assignment of the common inputs
+ facetCommon.checkFacet(Construct.Interface,
+ theAssignment,
+ (String) theDefinition.get("type"),
+ Facet.inputs,
+ theContext,
+ catalog);
+ } finally {
+ theContext.exit();
+ }
+ }
+
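+ // Validates each entry of a type's 'interfaces' map as a type-level interface definition.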
+ public void checkMapTypeInterfaceDefinition(Checker.CheckContext theContext, Map<String, Map> interfaces, Catalog catalog) {
+ if (interfaces != null) {
+ try {
+ theContext.enter(INTERFACES);
+ for (Iterator<Map.Entry<String, Map>> i =
+ interfaces.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String, Map> e = i.next();
+ check_type_interface_definition(
+ e.getKey(), e.getValue(), theContext, catalog);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+ }
+
+ public void checkInterfaceTypeDefinition(String theName,
+ Map theDefinition,
+ Checker.CheckContext theContext) {
+ theContext.enter(theName, Construct.Interface);
+ try {
+ CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext);
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ void setChecker(IChecker checker) {
+ this.checker = checker;
+ }
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/NodeCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/NodeCommon.java
new file mode 100644
index 0000000..2fa9752
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/NodeCommon.java
@@ -0,0 +1,197 @@
+package org.onap.sdc.dcae.checker.common;
+
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.checker.*;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import static org.onap.sdc.dcae.checker.common.ConstCommon.*;
+
+public class NodeCommon extends BaseCommon {
+
+ private static NodeCommon instance;
+
+ public synchronized static void init(IChecker checker) {
+ if (instance == null) {
+ instance = new NodeCommon();
+ instance.setChecker(checker);
+ }
+ }
+
+ public synchronized static NodeCommon getInstance() {
+ if (instance == null)
+ {
+ errLogger.log(LogLevel.ERROR, NodeCommon.class.getSimpleName(),"Need to call init before");
+ }
+ return instance;
+ }
+
+ private NodeCommon() {}
+
+ private IChecker checker;
+
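+ // Validates a node type definition: properties, attributes, requirements, capabilities and interfaces, together with the property/attribute facet checks.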
+ public void checkNodeTypeDefinition(String theName,
+ Map theDefinition,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ PropertiesCommon propertiesCommon = PropertiesCommon.getInstance();
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ RequirementCommon requirementCommon = RequirementCommon.getInstance();
+ CapabilityCommon capabilityCommon = CapabilityCommon.getInstance();
+ InterfaceCommon interfaceCommon = InterfaceCommon.getInstance();
+ theContext.enter(theName, Construct.Node);
+
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ if (theDefinition.containsKey(PROPERTIES)) {
+ propertiesCommon.checkProperties(
+ (Map<String, Map>) theDefinition.get(PROPERTIES), theContext, catalog);
+ facetCommon.checkTypeConstructFacet(Construct.Node, theName, theDefinition,
+ Facet.properties, theContext, catalog);
+ }
+
+ if (theDefinition.containsKey(ATTRIBUTES)) {
+ propertiesCommon.checkProperties(
+ (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext, catalog);
+ facetCommon.checkTypeConstructFacet(Construct.Node, theName, theDefinition,
+ Facet.attributes, theContext, catalog);
+ }
+
+ //requirements
+ if (theDefinition.containsKey(REQUIREMENTS)) {
+ requirementCommon.check_requirements(
+ (List<Map>) theDefinition.get(REQUIREMENTS), theContext, catalog);
+ }
+
+ //capabilities
+ if (theDefinition.containsKey(CAPABILITIES)) {
+ capabilityCommon.check_capabilities(
+ (Map<String, Map>) theDefinition.get(CAPABILITIES), theContext, catalog);
+ }
+
+ //interfaces:
+ Map<String, Map> interfaces =
+ (Map<String, Map>) theDefinition.get(INTERFACES);
+ interfaceCommon.checkMapTypeInterfaceDefinition(theContext, interfaces, catalog);
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ /* Validates a node template: its type and 'copy' references, the property and attribute facets, requirement, capability and interface assignments, and artifacts. */
+ public void checkNodeTemplateDefinition(String theName,
+ Map theNode,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ TypeCommon typeCommon = TypeCommon.getInstance();
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ RequirementCommon requirementCommon = RequirementCommon.getInstance();
+ CapabilityCommon capabilityCommon = CapabilityCommon.getInstance();
+ InterfaceCommon interfaceCommon = InterfaceCommon.getInstance();
+ theContext.enter(theName, Construct.Node);
+
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theNode, theContext)) {
+ return;
+ }
+
+ if (!typeCommon.checkType(Construct.Node, theNode, theContext, catalog)) {
+ return;
+ }
+
+ //copy
+ String copy = (String) theNode.get("copy");
+ if (copy != null) {
+ if (!typeCommon.checkTemplateReference(Construct.Node, theContext, catalog, copy)) {
+ theContext.addError("The 'copy' reference " + copy + " does not point to a known node template", null);
+ } else {
+ //the 'copy' node specification should be used to provide 'defaults'
+ //for this specification
+ }
+ }
+
+ /* check that we operate on properties and attributes within the scope of
+ the specified node type */
+ if (!facetCommon.checkFacet(
+ Construct.Node, /*theName,*/theNode, Facet.properties, theContext, catalog)) {
+ return;
+ }
+
+ if (!facetCommon.checkFacet(
+ Construct.Node, /*theName,*/theNode, Facet.attributes, theContext, catalog)) {
+ return;
+ }
+
+ //requirement assignment seq
+ if (theNode.containsKey(REQUIREMENTS)) {
+ requirementCommon.checkRequirementsAssignmentDefinition(
+ (List<Map>) theNode.get(REQUIREMENTS), theContext, catalog);
+ }
+
+ //capability assignment map: subject to augmentation
+ if (theNode.containsKey(CAPABILITIES)) {
+ capabilityCommon.checkCapabilitiesAssignmentDefinition(
+ (Map<String, Map>) theNode.get(CAPABILITIES), theContext, catalog);
+ }
+
+ //interfaces
+ if (theNode.containsKey(INTERFACES)) {
+ interfaceCommon.checkTemplateInterfacesDefinition(
+ (Map<String, Map>) theNode.get(INTERFACES), theContext, catalog);
+ }
+
+ //artifacts: artifacts do not have different definition forms/syntax
+ //depending on the context (type or template) but they are still subject
+ //to 'augmentation'
+ if (theNode.containsKey(ARTIFACTS)) {
+ checker.check_template_artifacts_definition(
+ (Map<String, Object>) theNode.get(ARTIFACTS), theContext);
+ }
+
+ /* node_filter: the context to which the node filter is applied is very
+ * wide here as opposed to the node filter specification in a requirement
+ * assignment which has a more strict context (target node/capability are
+ * specified).
+ * We could check that there are nodes in this template having the
+ * properties/capabilities specified in this filter, i.e. the filter has
+ * a chance to succeed.
+ */
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ /* Requirements are the odd ones out, as they are structured as a sequence rather than a map. */
+ public Map<String, Map> findNodeTypeRequirementByName(
+ String theNodeType, String theRequirementName, Catalog catalog) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findNodeTypeRequirementByName {}/{}", theNodeType, theRequirementName);
+ Iterator<Map.Entry<String, Map>> i =
+ catalog.hierarchy(Construct.Node, theNodeType);
+ while (i.hasNext()) {
+ Map.Entry<String, Map> nodeType = i.next();
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findNodeTypeRequirementByName, Checking node type {}", nodeType.getKey());
+ List<Map<String, Map>> nodeTypeRequirements =
+ (List<Map<String, Map>>) nodeType.getValue().get(REQUIREMENTS);
+ if (nodeTypeRequirements == null) {
+ continue;
+ }
+
+ for (Map<String, Map> requirement : nodeTypeRequirements) {
+ Map requirementDef = requirement.get(theRequirementName);
+ if (requirementDef != null) {
+ return requirementDef;
+ }
+ }
+ }
+ return null;
+ }
+
+ public void setChecker(IChecker checker) {
+ this.checker = checker;
+ }
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/PolicyCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/PolicyCommon.java
new file mode 100644
index 0000000..9c1d8cd
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/PolicyCommon.java
@@ -0,0 +1,134 @@
+package org.onap.sdc.dcae.checker.common;
+
+import org.onap.sdc.dcae.checker.*;
+
+import java.util.List;
+import java.util.Map;
+
+import static org.onap.sdc.dcae.checker.common.ConstCommon.IS_NONE_OF_THOSE;
+import static org.onap.sdc.dcae.checker.common.ConstCommon.PROPERTIES;
+import static org.onap.sdc.dcae.checker.common.ConstCommon.TARGETS_CONSTANT;
+
+public class PolicyCommon extends BaseCommon {
+
+ private static PolicyCommon instance;
+
+ public synchronized static PolicyCommon getInstance() {
+ if (instance == null)
+ {
+ instance = new PolicyCommon();
+ }
+ return instance;
+ }
+
+ private PolicyCommon() {}
+
+
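+ // Validates a policy type definition: its properties (and the properties facet) and that every 'targets' entry references a known node type or group type.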
+ public void checkPolicyTypeDefinition(String theName,
+ Map theDefinition,
+ Checker.CheckContext theContext,
+ Catalog catalog,
+ Target target) {
+ PropertiesCommon propertiesCommon = PropertiesCommon.getInstance();
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ theContext.enter(theName, Construct.Policy);
+
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ if (theDefinition.containsKey(PROPERTIES)) {
+ propertiesCommon.checkProperties(
+ (Map<String, Map>) theDefinition.get(PROPERTIES), theContext, catalog);
+ facetCommon.checkTypeConstructFacet(Construct.Policy, theName, theDefinition,
+ Facet.properties, theContext, catalog);
+ }
+
+ //the targets can be known node types or group types
+ List<String> targets = (List<String>) theDefinition.get(TARGETS_CONSTANT);
+ if ((targets != null) && (CheckCommon.getInstance().checkDefinition(TARGETS_CONSTANT, targets, theContext))) {
+ for (String targetItr : targets) {
+ if (!(catalog.hasType(Construct.Node, targetItr) ||
+ catalog.hasType(Construct.Group, targetItr))) {
+ theContext.addError("The 'targets' entry must contain a reference to a node type or group type, '" + target + IS_NONE_OF_THOSE, null);
+ }
+ }
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
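+ // Validates a policy template: the type reference, the properties facet, and that each 'targets' entry points to a node or group template whose type is compatible with the types listed in the policy type.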
+ public void checkPolicyDefinition(String theName,
+ Map theDef,
+ Checker.CheckContext theContext,
+ Catalog catalog,
+ Target target)
+ {
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ TypeCommon typeCommon = TypeCommon.getInstance();
+
+ theContext.enter(theName);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+
+ if (!typeCommon.checkType(Construct.Policy, theDef, theContext, catalog)) {
+ return;
+ }
+
+ if (!facetCommon.checkFacet(Construct.Policy, theDef, Facet.properties, theContext, catalog)) {
+ return;
+ }
+
+ //targets: must point to node or group templates (that are of a type
+ //specified in the policy type definition, if targets were specified
+ //there).
+ if (theDef.containsKey(TARGETS_CONSTANT)) {
+ List<String> targetsTypes = (List<String>)
+ catalog.getTypeDefinition(Construct.Policy,
+ (String) theDef.get("type"))
+ .get(TARGETS_CONSTANT);
+
+ List<String> targets = (List<String>) theDef.get(TARGETS_CONSTANT);
+ for (String targetItr : targets) {
+ Construct targetConstruct = null;
+
+ if (catalog.hasTemplate(theContext.target(), Construct.Group, targetItr)) {
+ targetConstruct = Construct.Group;
+ } else if (catalog.hasTemplate(theContext.target(), Construct.Node, targetItr)) {
+ targetConstruct = Construct.Node;
+ } else {
+ theContext.addError("The 'targets' entry must contain a reference to a node template or group template, '" + target + IS_NONE_OF_THOSE, null);
+ }
+
+ if (targetConstruct != null &&
+ targetsTypes != null) {
+ //get the target type and make sure is compatible with the types
+ //indicated in the type spec
+ String targetType = (String)
+ catalog.getTemplate(theContext.target(), targetConstruct, targetItr).get("type");
+
+ boolean found = false;
+ for (String type : targetsTypes) {
+ found = catalog
+ .isDerivedFrom(targetConstruct, targetType, type);
+ if (found) {
+ break;
+ }
+ }
+
+ if (!found) {
+ theContext.addError("The 'targets' " + targetConstruct + " entry '" + targetItr + "' is not type compatible with any of types specified in policy type targets", null);
+ }
+ }
+ }
+ }
+
+ } finally {
+ theContext.exit();
+ }
+ }
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/PropertiesCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/PropertiesCommon.java
new file mode 100644
index 0000000..f19e19b
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/PropertiesCommon.java
@@ -0,0 +1,97 @@
+package org.onap.sdc.dcae.checker.common;
+
+import org.onap.sdc.dcae.checker.Catalog;
+import org.onap.sdc.dcae.checker.Checker;
+import org.onap.sdc.dcae.checker.IChecker;
+
+import java.util.Iterator;
+import java.util.Map;
+
+import static org.onap.sdc.dcae.checker.common.ConstCommon.DEFAULT;
+import static org.onap.sdc.dcae.checker.common.ConstCommon.PROPERTIES;
+
+public class PropertiesCommon extends BaseCommon {
+
+ private static PropertiesCommon instance;
+
+ public synchronized static PropertiesCommon getInstance() {
+ if (instance == null)
+ {
+ instance = new PropertiesCommon();
+ }
+ return instance;
+ }
+
+ private PropertiesCommon() {}
+
+ public void checkProperties(
+ Map<String, Map> theDefinitions, Checker.CheckContext theContext, Catalog catalog) {
+ theContext.enter(PROPERTIES);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(PROPERTIES, theDefinitions, theContext)) {
+ return;
+ }
+
+ for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String, Map> e = i.next();
+ checkPropertyDefinition(e.getKey(), e.getValue(), theContext, catalog);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ private void checkPropertyDefinition(
+ String theName, Map theDefinition, Checker.CheckContext theContext, Catalog catalog) {
+ DataCommon dataCommon = DataCommon.getInstance();
+ theContext.enter(theName);
+ //use try/finally so the context is exited even on an early return, as in the other checks
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+ //check the type
+ if (!dataCommon.checkDataType(theDefinition, theContext, catalog)) {
+ return;
+ }
+ //check the default value is compatible with the type
+ Object defaultValue = theDefinition.get(DEFAULT);
+ if (defaultValue != null) {
+ dataCommon.checkDataValuation(defaultValue, theDefinition, theContext);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
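+ // The underscore-named check_property_definition/check_properties below mirror the camelCase variants above and perform the same definition, type and default-value checks.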
+ public void check_property_definition(String theName, Map theDefinition, Checker.CheckContext theContext, Catalog catalog) {
+ DataCommon dataCommon = DataCommon.getInstance();
+ theContext.enter(theName);
+ //use try/finally so the context is exited even on an early return
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+ // check the type
+ if (!dataCommon.checkDataType(theDefinition, theContext, catalog)) {
+ return;
+ }
+ // check the default value is compatible with the type
+ Object defaultValue = theDefinition.get("default");
+ if (defaultValue != null) {
+ dataCommon.checkDataValuation(defaultValue, theDefinition, theContext);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ public void check_properties(Map<String, Map> theDefinitions, Checker.CheckContext theContext, Catalog catalog) {
+ theContext.enter("properties");
+ try {
+ if (!CheckCommon.getInstance().checkDefinition("properties", theDefinitions, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext();) {
+ Map.Entry<String, Map> e = i.next();
+ check_property_definition(e.getKey(), e.getValue(), theContext, catalog);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/RelationshipCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/RelationshipCommon.java
new file mode 100644
index 0000000..145873a
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/RelationshipCommon.java
@@ -0,0 +1,112 @@
+package org.onap.sdc.dcae.checker.common;
+
+import org.onap.sdc.dcae.checker.*;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import static org.onap.sdc.dcae.checker.common.ConstCommon.*;
+import static org.onap.sdc.dcae.checker.common.ConstCommon.EMPTY_STRING_ARRAY;
+import static org.onap.sdc.dcae.checker.common.ConstCommon.VALID_TARGET_TYPES;
+
+public class RelationshipCommon extends BaseCommon {
+ private static RelationshipCommon instance;
+
+ public synchronized static RelationshipCommon getInstance() {
+ if (instance == null)
+ {
+ instance = new RelationshipCommon();
+ }
+ return instance;
+ }
+
+ private RelationshipCommon() {}
+
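+ // Validates a relationship type definition: properties, attributes, type-level interface definitions, and that each 'valid_target_types' entry references a known capability type.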
+ public void checkRelationshipTypeDefinition(String theName,
+ Map theDefinition,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ PropertiesCommon propertiesCommon = PropertiesCommon.getInstance();
+ TypeCommon typeCommon = TypeCommon.getInstance();
+ InterfaceCommon interfaceCommon = InterfaceCommon.getInstance();
+ theContext.enter(theName, Construct.Relationship);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ if (theDefinition.containsKey(PROPERTIES)) {
+ propertiesCommon.checkProperties(
+ (Map<String, Map>) theDefinition.get(PROPERTIES), theContext, catalog);
+ facetCommon.checkTypeConstructFacet(Construct.Relationship, theName, theDefinition,
+ Facet.properties, theContext, catalog);
+ }
+
+ if (theDefinition.containsKey(ATTRIBUTES)) {
+ propertiesCommon.checkProperties(
+ (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext, catalog);
+ facetCommon.checkTypeConstructFacet(Construct.Relationship, theName, theDefinition,
+ Facet.attributes, theContext, catalog);
+ }
+
+ Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get(INTERFACES);
+ if (interfaces != null) {
+ theContext.enter(INTERFACES);
+ for (Iterator<Map.Entry<String, Map>> i =
+ interfaces.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String, Map> e = i.next();
+ interfaceCommon.check_type_interface_definition(
+ e.getKey(), e.getValue(), theContext, catalog);
+ }
+ theContext.exit();
+ }
+
+ if (theDefinition.containsKey(VALID_TARGET_TYPES)) {
+ typeCommon.checkTypeReference(Construct.Capability, theContext , catalog,
+ ((List<String>) theDefinition.get(VALID_TARGET_TYPES)).toArray(EMPTY_STRING_ARRAY));
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
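+ // Validates a relationship template: the type reference and the property/attribute facets within the scope of the declared relationship type.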
+ public void checkRelationshipTemplateDefinition(
+ String theName,
+ Map theRelationship,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ TypeCommon typeCommon = TypeCommon.getInstance();
+ theContext.enter(theName, Construct.Relationship);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theRelationship, theContext)) {
+ return;
+ }
+
+ if (!typeCommon.checkType(Construct.Relationship, theRelationship, theContext, catalog)) {
+ return;
+ }
+
+ /* check that we operate on properties and attributes within the scope of
+ the specified relationship type */
+ if (!facetCommon.checkFacet(Construct.Relationship, theRelationship,
+ Facet.properties, theContext, catalog)) {
+ return;
+ }
+
+ if (!facetCommon.checkFacet(Construct.Relationship, theRelationship,
+ Facet.attributes, theContext, catalog)) {
+ return;
+ }
+
+ /* interface definitions
+ note: augmentation is allowed here so not clear what to check ..
+ maybe report augmentations if so configured .. */
+
+ } finally {
+ theContext.exit();
+ }
+ }
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/RequirementCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/RequirementCommon.java
new file mode 100644
index 0000000..ff2c2f2
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/RequirementCommon.java
@@ -0,0 +1,464 @@
+package org.onap.sdc.dcae.checker.common;
+
+import org.apache.commons.jxpath.JXPathContext;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.checker.*;
+
+import java.util.*;
+
+import static org.onap.sdc.dcae.checker.common.ConstCommon.*;
+
+public class RequirementCommon extends BaseCommon {
+
+ private static RequirementCommon instance;
+
+ public synchronized static RequirementCommon getInstance() {
+ if (instance == null)
+ {
+ instance = new RequirementCommon();
+ }
+ return instance;
+ }
+
+ private RequirementCommon() {}
+
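+ // Validates the 'requirements' sequence of a node type definition; each list entry is expected to be a single-entry map of requirement name to requirement definition.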
+ public void check_requirements(List<Map> theDefinition,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ theContext.enter(REQUIREMENTS);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(REQUIREMENTS, theDefinition, theContext)) {
+ return;
+ }
+
+ for (Iterator<Map> i = theDefinition.iterator(); i.hasNext(); ) {
+ Map e = i.next();
+ Iterator<Map.Entry<String, Map>> ei =
+ (Iterator<Map.Entry<String, Map>>) e.entrySet().iterator();
+ Map.Entry<String, Map> eie = ei.next();
+ checkRequirementDefinition(eie.getKey(), eie.getValue(), theContext, catalog);
+ assert !ei.hasNext();
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
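+ // Validates one requirement definition: the capability/node/relationship type references, that the enclosing node type is accepted by the target capability's valid_source_types, and that the capability type is accepted by the relationship type's valid_target_types.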
+ public void checkRequirementDefinition(String theName,
+ Map theDef,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ TypeCommon typeCommon = TypeCommon.getInstance();
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ theContext.enter(theName, Construct.Requirement);
+
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+ //check capability type
+ String capabilityType = (String) theDef.get(CAPABILITY);
+ if (null != capabilityType) {
+ typeCommon.checkTypeReference(Construct.Capability, theContext, catalog, capabilityType);
+ }
+
+ //check node type
+ String nodeType = (String) theDef.get("node");
+ if (null != nodeType) {
+ typeCommon.checkTypeReference(Construct.Node, theContext, catalog, nodeType);
+ }
+
+ //check relationship type
+ Map relationshipSpec = (Map) theDef.get("relationship");
+ String relationshipType = null;
+ if (null != relationshipSpec) {
+ relationshipType = (String) relationshipSpec.get("type");
+ if (relationshipType != null) { //should always be the case
+ typeCommon.checkTypeReference(Construct.Relationship, theContext, catalog, relationshipType);
+ }
+
+ Map<String, Map> interfaces = (Map<String, Map>)
+ relationshipSpec.get(INTERFACES);
+ if (interfaces != null) {
+ //augmentation (additional properties or operations) of the interfaces
+ //defined by the above relationship types
+
+ //check that the interface types are known
+ for (Map interfaceDef : interfaces.values()) {
+ typeCommon.checkType(Construct.Interface, interfaceDef, theContext, catalog);
+ }
+ }
+ }
+
+ //cross checks
+
+ //the capability definition might come from the capability type or from the capability definition
+ //within the node type. We might have more than one as a node might specify multiple capabilities of the
+ //same type.
+ //the goal here is to cross check the compatibility of the valid_source_types specification in the
+ //target capability definition (if that definition contains a valid_source_types entry).
+ List<Map> capabilityDefs = new LinkedList<>();
+ //nodeType exposes capabilityType
+ if (nodeType != null) {
+ Map<String, Map> capabilities =
+ facetCommon.findTypeFacetByType(Construct.Node, nodeType,
+ Facet.capabilities, capabilityType, catalog);
+ if (capabilities.isEmpty()) {
+ theContext.addError("The node type " + nodeType + " does not appear to expose a capability of a type compatible with " + capabilityType, null);
+ } else {
+ for (Map.Entry<String, Map> capability : capabilities.entrySet()) {
+ //this is the capability as it was defined in the node type
+ Map capabilityDef = capability.getValue();
+ //if it defines a valid_source_types then we're working with it,
+ //otherwise we're working with the capability type it points to.
+ //The spec does not make it clear if the valid_source_types in a capability definition augments or
+ //overwrites the one from the capabilityType (it just says they must be compatible).
+ if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) {
+ capabilityDefs.add(capabilityDef);
+ } else {
+ capabilityDef =
+ catalog.getTypeDefinition(Construct.Capability, (String) capabilityDef.get("type"));
+ if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) {
+ capabilityDefs.add(capabilityDef);
+ } else {
+ //if there is a capability that does not declare valid_source_types then there is no reason to
+ //make any further verification (as there is a valid node_type/capability target for this requirement)
+ capabilityDefs.clear();
+ break;
+ }
+ }
+ }
+ }
+ } else {
+ Map capabilityDef = catalog.getTypeDefinition(Construct.Capability, capabilityType);
+ if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) {
+ capabilityDefs.add(capabilityDef);
+ }
+ }
+
+ //check that the node type enclosing this requirement definition
+ //is in the list of valid_source_types
+ if (!capabilityDefs.isEmpty()) {
+ String enclosingNodeType =
+ theContext.enclosingConstruct(Construct.Node);
+ assert enclosingNodeType != null;
+
+ if (!capabilityDefs.stream().anyMatch(
+ (Map capabilityDef) -> {
+ List<String> valid_source_types =
+ (List<String>) capabilityDef.get(VALID_SOURCE_TYPES);
+ return valid_source_types.stream().anyMatch(
+ (String source_type) -> catalog.isDerivedFrom(
+ Construct.Node, enclosingNodeType, source_type));
+ })) {
+ theContext.addError("Node type: " + enclosingNodeType + " not compatible with any of the valid_source_types provided in the definition of compatible capabilities", null);
+ }
+ }
+
+ //if we have a relationship type, check if it has a valid_target_types
+ //if it does, make sure that the capability type is compatible with one
+ //of them
+ if (relationshipType != null) { //should always be the case
+ Map relationshipTypeDef = catalog.getTypeDefinition(
+ Construct.Relationship, relationshipType);
+ if (relationshipTypeDef != null) {
+ List<String> valid_target_types =
+ (List<String>) relationshipTypeDef.get(VALID_TARGET_TYPES);
+ if (valid_target_types != null) {
+ boolean found = false;
+ for (String target_type : valid_target_types) {
+ if (catalog.isDerivedFrom(
+ Construct.Capability, capabilityType, target_type)) {
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ theContext.addError("Capability type: " + capabilityType + " not compatible with any of the valid_target_types " + valid_target_types + " provided in the definition of relationship type " + relationshipType, null);
+ }
+ }
+ }
+ }
+
+ //relationship declares the capabilityType in its valid_target_type set
+ //in A.6.9 'Relationship Type' the spec does not indicate how inheritance
+ //is to be applied to the valid_target_type spec: cumulative, overwrites,
+ //so we treat it as an overwrite.
+ } finally {
+ theContext.exit();
+ }
+ }
+
+ //requirement and capability assignments appear in node templates
+ public void checkRequirementsAssignmentDefinition(
+ List<Map> theRequirements, Checker.CheckContext theContext, Catalog catalog) {
+ NodeCommon nodeCommon = NodeCommon.getInstance();
+ theContext.enter(REQUIREMENTS);
+ try {
+ if (!CheckCommon.getInstance().checkDefinition(REQUIREMENTS, theRequirements, theContext)) {
+ return;
+ }
+
+ //the node type for the node template enclosing these requirements
+ String nodeType = (String) catalog.getTemplate(
+ theContext.target(),
+ Construct.Node,
+ theContext.enclosingConstruct(Construct.Node))
+ .get("type");
+
+ for (Iterator<Map> ri = theRequirements.iterator(); ri.hasNext(); ) {
+ Map<String, Map> requirement = (Map<String, Map>) ri.next();
+
+ Iterator<Map.Entry<String, Map>> rai = requirement.entrySet().iterator();
+
+ Map.Entry<String, Map> requirementEntry = rai.next();
+ assert !rai.hasNext();
+
+ String requirementName = requirementEntry.getKey();
+ Map requirementDef = nodeCommon.findNodeTypeRequirementByName(
+ nodeType, requirementName, catalog);
+
+ if (requirementDef == null) {
+ theContext.addError("No requirement " + requirementName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null);
+ continue;
+ }
+
+ checkRequirementAssignmentDefinition(
+ requirementName, requirementEntry.getValue(), requirementDef, theContext, catalog);
+ }
+ } finally {
+ theContext.exit();
+ }
+ }
+
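+ // Validates one requirement assignment against its definition: the 'node' and 'capability' references (template or type) and their type compatibility, plus the node_filter property/capability constraints.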
+ public void checkRequirementAssignmentDefinition(
+ String theRequirementName,
+ Map theAssignment,
+ Map theDefinition,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+ TypeCommon typeCommon = TypeCommon.getInstance();
+ FacetCommon facetCommon = FacetCommon.getInstance();
+ theContext//.enter("requirement_assignment")
+ .enter(theRequirementName, Construct.Requirement);
+
+ //grab the node type definition to verify compatibility
+
+ try {
+ //node assignment
+ boolean targetNodeIsTemplate = false;
+ String targetNode = (String) theAssignment.get("node");
+ if (targetNode == null) {
+ targetNode = (String) theDefinition.get("node");
+ //targetNodeIsTemplate stays false, targetNode must be a type
+ } else {
+ //the value must be a node template or a node type
+ targetNodeIsTemplate = typeCommon.isTemplateReference(
+ Construct.Node, theContext, targetNode, catalog);
+ if ((!targetNodeIsTemplate) && (!typeCommon.isTypeReference(Construct.Node, targetNode, catalog))){
+ theContext.addError("The 'node' entry must contain a reference to a node template or node type, '" + targetNode + IS_NONE_OF_THOSE, null);
+ return;
+ }
+
+ //additional checks
+ String targetNodeDef = (String) theDefinition.get("node");
+ if (targetNodeDef != null && targetNode != null) {
+ if (targetNodeIsTemplate) {
+ //if the target is node template, it must be compatible with the
+ //node type specification in the requirement definition
+ String targetNodeType = (String)
+ catalog.getTemplate(theContext.target(), Construct.Node, targetNode).get("type");
+ if (!catalog.isDerivedFrom(
+ Construct.Node, targetNodeType, targetNodeDef)) {
+ theContext.addError("The required target node type '" + targetNodeType + "' of target node " + targetNode + " is not compatible with the target node type found in the requirement definition: " + targetNodeDef, null);
+ return;
+ }
+ } else {
+ //if the target is a node type it must be compatible (= or derived
+ //from) with the node type specification in the requirement definition
+ if (!catalog.isDerivedFrom(
+ Construct.Node, targetNode, targetNodeDef)) {
+ theContext.addError("The required target node type '" + targetNode + "' is not compatible with the target node type found in the requirement definition: " + targetNodeDef, null);
+ return;
+ }
+ }
+ }
+ }
+
+ String targetNodeType = targetNodeIsTemplate ?
+ (String) catalog.getTemplate(theContext.target(), Construct.Node, targetNode).get("type") :
+ targetNode;
+
+ //capability assignment
+ boolean targetCapabilityIsType = false;
+ String targetCapability = (String) theAssignment.get(CAPABILITY);
+ if (targetCapability == null) {
+ targetCapability = (String) theDefinition.get(CAPABILITY);
+ //in a requirement definition the target capability can only be a
+ //capability type (and not a capability name within some target node
+ //type)
+ targetCapabilityIsType = true;
+ } else {
+ targetCapabilityIsType = typeCommon.isTypeReference(Construct.Capability, targetCapability, catalog);
+
+ //check compatibility with the target capability type specified
+ //in the requirement definition, if any
+ String targetCapabilityDef = (String) theDefinition.get(CAPABILITY);
+ if (targetCapabilityDef != null && targetCapability != null) {
+ if (targetCapabilityIsType) {
+ if (!catalog.isDerivedFrom(
+ Construct.Capability, targetCapability, targetCapabilityDef)) {
+ theContext.addError("The required target capability type '" + targetCapability + "' is not compatible with the target capability type found in the requirement definition: " + targetCapabilityDef, null);
+ return;
+ }
+ } else {
+ //the capability is from a target node. Find its definition and
+ //check that its type is compatible with the capability type
+ //from the requirement definition
+
+ //check target capability compatibility with target node
+ if (targetNode == null) {
+ theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', which was not specified", null);
+ return;
+ }
+ if (!targetNodeIsTemplate) {
+ theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', but there you specified a node type", null);
+ return;
+ }
+ //check that the targetNode (its type) indeed has the
+ //targetCapability
+
+ Map<String, Object> targetNodeCapabilityDef =
+ facetCommon.findTypeFacetByName(
+ Construct.Node, targetNodeType,
+ Facet.capabilities, targetCapability, catalog);
+ if (targetNodeCapabilityDef == null) {
+ theContext.addError("No capability '" + targetCapability + "' was specified in the node " + targetNode + " of type " + targetNodeType, null);
+ return;
+ }
+
+ String targetNodeCapabilityType = (String) targetNodeCapabilityDef.get("type");
+
+ if (!catalog.isDerivedFrom(Construct.Capability,
+ targetNodeCapabilityType,
+ targetCapabilityDef)) {
+ theContext.addError("The required target capability type '" + targetCapabilityDef + "' is not compatible with the target capability type found in the target node type capability definition : " + targetNodeCapabilityType + ", targetNode " + targetNode + ", capability name " + targetCapability, null);
+ return;
+ }
+ }
+ }
+ }
+
+ //relationship assignment
+ Map targetRelationship = (Map) theAssignment.get("relationship");
+ if (targetRelationship != null) {
+ //this has to be compatible with the relationship with the same name
+ //from the node type
+ //check the type
+ }
+
+ //node_filter: use jxpath to simplify the navigation somewhat,
+ //though the result is still rather cryptic
+ JXPathContext jxPath = JXPathContext.newContext(theAssignment);
+ jxPath.setLenient(true);
+
+ List<Map> propertiesFilter =
+ (List<Map>) jxPath.getValue("/node_filter/properties");
+ if (propertiesFilter != null) {
+ for (Map propertyFilter : propertiesFilter) {
+ if (targetNode != null) {
+ //if we have a target node or node template then it must
+ //have these properties
+ for (Object propertyName : propertyFilter.keySet()) {
+ if (null == facetCommon.findTypeFacetByName(Construct.Node,
+ targetNodeType,
+ Facet.properties,
+ propertyName.toString(),
+ catalog)) {
+ theContext.addError("The node_filter property " + propertyName + " is invalid: requirement target node " + targetNode + " does not have such a property", null);
+ }
+ }
+ }
+ }
+ }
+
+ List<Map> capabilitiesFilter =
+ (List<Map>) jxPath.getValue("node_filter/capabilities");
+ if (capabilitiesFilter != null) {
+ for (Map capabilityFilterDef : capabilitiesFilter) {
+ assert capabilityFilterDef.size() == 1;
+ Map.Entry<String, Map> capabilityFilterEntry =
+ (Map.Entry<String, Map>) capabilityFilterDef.entrySet().iterator().next();
+ String targetFilterCapability = capabilityFilterEntry.getKey();
+ Map<String, Object> targetFilterCapabilityDef = null;
+
+ //if we have a targetNode capabilityName must be a capability of
+ //that node (type); or it can be simply capability type (but the node
+ //must have a capability of that type)
+
+ String targetFilterCapabilityType = null;
+ if (targetNode != null) {
+ targetFilterCapabilityDef =
+ facetCommon.findTypeFacetByName(Construct.Node, targetNodeType,
+ Facet.capabilities, targetFilterCapability, catalog);
+ if (targetFilterCapabilityDef != null) {
+ targetFilterCapabilityType =
+ (String) targetFilterCapabilityDef/*.values().iterator().next()*/.get("type");
+ } else {
+ Map<String, Map> targetFilterCapabilities =
+ facetCommon.findTypeFacetByType(Construct.Node, targetNodeType,
+ Facet.capabilities, targetFilterCapability, catalog);
+
+ if (!targetFilterCapabilities.isEmpty()) {
+ if (targetFilterCapabilities.size() > 1) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "checkRequirementAssignmentDefinition: filter check, target node type '{}' has more than one capability of type '{}', not supported", targetNodeType, targetFilterCapability);
+ }
+ //pick the first entry, it represents a capability of the required type
+ Map.Entry<String, Map> capabilityEntry = targetFilterCapabilities.entrySet().iterator().next();
+ targetFilterCapabilityDef = Collections.singletonMap(capabilityEntry.getKey(),
+ capabilityEntry.getValue());
+ targetFilterCapabilityType = targetFilterCapability;
+ }
+ }
+ } else {
+ //no node (type) specified, it can be a straight capability type
+ targetFilterCapabilityDef = catalog.getTypeDefinition(
+ Construct.Capability, targetFilterCapability);
+ //here comes the odd part: it can still be just a name in which
+ //case we should look at the requirement definition, see which
+ //capability (type) it indicates
+ assert targetCapabilityIsType; //cannot be otherwise, we'd need a node
+ targetFilterCapabilityDef = catalog.getTypeDefinition(
+ Construct.Capability, targetCapability);
+ targetFilterCapabilityType = targetCapability;
+ }
+
+ if (targetFilterCapabilityDef == null) {
+ theContext.addError("Capability (name or type) " + targetFilterCapability + " is invalid: not a known capability (type) " +
+ ((targetNodeType != null) ? (" of node type " + targetNodeType) : ""), null);
+ continue;
+ }
+
+ for (Map propertyFilter :
+ (List<Map>) jxPath.getValue("/node_filter/capabilities/" + targetFilterCapability + "/properties")) {
+ //check that the properties are in the scope of the
+ //capability definition
+ for (Object propertyName : propertyFilter.keySet()) {
+ if (null == facetCommon.findTypeFacetByName(Construct.Capability,
+ targetCapability,
+ Facet.properties,
+ propertyName.toString(),
+ catalog)) {
+ theContext.addError("The capability filter " + targetFilterCapability + " property " + propertyName + " is invalid: target capability " + targetFilterCapabilityType + " does not have such a property", null);
+ }
+ }
+ }
+ }
+ }
+
+ } finally {
+ theContext.exit();
+ }
+ }
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/TypeCommon.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/TypeCommon.java
new file mode 100644
index 0000000..9fb4226
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/common/TypeCommon.java
@@ -0,0 +1,115 @@
+package org.onap.sdc.dcae.checker.common;
+
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.checker.*;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import static org.onap.sdc.dcae.checker.common.ConstCommon.*;
+
+public class TypeCommon extends BaseCommon {
+
+ private static TypeCommon instance;
+
+ public synchronized static TypeCommon getInstance() {
+ if (instance == null)
+ {
+ instance = new TypeCommon();
+ }
+ return instance;
+ }
+
+ private TypeCommon() {}
+
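+ // Registers the given types with the catalog; re-declarations and 'derived_from' parents that have not (yet) been declared are reported as errors.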
+ public boolean catalogTypes(Construct theConstruct, Map<String, Map> theTypes, Checker.CheckContext theContext, Catalog catalog) {
+
+ boolean res = true;
+ for (Map.Entry<String, Map> typeEntry : theTypes.entrySet()) {
+ res &= catalogType(theConstruct, typeEntry.getKey(), typeEntry.getValue(), theContext, catalog);
+ }
+
+ return res;
+ }
+
+ public boolean catalogType(Construct theConstruct,
+ String theName,
+ Map theDef,
+ Checker.CheckContext theContext,
+ Catalog catalog) {
+
+ if (!catalog.addType(theConstruct, theName, theDef)) {
+ theContext.addError(theConstruct + TYPE + theName + " re-declaration", null);
+ return false;
+ }
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} type {} has been cataloged", theConstruct, theName);
+
+ String parentType = (String) theDef.get("derived_from");
+ if (parentType != null && !catalog.hasType(theConstruct, parentType)) {
+ theContext.addError(
+ theConstruct + TYPE + theName + " indicates a supertype that has not (yet) been declared: " + parentType, null);
+ return false;
+ }
+ return true;
+ }
+
+ public boolean checkTypeReference(Construct theConstruct,
+ Checker.CheckContext theContext,
+ Catalog catalog,
+ String... theTypeNames) {
+ boolean res = true;
+ for (String typeName : theTypeNames) {
+ if (!isTypeReference(theConstruct, typeName, catalog)) {
+ theContext.addError("Reference to " + theConstruct + " type '" + typeName + "' points to unknown type", null);
+ res = false;
+ }
+ }
+ return res;
+ }
+
+ public boolean isTypeReference(Construct theConstruct,
+ String theTypeName, Catalog catalog) {
+ return catalog.hasType(theConstruct, theTypeName);
+ }
+
+ //generic checking actions, not related to validation rules
+
+ /* will check the validity of the type specification for any construct containing a 'type' entry */
+ public boolean checkType(Construct theCategory, Map theSpec, Checker.CheckContext theContext, Catalog catalog) {
+ String type = (String) theSpec.get("type");
+ if (type == null) {
+ theContext.addError("Missing type specification", null);
+ return false;
+ }
+
+ if (!catalog.hasType(theCategory, type)) {
+ theContext.addError(UNKNOWN + theCategory + " type: " + type, null);
+ return false;
+ }
+
+ return true;
+ }
+
+ /* node or relationship templates */
+ public boolean checkTemplateReference(Construct theConstruct,
+ Checker.CheckContext theContext,
+ Catalog catalog,
+ String... theTemplateNames) {
+ boolean res = true;
+ for (String templateName : theTemplateNames) {
+ if (!isTemplateReference(theConstruct, theContext, templateName, catalog)) {
+ theContext.addError("Reference to " + theConstruct + " template '" + templateName + "' points to unknown template", null);
+ res = false;
+ }
+ }
+ return res;
+ }
+
+ public boolean isTemplateReference(Construct theConstruct,
+ Checker.CheckContext theContext,
+ String theTemplateName,
+ Catalog catalog) {
+ return catalog.hasTemplate(theContext.target(), theConstruct, theTemplateName);
+ }
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/validation/TOSCAValidator.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/validation/TOSCAValidator.java
new file mode 100644
index 0000000..38bd84f
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/validation/TOSCAValidator.java
@@ -0,0 +1,115 @@
+package org.onap.sdc.dcae.checker.validation;
+
+import kwalify.Rule;
+import kwalify.Types;
+import kwalify.Validator;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.dcae.checker.IChecker;
+import org.onap.sdc.dcae.checker.Target;
+
+import java.util.*;
+
+public class TOSCAValidator extends Validator {
+ private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+ private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+ private final IChecker checker;
+ //what we're validating
+ private Target target;
+
+ /* Some of the TOSCA entries accept a 'short form/notation' instead of the canonical map representation.
+ * kwalify cannot easily express these alternatives, so we handle them here. In the pre-validation
+ * phase we detect the presence of a short notation, compute the canonical form and validate it.
+ * In the post-validation phase we substitute the canonical form for the short form so that
+ * checking does not have to deal with it.
+ */
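+ /* Illustrative example (assumed, not taken from the grammar files): for a map-typed rule whose
+ * 'short' hint is "type", a bare scalar value such as "tosca.nodes.Root" is canonicalized to the
+ * map {type: "tosca.nodes.Root"}, re-validated against the same rule, and recorded in 'canonicals'
+ * so the short form can later be replaced.
+ */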
+
+ public Map<String, Object> getCanonicals() {
+ return canonicals;
+ }
+
+ private Map<String, Object> canonicals = new TreeMap<>();
+
+ public TOSCAValidator(Target theTarget, Object theSchema, IChecker checker) {
+ super(theSchema);
+ this.checker = checker;
+ this.target = theTarget;
+ }
+
+ public Target getTarget() {
+ return this.target;
+ }
+
+ /* hook method called by Validator#validate()
+ */
+ @Override
+ protected boolean preValidationHook(Object value, Rule rule, ValidationContext context) {
+
+ checker.validationHook("pre", value, rule, context);
+ //short form handling
+ String hint = rule.getShort();
+ if (value != null &&
+ hint != null) {
+
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Attempting canonical at {}, rule {}", context.getPath(), rule.getName());
+
+ Object canonical = null;
+ //if the canonical form requires a collection
+ if (Types.isCollectionType(rule.getType())) {
+ //and the actual value isn't one
+ if (!(value instanceof Map || value instanceof List)) {
+ //used to use singleton map/list here (was good for catching errors)
+ //but there is the possibility of short forms within short forms so
+ //the created canonicals need to accommodate other values.
+ if (Types.isMapType(rule.getType())) {
+ canonical = new HashMap();
+ ((Map) canonical).put(hint, value);
+ } else {
+ //the hint is irrelevant here but we should impose a value when the target is a list
+ canonical = new LinkedList();
+ ((List) canonical).add(value);
+ }
+ } else {
+ //we can accommodate:
+ // map to list of map transformation
+ if (!Types.isMapType(rule.getType()) /* a seq */ &&
+ value instanceof Map) {
+ canonical = new LinkedList();
+ ((List) canonical).add(value);
+ } else {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Grammar for rule {} (at {}) would require unsupported short form transformation: {} to {}", rule.getName(), context.getPath(), value.getClass(), rule.getType());
+ return false;
+ }
+ }
+
+ int errc = context.errorCount();
+ validateRule(canonical, rule, context);
+ if (errc != context.errorCount()) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Short notation for {} through {} at {} failed validation", rule.getName(), hint, context.getPath());
+ } else {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Short notation for {} through {} at {} passed validation. Canonical form is {}", rule.getName(), hint, context.getPath(), canonical);
+ //replace the short notation with the canonical one so we don't
+ //have to deal with it again during checking
+ this.canonicals.put(context.getPath(), canonical);
+ return true;
+ }
+ } else {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Grammar for rule {} (at {}) would require unsupported short form transformation: {} to {}", rule.getName(), context.getPath(), value.getClass(), rule.getType());
+ }
+ }
+
+ //perform default validation process
+ return false;
+ }
+
+ /*
+ * Only gets invoked once the value was successfully verified against the syntax indicated by the given rule.
+ */
+ @Override
+ protected void postValidationHook(Object value,
+ Rule rule,
+ ValidationContext context) {
+ checker.validationHook("post", value, rule, context);
+ }
+
+} \ No newline at end of file