author    Stone, Avi (as206k) <as206k@att.com>    2018-05-23 11:21:11 +0300
committer Stone, Avi (as206k) <as206k@att.com>    2018-05-23 11:30:13 +0300
commit    3e4c18770957b55e2f80da32c3a32caa908f1386 (patch)
tree      8a94c656300e75e38febfe9826ad36fc54fe14f5 /dcaedt_validator/checker
parent    da9db1b89e8c9199da4791a2ccd26d1628120a08 (diff)
Upgrade dt-be-main
Update sources for dcae-dt-be-main to latest version

Change-Id: I3d58a2dc32611c0ca90f1c97e1294a17d5748623
Issue-ID: SDC-1359
Signed-off-by: Stone, Avi (as206k) <as206k@att.com>
Diffstat (limited to 'dcaedt_validator/checker')
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java        722
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java         89
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java  245
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java        8
-rw-r--r--  dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java            234
5 files changed, 622 insertions, 676 deletions
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java
index 1512e56..bdddce3 100644
--- a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java
@@ -23,7 +23,6 @@ import com.google.common.collect.Iterators;
import com.google.common.collect.Table;
import com.google.common.collect.HashBasedTable;
import org.onap.sdc.common.onaplog.OnapLoggerDebug;
-import org.onap.sdc.common.onaplog.OnapLoggerError;
import org.onap.sdc.common.onaplog.Enums.LogLevel;
/*
@@ -33,250 +32,249 @@ import org.onap.sdc.common.onaplog.Enums.LogLevel;
*/
public class Catalog {
- private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
- private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+ private static final String DERIVED_FROM = "derived_from";
+ private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
- /* Type hierarchies are stored as maps from a type name to its definition
+
+ /* tracks imports, i.e. targets */
+ private LinkedHashMap<URI, Target> targets =
+ new LinkedHashMap<>();
+ /* tracks dependencies between targets, i.e. the 'adjacency' matrix defined by
+ * the 'import' relationship */
+ private Table<Target,Target,Boolean> imports = HashBasedTable.create();
+
+
+ /* Type hierarchies are stored as maps from a type name to its definition
* Not the best but easy to follow hierarchies towards their root ..
*/
private EnumMap<Construct, Map<String,Map>> types =
- new EnumMap<Construct, Map<String,Map>>(Construct.class);
- /* track templates: we track templates (tye instances) first per target then per contruct.
- * This allows us to share the catalog among multiple templates sharign the same type set
+ new EnumMap<>(Construct.class);
+ /* track templates: we track templates (type instances) first per target then per construct.
+ * This allows us to share the catalog among multiple templates sharing the same type set
*/
private Map<Target, EnumMap<Construct, Map<String,Map>>> templates =
- new HashMap<Target, EnumMap<Construct, Map<String,Map>>>();
-
- private Catalog parent;
-
- public Catalog(Catalog theParent) {
- this.parent = theParent;
- /* there are no requirement types, they are the same as capability types */
- types.put(Construct.Data, new LinkedHashMap<String, Map>());
- types.put(Construct.Capability, new LinkedHashMap<String, Map>());
- types.put(Construct.Relationship, new LinkedHashMap<String, Map>());
- types.put(Construct.Artifact, new LinkedHashMap<String, Map>());
- types.put(Construct.Interface, new LinkedHashMap<String, Map>());
- types.put(Construct.Node, new LinkedHashMap<String, Map>());
- types.put(Construct.Group, new LinkedHashMap<String, Map>());
- types.put(Construct.Policy, new LinkedHashMap<String, Map>());
+ new HashMap<>();
+
+ private Catalog parent;
+
+ public Catalog(Catalog theParent) {
+ this.parent = theParent;
+ /* there are no requirement types, they are the same as capability types */
+ types.put(Construct.Data, new LinkedHashMap<>());
+ types.put(Construct.Capability, new LinkedHashMap<>());
+ types.put(Construct.Relationship, new LinkedHashMap<>());
+ types.put(Construct.Artifact, new LinkedHashMap<>());
+ types.put(Construct.Interface, new LinkedHashMap<>());
+ types.put(Construct.Node, new LinkedHashMap<>());
+ types.put(Construct.Group, new LinkedHashMap<>());
+ types.put(Construct.Policy, new LinkedHashMap<>());
- }
-
- public Catalog() {
- this(null);
- }
-
- public boolean addType(Construct theConstruct, String theName, Map theDef) {
- if (hasType(theConstruct, theName)) {
- return false;
- }
- getConstructTypes(theConstruct).put(theName, theDef);
- return true;
+ }
+
+ public Catalog() {
+ this(null);
+ }
+
+ public boolean addType(Construct theConstruct, String theName, Map theDef) {
+ if (hasType(theConstruct, theName)) {
+ return false;
+ }
+ getConstructTypes(theConstruct).put(theName, theDef);
+ return true;
}
- public Map getTypeDefinition(Construct theConstruct, String theName) {
- Map<String, Map> constructTypes = getConstructTypes(theConstruct);
- Map typeDef = constructTypes.get(theName);
- if (typeDef == null && this.parent != null) {
- return this.parent.getTypeDefinition(theConstruct, theName);
- }
- return typeDef;
- }
+ public Map getTypeDefinition(Construct theConstruct, String theName) {
+ Map<String, Map> constructTypes = getConstructTypes(theConstruct);
+ Map typeDef = constructTypes.get(theName);
+ if (typeDef == null && this.parent != null) {
+ return this.parent.getTypeDefinition(theConstruct, theName);
+ }
+ return typeDef;
+ }
public boolean hasType(Construct theConstruct, String theName) {
- Map<String, Map> constructTypes = getConstructTypes(theConstruct);
- boolean res = constructTypes.containsKey(theName);
- if (!res && this.parent != null) {
- res = this.parent.hasType(theConstruct, theName);
- }
- return res;
- }
-
- protected Map<String, Map> getConstructTypes(Construct theConstruct) {
- Map<String, Map> constructTypes = this.types.get(theConstruct);
- if (null == constructTypes) {
- throw new RuntimeException("Something worse is cooking here!",
- new CatalogException("No types for construct " + theConstruct));
- }
- return constructTypes;
- }
-
- protected Iterator<Map.Entry<String,Map>>
- typesIterator(Construct theConstruct) {
- List<Map.Entry<String,Map>> constructTypes =
- new ArrayList<Map.Entry<String,Map>>(
- this.types.get(theConstruct).entrySet());
- Collections.reverse(constructTypes);
- return (this.parent == null)
- ? constructTypes.iterator()
- : Iterators.concat(constructTypes.iterator(),
- this.parent.typesIterator(theConstruct));
- }
-
- /* this will iterate through the type hierarchy for the given type, included.
- */
- public Iterator<Map.Entry<String,Map>>
- hierarchy(Construct theConstruct, final String theName) {
- return Iterators.filter(typesIterator(theConstruct),
+ Map<String, Map> constructTypes = getConstructTypes(theConstruct);
+ boolean res = constructTypes.containsKey(theName);
+ if (!res && this.parent != null) {
+ res = this.parent.hasType(theConstruct, theName);
+ }
+ return res;
+ }
+
+ protected Map<String, Map> getConstructTypes(Construct theConstruct) {
+ Map<String, Map> constructTypes = this.types.get(theConstruct);
+ if (null == constructTypes) {
+ throw new RuntimeException("Something worse is cooking here!",
+ new CatalogException("No types for construct " + theConstruct));
+ }
+ return constructTypes;
+ }
+
+ private Iterator<Map.Entry<String,Map>>
+ typesIterator(Construct theConstruct) {
+ List<Map.Entry<String,Map>> constructTypes =
+ new ArrayList<>(
+ this.types.get(theConstruct).entrySet());
+ Collections.reverse(constructTypes);
+ return (this.parent == null)
+ ? constructTypes.iterator()
+ : Iterators.concat(constructTypes.iterator(),
+ this.parent.typesIterator(theConstruct));
+ }
+
+
+ // this will iterate through the type hierarchy for the given type, included.
+ public Iterator<Map.Entry<String,Map>>
+ hierarchy(Construct theConstruct, final String theName) {
+ return Iterators.filter(typesIterator(theConstruct),
new Predicate<Map.Entry<String,Map>>() {
Object next = theName;
public boolean apply(Map.Entry<String,Map> theEntry) {
if (next != null && next.equals(theEntry.getKey())) {
- next = theEntry.getValue().get("derived_from");
+ next = theEntry.getValue().get(DERIVED_FROM);
return true;
+ } else {
+ return false;
}
- else
- return false;
}
});
}
- public boolean isDerivedFrom(Construct theConstruct, String theType, String theBaseType) {
-
- Iterator<Map.Entry<String,Map>> hierachyIterator =
- hierarchy(theConstruct, theType);
- while (hierachyIterator.hasNext()) {
- Map.Entry<String,Map> typeDef = hierachyIterator.next();
-
- if (typeDef.getKey().equals(theBaseType)) {
- return true;
- }
- }
- return false;
- }
-
- /* We go over the type hierarchy and retain only an iterator over the
- * elements of the given facet for each type in the hierarchy.
- * We concatenate these iterators and filter out duplicates.
- * TODO: cannot just filter out duplicates - a redefinition can refine the one in the base construct so we
- * should merge them!
- */
- public Iterator<Map.Entry> facets(Construct theConstruct,
- final Facet theFacet,
- final String theName) {
- return
- Iterators.filter(
- Iterators.concat(
- Iterators.transform(
- hierarchy(theConstruct, theName),
- new Function<Map.Entry<String,Map>, Iterator<Map.Entry>>() {
- public Iterator<Map.Entry> apply(Map.Entry<String,Map> theEntry) {
- Map m = (Map)theEntry.getValue().get(theFacet.name());
- return m == null
- ? Collections.emptyIterator()
- : m.entrySet().iterator();
- }
- }
- )
- ),
+ public boolean isDerivedFrom(Construct theConstruct, String theType, String theBaseType) {
+
+ Iterator<Map.Entry<String,Map>> hierachyIterator =
+ hierarchy(theConstruct, theType);
+ while (hierachyIterator.hasNext()) {
+ Map.Entry<String,Map> typeDef = hierachyIterator.next();
+
+ if (typeDef.getKey().equals(theBaseType)) {
+ return true;
+ }
+ }
+ return false;
+ }
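
A minimal usage sketch of the derived_from walk that hierarchy() and isDerivedFrom() implement, assuming the org.onap.sdc.dcae.checker classes are on the classpath; the type names and definitions are made up for illustration:

    import java.util.Collections;
    import java.util.Iterator;
    import java.util.Map;

    import org.onap.sdc.dcae.checker.Catalog;
    import org.onap.sdc.dcae.checker.Construct;

    public class HierarchyDemo {
        public static void main(String[] args) {
            Catalog cat = new Catalog();
            // every type definition is just a Map; "derived_from" links it to its parent type
            cat.addType(Construct.Node, "tosca.nodes.Root", Collections.emptyMap());
            cat.addType(Construct.Node, "tosca.nodes.Database",
                    Collections.singletonMap("derived_from", "tosca.nodes.Root"));

            // prints tosca.nodes.Database, then tosca.nodes.Root (the type itself is included)
            Iterator<Map.Entry<String, Map>> it = cat.hierarchy(Construct.Node, "tosca.nodes.Database");
            while (it.hasNext()) {
                System.out.println(it.next().getKey());
            }

            // true: Database is (transitively) derived from Root
            System.out.println(cat.isDerivedFrom(Construct.Node, "tosca.nodes.Database", "tosca.nodes.Root"));
        }
    }
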
+
+ /* We go over the type hierarchy and retain only an iterator over the
+ * elements of the given facet for each type in the hierarchy.
+ * We concatenate these iterators and filter out duplicates.
+ * TODO: cannot just filter out duplicates - a redefinition can refine the one in the base construct so we
+ * should merge them!
+ */
+ public Iterator<Map.Entry> facets(Construct theConstruct, final Facet theFacet, final String theName) {
+ return
+ Iterators.filter(
+ Iterators.concat(
+ Iterators.transform(
+ hierarchy(theConstruct, theName),
+ (Function<Map.Entry<String, Map>, Iterator<Map.Entry>>) theEntry -> {
+ Map m = (Map)theEntry.getValue().get(theFacet.name());
+ return m == null
+ ? Collections.emptyIterator()
+ : m.entrySet().iterator();
+ }
+ )
+ ),
new Predicate<Map.Entry>() {
- Set insts = new HashSet();
- public boolean apply(Map.Entry theEntry) {
- return !insts.contains(theEntry.getKey());
- }
- }
- );
- }
-
- //no need to specify a construct, only nodes can have requirements
- public Iterator<Map.Entry> requirements(final String theName) {
- return
- Iterators.concat(
- Iterators.transform(
- hierarchy(Construct.Node, theName),
- new Function<Map.Entry<String,Map>, Iterator<Map.Entry>>() {
- public Iterator<Map.Entry> apply(Map.Entry<String,Map> theEntry) {
- List<Map> l = (List<Map>)theEntry.getValue().get("requirements");
- return l == null
- ? Collections.emptyIterator()
- : Iterators.concat(
- Iterators.transform(
- l.iterator(),
- new Function<Map, Iterator<Map.Entry>> () {
- public Iterator<Map.Entry> apply(Map theEntry) {
- return theEntry.entrySet().iterator();
- }
- }
- )
- );
- }
- }
- )
- );
- }
-
- /* Example: find the definition of property 'port' of the node type
- * tosca.nodes.Database (properties being a facet of the node construct)
- *
- * Note: the definition of a facet is cumulative, i.e. more specialized
- * definitions contribute (by overwriting) to the
- */
- public Map getFacetDefinition(Construct theConstruct,
- String theConstructTypeName,
- Facet theFacet,
- String theName) {
- Map def = null;
- Iterator<Map.Entry<String,Map>> ti = hierarchy(theConstruct, theConstructTypeName);
- while (ti.hasNext()) {
- //this is where requirements would yield a List ..
- Map<String,Map> fset = (Map<String,Map>)ti.next().getValue().get(theFacet.name());
- if (fset != null) {
- def = def == null ? fset.get(theName)
- : mergeDefinitions(def, fset.get(theName));
- }
- }
- return def;
- }
-
- public Map getRequirementDefinition(Construct theConstruct,
- String theConstructTypeName,
- String theName) {
- Iterator<Map.Entry<String,Map>> ti = hierarchy(theConstruct, theConstructTypeName);
- while (ti.hasNext()) {
- //this is where requirements yield a List ..
- List<Map> reqs = (List<Map>)ti.next().getValue().get("requirements");
-
- if(reqs!=null){
- for (Map req: reqs) {
- Map.Entry reqe = (Map.Entry)req.entrySet().iterator().next();
- if (theName.equals(reqe.getKey())) {
- return (Map)reqe.getValue();
- }
- }
- }else{
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Avoiding adding requirment block since it doesn't exists on the template....");
- }
- }
- return null;
- }
+ Set insts = new HashSet();
+ public boolean apply(Map.Entry theEntry) {
+ return !insts.contains(theEntry.getKey());
+ }
+ }
+ );
+ }
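
The TODO above points out that duplicated facet entries are filtered rather than merged. A hedged sketch of the merge-based variant it hints at, written as a hypothetical extra method on Catalog (not part of this commit) that folds refinements into base definitions via mergeDefinitions:

    // hypothetical addition, not in the commit: collapse a facet across the whole
    // hierarchy into one map, letting specialized definitions refine base ones
    public Map<String, Map> mergedFacets(Construct theConstruct, Facet theFacet, String theName) {
        Map<String, Map> merged = new LinkedHashMap<>();
        Iterator<Map.Entry<String, Map>> ti = hierarchy(theConstruct, theName);
        while (ti.hasNext()) {
            Map facetBlock = (Map) ti.next().getValue().get(theFacet.name());
            if (facetBlock == null) {
                continue;
            }
            for (Object o : facetBlock.entrySet()) {
                Map.Entry e = (Map.Entry) o;
                // hierarchy() starts at the most specialized type and mergeDefinitions
                // keeps keys already present, so refinements win and base keys fill gaps
                merged.merge((String) e.getKey(), (Map) e.getValue(), Catalog::mergeDefinitions);
            }
        }
        return merged;
    }

Note that mergeDefinitions mutates its first argument, so a production version would likely copy the per-entry maps first.
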
+
+ //no need to specify a construct, only nodes can have requirements
+ public Iterator<Map.Entry> requirements(final String theName) {
+ return
+ Iterators.concat(
+ Iterators.transform(
+ hierarchy(Construct.Node, theName),
+ theEntry -> {
+ List<Map> l = (List<Map>)theEntry.getValue().get("requirements");
+ return l == null
+ ? Collections.emptyIterator()
+ : Iterators.concat(
+ Iterators.transform(
+ l.iterator(),
+ (Function<Map, Iterator<Map.Entry>>) theEntry1 -> theEntry1.entrySet().iterator()
+ )
+ );
+ }
+ )
+ );
+ }
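
requirements() leans on the TOSCA convention that a node type's requirements block is a list of single-entry maps, one per named requirement, which is why the code flattens a List<Map> into a stream of Map.Entry objects. A small sketch of that shape built directly in Java; the requirement and capability names are illustrative:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;
    import java.util.Map;

    public class RequirementsShape {
        public static void main(String[] args) {
            // TOSCA YAML:
            //   requirements:
            //     - host:     { capability: tosca.capabilities.Compute }
            //     - database: { capability: tosca.capabilities.Endpoint.Database }
            List<Map> requirements = Arrays.asList(
                    (Map) Collections.singletonMap("host",
                            Collections.singletonMap("capability", "tosca.capabilities.Compute")),
                    (Map) Collections.singletonMap("database",
                            Collections.singletonMap("capability", "tosca.capabilities.Endpoint.Database")));

            // mirrors the unwrapping done in requirements() and getRequirementDefinition()
            for (Map req : requirements) {
                Map.Entry e = (Map.Entry) req.entrySet().iterator().next();
                System.out.println(e.getKey() + " -> " + e.getValue());
            }
        }
    }
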
+
+ /* Example: find the definition of property 'port' of the node type
+ * tosca.nodes.Database (properties being a facet of the node construct)
+ *
+ * Note: the definition of a facet is cumulative, i.e. more specialized
+ * definitions contribute (by overwriting) to the final, merged definition.
+ */
+ public Map getFacetDefinition(Construct theConstruct,
+ String theConstructTypeName,
+ Facet theFacet,
+ String theName) {
+ Map def = null;
+ Iterator<Map.Entry<String,Map>> ti = hierarchy(theConstruct, theConstructTypeName);
+ while (ti.hasNext()) {
+ //this is where requirements would yield a List ..
+ Map<String,Map> fset = (Map<String,Map>)ti.next().getValue().get(theFacet.name());
+ if (fset != null) {
+ def = def == null ? fset.get(theName)
+ : mergeDefinitions(def, fset.get(theName));
+ }
+ }
+ return def;
+ }
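
Following the 'port' example from the comment above, a hedged sketch of the cumulative lookup on a toy two-level hierarchy; the type names come from the comment, the property definitions are invented:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    import org.onap.sdc.dcae.checker.Catalog;
    import org.onap.sdc.dcae.checker.Construct;
    import org.onap.sdc.dcae.checker.Facet;

    public class FacetLookupDemo {
        public static void main(String[] args) {
            Catalog cat = new Catalog();

            // base type declares 'port' with only a type
            Map<String, Object> rootDef = new HashMap<>();
            rootDef.put("properties", Collections.singletonMap("port",
                    new HashMap<>(Collections.singletonMap("type", "integer"))));
            cat.addType(Construct.Node, "tosca.nodes.Root", rootDef);

            // derived type refines 'port' with a default; 'type' is inherited
            Map<String, Object> dbDef = new HashMap<>();
            dbDef.put("derived_from", "tosca.nodes.Root");
            dbDef.put("properties", Collections.singletonMap("port",
                    new HashMap<>(Collections.singletonMap("default", 5432))));
            cat.addType(Construct.Node, "tosca.nodes.Database", dbDef);

            // the merged view carries both default=5432 (from Database) and type=integer (from Root)
            Map port = cat.getFacetDefinition(Construct.Node, "tosca.nodes.Database", Facet.properties, "port");
            System.out.println(port);
        }
    }
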
+
+ public Map getRequirementDefinition(Construct theConstruct,
+ String theConstructTypeName,
+ String theName) {
+ Iterator<Map.Entry<String,Map>> ti = hierarchy(theConstruct, theConstructTypeName);
+ while (ti.hasNext()) {
+ //this is where requirements yield a List ..
+ List<Map> reqs = (List<Map>)ti.next().getValue().get("requirements");
+
+ if(reqs!=null) {
+ for (Map req: reqs) {
+ Map.Entry reqe = (Map.Entry)req.entrySet().iterator().next();
+ if (theName.equals(reqe.getKey())) {
+ return (Map)reqe.getValue();
+ }
+ }
+ } else {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Avoiding adding requirement block since it doesn't exist on the template....");
+ }
+ }
+ return null;
+ }
/* */
private EnumMap<Construct,Map<String,Map>> getTemplates(Target theTarget) {
- EnumMap<Construct, Map<String,Map>> targetTemplates = templates.get(theTarget);
- if (targetTemplates == null) {
- targetTemplates = new EnumMap<Construct,Map<String,Map>>(Construct.class);
- targetTemplates.put(Construct.Data, new LinkedHashMap<String, Map>());
- targetTemplates.put(Construct.Relationship, new LinkedHashMap<String, Map>());
- targetTemplates.put(Construct.Node, new LinkedHashMap<String, Map>());
- targetTemplates.put(Construct.Group, new LinkedHashMap<String, Map>());
- targetTemplates.put(Construct.Policy, new LinkedHashMap<String, Map>());
-
- templates.put(theTarget, targetTemplates);
- }
- return targetTemplates;
- }
-
- public Map<String,Map> getTargetTemplates(Target theTarget, Construct theConstruct) {
- return getTemplates(theTarget).get(theConstruct);
- }
-
- public void addTemplate(Target theTarget, Construct theConstruct, String theName, Map theDef)
- throws CatalogException {
- Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct);
- if (null == constructTemplates) {
+ EnumMap<Construct, Map<String,Map>> targetTemplates = templates.get(theTarget);
+ if (targetTemplates == null) {
+ targetTemplates = new EnumMap<>(Construct.class);
+ targetTemplates.put(Construct.Data, new LinkedHashMap<>());
+ targetTemplates.put(Construct.Relationship, new LinkedHashMap<>());
+ targetTemplates.put(Construct.Node, new LinkedHashMap<>());
+ targetTemplates.put(Construct.Group, new LinkedHashMap<>());
+ targetTemplates.put(Construct.Policy, new LinkedHashMap<>());
+
+ templates.put(theTarget, targetTemplates);
+ }
+ return targetTemplates;
+ }
+
+ public Map<String,Map> getTargetTemplates(Target theTarget, Construct theConstruct) {
+ return getTemplates(theTarget).get(theConstruct);
+ }
+
+ public void addTemplate(Target theTarget, Construct theConstruct, String theName, Map theDef)
+ throws CatalogException {
+ Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct);
+ if (null == constructTemplates) {
throw new CatalogException("No such thing as " + theConstruct + " templates");
- }
+ }
if (constructTemplates.containsKey(theName)) {
throw new CatalogException(theConstruct + " template '" + theName + "' re-declaration");
}
@@ -286,159 +284,159 @@ public class Catalog {
public boolean hasTemplate(Target theTarget, Construct theConstruct, String theName) {
Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct);
return constructTemplates != null &&
- constructTemplates.containsKey(theName);
+ constructTemplates.containsKey(theName);
}
public Map getTemplate(Target theTarget, Construct theConstruct, String theName) {
Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct);
- if (constructTemplates != null)
- return constructTemplates.get(theName);
- else
- return null;
+ if (constructTemplates != null) {
+ return constructTemplates.get(theName);
+ } else {
+ return null;
+ }
}
- public static Map mergeDefinitions(Map theAggregate, Map theIncrement) {
- if (theIncrement == null)
- return theAggregate;
-
- for(Map.Entry e: (Set<Map.Entry>)theIncrement.entrySet()) {
- theAggregate.putIfAbsent(e.getKey(), e.getValue());
- }
- return theAggregate;
- }
+ public static Map mergeDefinitions(Map theAggregate, Map theIncrement) {
+ if (theIncrement == null) {
+ return theAggregate;
+ }
- /* tracks imports, i.e.targets */
- private LinkedHashMap<URI, Target> targets =
- new LinkedHashMap<URI, Target>();
- /* tracks dependencies between targets, i.e. the 'adjency' matrix defined by
- * the 'import' relationship */
- private Table<Target,Target,Boolean> imports = HashBasedTable.create();
+ for(Map.Entry e: (Set<Map.Entry>)theIncrement.entrySet()) {
+ theAggregate.putIfAbsent(e.getKey(), e.getValue());
+ }
+ return theAggregate;
+ }
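
Because getFacetDefinition() folds a whole hierarchy through this method, it is worth seeing the putIfAbsent semantics in isolation: keys already in the aggregate win, the increment only fills gaps, and the aggregate is mutated in place. A tiny illustration with invented values:

    import java.util.HashMap;
    import java.util.Map;

    import org.onap.sdc.dcae.checker.Catalog;

    public class MergeDemo {
        public static void main(String[] args) {
            Map<String, Object> specialized = new HashMap<>();
            specialized.put("default", 5432);            // refinement from the derived type

            Map<String, Object> base = new HashMap<>();
            base.put("type", "integer");                 // only this is copied over
            base.put("default", 0);                      // ignored: key already present

            Map merged = Catalog.mergeDefinitions(specialized, base);
            System.out.println(merged == specialized);   // true: the aggregate is returned as-is
            System.out.println(merged);                  // contains default=5432 and type=integer
        }
    }
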
- /*
+ /*
* theParent contains an 'include/import' statement pointing to the Target
*/
- public boolean addTarget(Target theTarget, Target theParent) {
- boolean cataloged = targets.containsKey(theTarget.getLocation());
-
- if(!cataloged) {
- targets.put(theTarget.getLocation(), theTarget);
- }
-
- if (theParent != null) {
- imports.put(theParent, theTarget, Boolean.TRUE);
- }
-
- return !cataloged;
- }
-
- public Target getTarget(URI theLocation) {
- return targets.get(theLocation);
- }
-
- public Collection<Target> targets() {
- return targets.values();
- }
-
- /* Targets that no other targets depend on */
- public Collection<Target> topTargets() {
- return targets.values()
- .stream()
- .filter(t -> !imports.containsColumn(t))
- .collect(Collectors.toList());
-
- }
-
- public String importString(Target theTarget) {
- return importString(theTarget, " ");
- }
-
- private String importString(Target theTarget, String thePrefix) {
- StringBuilder sb = new StringBuilder("");
- Map<Target,Boolean> parents = imports.column(theTarget);
- if (parents != null) {
- for (Target p: parents.keySet()) {
- sb.append(thePrefix)
- .append("from ")
- .append(p.getLocation())
- .append("\n")
- .append(importString(p, thePrefix + " "));
- }
- //we only keep the positive relationships
- }
- return sb.toString();
- }
-
- /* */
- private class TargetComparator implements Comparator<Target> {
-
- /* @return 1 if there is a dependency path from TargetOne to TargetTwo, -1 otherwise */
- public int compare(Target theTargetOne, Target theTargetTwo) {
- if (hasPath(theTargetTwo, theTargetOne))
- return -1;
-
- if (hasPath(theTargetOne, theTargetTwo))
- return 1;
-
- return 0;
- }
-
- public boolean hasPath(Target theStart, Target theEnd) {
- Map<Target,Boolean> deps = imports.row(theStart);
- if (deps.containsKey(theEnd))
- return true;
- for (Target dep: deps.keySet()) {
- if (hasPath(dep, theEnd))
- return true;
- }
- return false;
- }
- }
-
- public Collection<Target> sortedTargets() {
- List keys = new ArrayList(this.targets.values());
- Collections.sort(keys, new TargetComparator());
- return keys;
- }
-
- public static void main(String[] theArgs) throws Exception {
-
- Catalog cat = new Catalog();
-
- Target a = new Target("a", new URI("a")),
- b = new Target("b", new URI("b")),
- c = new Target("c", new URI("c")),
- d = new Target("d", new URI("d"));
-
- cat.addTarget(a, null);
- cat.addTarget(b, null);
- cat.addTarget(c, null);
- cat.addTarget(d, null);
-
- cat.addTarget(b, c);
- cat.addTarget(a, c);
- cat.addTarget(c, d);
- cat.addTarget(a, b);
-
- for (Target t: cat.sortedTargets())
- debugLogger.log(LogLevel.DEBUG, Catalog.class.getName(), t.toString());
-
- Catalog root = new Catalog();
- root.addType(Construct.Node, "_a", Collections.emptyMap());
- root.addType(Construct.Node, "__a", Collections.singletonMap("derived_from", "_a"));
- root.addType(Construct.Node, "___a", Collections.singletonMap("derived_from", "_a"));
-
- Catalog base = new Catalog(root);
- base.addType(Construct.Node, "_b", Collections.singletonMap("derived_from", "__a"));
- base.addType(Construct.Node, "__b", Collections.singletonMap("derived_from", "_b"));
- base.addType(Construct.Node, "__b_", Collections.singletonMap("derived_from", "_a"));
-
- if (theArgs.length > 0) {
- Iterator<Map.Entry<String, Map>> ti =
- base.hierarchy(Construct.Node, theArgs[0]);
- while (ti.hasNext()) {
+ public boolean addTarget(Target theTarget, Target theParent) {
+ boolean cataloged = targets.containsKey(theTarget.getLocation());
+
+ if(!cataloged) {
+ targets.put(theTarget.getLocation(), theTarget);
+ }
+
+ if (theParent != null) {
+ imports.put(theParent, theTarget, Boolean.TRUE);
+ }
+
+ return !cataloged;
+ }
+
+ public Target getTarget(URI theLocation) {
+ return targets.get(theLocation);
+ }
+
+ public Collection<Target> targets() {
+ return targets.values();
+ }
+
+ /* Targets that no other targets depend on */
+ public Collection<Target> topTargets() {
+ return targets.values()
+ .stream()
+ .filter(t -> !imports.containsColumn(t))
+ .collect(Collectors.toList());
+
+ }
+
+ public String importString(Target theTarget) {
+ return importString(theTarget, " ");
+ }
+
+ private String importString(Target theTarget, String thePrefix) {
+ StringBuilder sb = new StringBuilder("");
+ Map<Target,Boolean> parents = imports.column(theTarget);
+ if (parents != null) {
+ for (Target p: parents.keySet()) {
+ sb.append(thePrefix)
+ .append("from ")
+ .append(p.getLocation())
+ .append("\n")
+ .append(importString(p, thePrefix + " "));
+ }
+ //we only keep the positive relationships
+ }
+ return sb.toString();
+ }
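
importString() prints one indented "from <location>" line per document that (transitively) imports the given target, recursing through the imports table. Reusing the toy graph that main() builds further down, a hedged sketch of what to expect; iteration order and exact indentation depend on the underlying table:

    import java.net.URI;

    import org.onap.sdc.dcae.checker.Catalog;
    import org.onap.sdc.dcae.checker.Target;

    public class ImportTraceDemo {
        public static void main(String[] args) throws Exception {
            Catalog cat = new Catalog();
            Target a = new Target("a", new URI("a"));
            Target b = new Target("b", new URI("b"));
            Target c = new Target("c", new URI("c"));
            Target d = new Target("d", new URI("d"));

            cat.addTarget(b, c);   // c imports b
            cat.addTarget(a, c);   // c imports a
            cat.addTarget(c, d);   // d imports c
            cat.addTarget(a, b);   // b imports a

            // expected output, roughly:
            //  from c
            //   from d
            System.out.print(cat.importString(b));
        }
    }
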
+
+ /* */
+ private class TargetComparator implements Comparator<Target> {
+
+ /* @return 1 if there is a dependency path from TargetOne to TargetTwo, -1 for the reverse path, 0 if the two are unrelated */
+ public int compare(Target theTargetOne, Target theTargetTwo) {
+ if (hasPath(theTargetTwo, theTargetOne)) {
+ return -1;
+ }
+
+ if (hasPath(theTargetOne, theTargetTwo)) {
+ return 1;
+ }
+
+ return 0;
+ }
+
+ boolean hasPath(Target theStart, Target theEnd) {
+ Map<Target,Boolean> deps = imports.row(theStart);
+ if (deps.containsKey(theEnd)) {
+ return true;
+ }
+ for (Target dep: deps.keySet()) {
+ if (hasPath(dep, theEnd)) {
+ return true;
+ }
+ }
+ return false;
+ }
+ }
+
+ public Collection<Target> sortedTargets() {
+ List keys = new ArrayList(this.targets.values());
+ Collections.sort(keys, new TargetComparator());
+ return keys;
+ }
+
+ public static void main(String[] theArgs) throws Exception {
+
+ Catalog cat = new Catalog();
+
+ Target a = new Target("a", new URI("a")),
+ b = new Target("b", new URI("b")),
+ c = new Target("c", new URI("c")),
+ d = new Target("d", new URI("d"));
+
+ cat.addTarget(a, null);
+ cat.addTarget(b, null);
+ cat.addTarget(c, null);
+ cat.addTarget(d, null);
+
+ cat.addTarget(b, c);
+ cat.addTarget(a, c);
+ cat.addTarget(c, d);
+ cat.addTarget(a, b);
+
+ for (Target t: cat.sortedTargets()) {
+ debugLogger.log(LogLevel.DEBUG, Catalog.class.getName(), t.toString());
+ }
+
+ Catalog root = new Catalog();
+ root.addType(Construct.Node, "_a", Collections.emptyMap());
+ root.addType(Construct.Node, "__a", Collections.singletonMap(DERIVED_FROM, "_a"));
+ root.addType(Construct.Node, "___a", Collections.singletonMap(DERIVED_FROM, "_a"));
+
+ Catalog base = new Catalog(root);
+ base.addType(Construct.Node, "_b", Collections.singletonMap(DERIVED_FROM, "__a"));
+ base.addType(Construct.Node, "__b", Collections.singletonMap(DERIVED_FROM, "_b"));
+ base.addType(Construct.Node, "__b_", Collections.singletonMap(DERIVED_FROM, "_a"));
+
+ if (theArgs.length > 0) {
+ Iterator<Map.Entry<String, Map>> ti =
+ base.hierarchy(Construct.Node, theArgs[0]);
+ while (ti.hasNext()) {
debugLogger.log(LogLevel.DEBUG, Catalog.class.getName(), "> {}", ti.next().getKey());
- }
- }
- }
+ }
+ }
+ }
}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java
index fee617f..bf1843e 100644
--- a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java
@@ -87,6 +87,7 @@ public class Checker {
private static final String WAS_DEFINED_FOR_THE_NODE_TYPE = " was defined for the node type ";
private static final String UNKNOWN = "Unknown ";
private static final String TYPE = " type ";
+ public static final String IMPORTED_FROM = "',imported from ";
private Target target = null; //what we're validating at the moment
@@ -2404,9 +2405,7 @@ public class Checker {
return false;
}
- for (Iterator<Map.Entry<String, Map>> ai = augs.entrySet().iterator(); ai.hasNext(); ) {
- Map.Entry<String, Map> ae = ai.next();
-
+ for (Map.Entry<String, Map> ae : augs.entrySet()) {
//make sure it was declared by the type
Map facetDef = catalog.getFacetDefinition(theConstruct, theSpecType, theFacet, ae.getKey());
if (facetDef == null) {
@@ -2715,7 +2714,7 @@ public class Checker {
private String patchWhitespaces(String thePath) {
String[] elems = thePath.split("/");
- StringBuffer path = new StringBuffer();
+ StringBuilder path = new StringBuilder();
for (int i = 0; i < elems.length; i++) {
if (spacePattern.matcher(elems[i]).find()) {
path.append("[@name='")
@@ -2836,7 +2835,7 @@ public class Checker {
hookHandler = Invokable.from(m);
} catch (NoSuchMethodException nsmx) {
//that's ok, not every rule has to have a handler
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "That's ok, not every rule has to have a handler. Method name =", theHookName);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "That's ok, not every rule has to have a handler. Method name is:{}. Exception:{}", theHookName,nsmx);
}
if (hookHandler != null) {
@@ -3120,7 +3119,7 @@ substitute the canonical form for the short form so that checking does not have
debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "entering range_definition {}",
theContext.getPath());
- assert theRule.getType().equals("seq");
+ assert "seq".equals(theRule.getType());
List bounds = (List) theValue;
if (bounds.size() != 2) {
@@ -3148,10 +3147,10 @@ substitute the canonical form for the short form so that checking does not have
* early processing (validation time) of the imports allows us to catalog
* their types before those declared in the main document.
*/
- protected void imports_post_validation_handler(Object theValue, Rule theRule,
- Validator.ValidationContext theContext) {
+ protected void imports_post_validation_handler(Object theValue, Rule theRule, Validator.ValidationContext theContext) {
debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering imports {}", theContext.getPath());
- assert theRule.getType().equals("seq");
+
+ assert "seq".equals(theRule.getType());
Target tgt = ((TOSCAValidator) theContext.getValidator()).getTarget();
@@ -3179,8 +3178,9 @@ substitute the canonical form for the short form so that checking does not have
try {
List<Target> tgtis = parseTarget(tgti);
- if (tgtis.isEmpty())
- continue;
+ if (tgtis.isEmpty()) {
+ continue;
+ }
if (tgtis.size() > 1) {
theContext.addError(
@@ -3191,21 +3191,20 @@ substitute the canonical form for the short form so that checking does not have
tgti = tgtis.get(0);
- // tgti = parseTarget(tgti);
if (tgt.getReport().hasErrors()) {
- theContext.addError("Failure parsing import '" + tgti + "',imported from " + tgt, theRule, null,
+ theContext.addError("Failure parsing import '" + tgti + IMPORTED_FROM + tgt, theRule, null,
null);
continue;
}
validateTarget(tgti);
if (tgt.getReport().hasErrors()) {
- theContext.addError("Failure validating import '" + tgti + "',imported from " + tgt, theRule,
+ theContext.addError("Failure validating import '" + tgti + IMPORTED_FROM + tgt, theRule,
null, null);
continue;
}
} catch (CheckerException cx) {
- theContext.addError("Failure validating import '" + tgti + "',imported from " + tgt, theRule, cx,
+ theContext.addError("Failure validating import '" + tgti + IMPORTED_FROM + tgt, theRule, cx,
null);
}
}
@@ -3222,7 +3221,7 @@ substitute the canonical form for the short form so that checking does not have
Validator.ValidationContext theContext) {
debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering node_templates_post_validation_handler {}",
theContext.getPath());
- assert theRule.getType().equals("map");
+ assert "map".equals(theRule.getType());
Map<String, Map> nodeTemplates = (Map<String, Map>) theValue;
for (Iterator<Map.Entry<String, Map>> i = nodeTemplates.entrySet().iterator(); i.hasNext();) {
Map.Entry<String, Map> node = i.next();
@@ -3267,17 +3266,18 @@ substitute the canonical form for the short form so that checking does not have
private void process(String theProcessorSpec) throws CheckerException {
String[] spec = theProcessorSpec.split(" ");
- if (spec.length == 0)
- throw new IllegalArgumentException("Incomplete processor specification");
+ if (spec.length == 0) {
+ throw new IllegalArgumentException("Incomplete processor specification");
+ }
- Class processorClass = null;
+ Class processorClass;
try {
processorClass = Class.forName(spec[0]);
} catch (ClassNotFoundException cnfx) {
throw new CheckerException("Cannot find processor implementation", cnfx);
}
- Processor proc = null;
+ Processor proc;
try {
proc = (Processor) ConstructorUtils.invokeConstructor(processorClass,
Arrays.copyOfRange(spec, 1, spec.length));
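
process() expects a single space-separated spec string: the first token is a fully qualified Processor class name handed to Class.forName, and the remaining tokens are passed as String constructor arguments through ConstructorUtils. A small sketch of that format; the processor class name and arguments are hypothetical:

    import java.util.Arrays;

    public class ProcessorSpecDemo {
        public static void main(String[] args) {
            // format assumed by Checker.process(String):
            //   "<fully.qualified.ProcessorClass> <ctor arg 1> <ctor arg 2> ..."
            String spec = "org.example.MyProcessor /tmp/profile.yaml strict";   // hypothetical

            String[] parts = spec.split(" ");
            String className = parts[0];                                        // -> Class.forName(className)
            String[] ctorArgs = Arrays.copyOfRange(parts, 1, parts.length);     // -> invokeConstructor(clazz, ctorArgs)
            System.out.println(className + " " + Arrays.toString(ctorArgs));
        }
    }
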
@@ -3296,8 +3296,9 @@ substitute the canonical form for the short form so that checking does not have
return;
}
// check artifact type
- if (!checkType(Construct.Artifact, theDef, theContext))
- return;
+ if (!checkType(Construct.Artifact, theDef, theContext)) {
+ return;
+ }
} finally {
theContext.exit();
}
@@ -3312,8 +3313,8 @@ substitute the canonical form for the short form so that checking does not have
return;
}
- if (theDefinition.containsKey("properties")) {
- check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+ if (theDefinition.containsKey(PROPERTIES)) {
+ check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
checkTypeConstructFacet(Construct.Policy, theName, theDefinition, Facet.properties, theContext);
}
@@ -3347,8 +3348,8 @@ substitute the canonical form for the short form so that checking does not have
return;
}
- if (theDefinition.containsKey("properties")) {
- check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+ if (theDefinition.containsKey(PROPERTIES)) {
+ check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
checkTypeConstructFacet(Construct.Group, theName, theDefinition, Facet.properties, theContext);
}
@@ -3385,8 +3386,8 @@ substitute the canonical form for the short form so that checking does not have
return;
}
- if (theDefinition.containsKey("properties")) {
- check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+ if (theDefinition.containsKey(PROPERTIES)) {
+ check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
checkTypeConstructFacet(Construct.Node, theName, theDefinition, Facet.properties, theContext);
}
@@ -3401,8 +3402,8 @@ substitute the canonical form for the short form so that checking does not have
}
// capabilities
- if (theDefinition.containsKey("capabilities")) {
- check_capabilities((Map<String, Map>) theDefinition.get("capabilities"), theContext);
+ if (theDefinition.containsKey(CAPABILITIES)) {
+ check_capabilities((Map<String, Map>) theDefinition.get(CAPABILITIES), theContext);
}
// interfaces:
@@ -3470,8 +3471,8 @@ substitute the canonical form for the short form so that checking does not have
return;
}
- if (theDefinition.containsKey("properties")) {
- check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+ if (theDefinition.containsKey(PROPERTIES)) {
+ check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, Facet.properties, theContext);
}
@@ -3490,9 +3491,9 @@ substitute the canonical form for the short form so that checking does not have
theContext.exit();
}
- if (theDefinition.containsKey("valid_target_types")) {
+ if (theDefinition.containsKey(VALID_TARGET_TYPES)) {
checkTypeReference(Construct.Capability, theContext,
- ((List<String>) theDefinition.get("valid_target_types")).toArray(EMPTY_STRING_ARRAY));
+ ((List<String>) theDefinition.get(VALID_TARGET_TYPES)).toArray(EMPTY_STRING_ARRAY));
}
} finally {
theContext.exit();
@@ -3508,8 +3509,8 @@ substitute the canonical form for the short form so that checking does not have
return;
}
- if (theDefinition.containsKey("properties")) {
- check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+ if (theDefinition.containsKey(PROPERTIES)) {
+ check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
checkTypeConstructFacet(Construct.Capability, theName, theDefinition, Facet.properties, theContext);
}
@@ -3539,8 +3540,8 @@ substitute the canonical form for the short form so that checking does not have
return;
}
- if (theDefinition.containsKey("properties")) {
- check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+ if (theDefinition.containsKey(PROPERTIES)) {
+ check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
checkTypeConstructFacet(Construct.Data, theName, theDefinition, Facet.properties, theContext);
}
} finally {
@@ -3594,8 +3595,9 @@ substitute the canonical form for the short form so that checking does not have
public void check_attributes(Map<String, Map> theDefinitions, CheckContext theContext) {
theContext.enter("attributes");
try {
- if (!checkDefinition("attributes", theDefinitions, theContext))
- return;
+ if (!checkDefinition("attributes", theDefinitions, theContext)) {
+ return;
+ }
for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext();) {
Map.Entry<String, Map> e = i.next();
@@ -3625,10 +3627,11 @@ substitute the canonical form for the short form so that checking does not have
}
public void check_properties(Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter("properties");
+ theContext.enter(PROPERTIES);
try {
- if (!checkDefinition("properties", theDefinitions, theContext))
- return;
+ if (!checkDefinition(PROPERTIES, theDefinitions, theContext)) {
+ return;
+ }
for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext();) {
Map.Entry<String, Map> e = i.next();
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java
index 295a1f2..acc0a4a 100644
--- a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java
@@ -20,125 +20,128 @@ import org.onap.sdc.common.onaplog.Enums.LogLevel;
public class CommonLocator implements TargetLocator {
- private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
- private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
-
- private Set<URI> searchPaths = new LinkedHashSet();
-
- /* will create a locator with 2 default search paths: the file directory
- * from where the app was and the jar from which this checker (actually this
- * class) was loaded */
- public CommonLocator() {
- addSearchPath(
- Paths.get(".").toAbsolutePath().normalize().toUri());
- }
-
- public CommonLocator(String... theSearchPaths) {
- for (String path: theSearchPaths) {
- addSearchPath(path);
- }
- }
-
- public boolean addSearchPath(URI theURI) {
-
- if (!theURI.isAbsolute()) {
- errLogger.log(LogLevel.WARN, this.getClass().getName(), "Search paths must be absolute uris: {}", theURI);
- return false;
- }
-
- return searchPaths.add(theURI);
- }
-
- public boolean addSearchPath(String thePath) {
- URI suri = null;
- try {
- suri = new URI(thePath);
- }
- catch(URISyntaxException urisx) {
- errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid search path: {} {}", thePath, urisx);
- return false;
- }
-
- return addSearchPath(suri);
- }
-
- public Iterable<URI> searchPaths() {
- return Iterables.unmodifiableIterable(this.searchPaths);
- }
-
- /**
- * Takes the given path and first URI resolves it and then attempts to open
- * it (a way of verifying its existence) against each search path and stops
- * at the first succesful test.
- */
- public Target resolve(String theName) {
- URI puri = null;
- InputStream pis = null;
-
- //try classpath
- URL purl = getClass().getClassLoader().getResource(theName);
- if (purl != null) {
- try {
- return new Target(theName, purl.toURI());
- }
- catch (URISyntaxException urisx) {
- errLogger.log(LogLevel.ERROR, this.getClass().getName(), "The file {} wasn't found {}", theName, urisx);
- }
- }
-
- //try absolute
- try {
- puri = new URI(theName);
- if (puri.isAbsolute()) {
- try {
- pis = puri.toURL().openStream();
- }
- catch (IOException iox) {
- errLogger.log(LogLevel.WARN, this.getClass().getName(), "The path {} is an absolute uri but it cannot be opened {}", theName, iox);
- return null;
- }
- }
- }
- catch(URISyntaxException urisx) {
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "TargetResolver failed attempting {} {}", puri, urisx);
- //keep it silent but what are the chances ..
- }
-
- //try relative to the search paths
- for (URI suri: searchPaths) {
- try {
- puri = suri.resolve(theName);
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "TargetResolver trying {}", puri);
- pis = puri.toURL().openStream();
- return new Target(theName, puri.normalize());
- }
- catch (Exception x) {
- debugLogger.log(LogLevel.ERROR, this.getClass().getName(), "TargetResolver failed attempting {} {}", puri, x);
- continue;
- }
- finally {
- if (pis!= null) {
- try {
- pis.close();
- }
- catch (IOException iox) {
- }
- }
- }
- }
-
- return null;
- }
-
- public String toString() {
- return "CommonLocator(" + this.searchPaths + ")";
- }
-
-
- public static void main(String[] theArgs) {
- TargetLocator tl = new CommonLocator();
- tl.addSearchPath(java.nio.file.Paths.get("").toUri());
- tl.addSearchPath("file:///");
- debugLogger.log(LogLevel.DEBUG, CommonLocator.class.getName(), tl.resolve(theArgs[0]).toString());
- }
+ private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+ private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+ private Set<URI> searchPaths = new LinkedHashSet<>();
+
+ /* will create a locator with 2 default search paths: the file directory
+ * from where the app was launched and the jar from which this checker (actually this
+ * class) was loaded */
+ CommonLocator() {
+ addSearchPath(
+ Paths.get(".").toAbsolutePath().normalize().toUri());
+ }
+
+ public boolean addSearchPath(URI theURI) {
+
+ if (!theURI.isAbsolute()) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "Search paths must be absolute uris: {}", theURI);
+ return false;
+ }
+
+ return searchPaths.add(theURI);
+ }
+
+ public boolean addSearchPath(String thePath) {
+ URI suri;
+ try {
+ suri = new URI(thePath);
+ }
+ catch(URISyntaxException urisx) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid search path: {} {}", thePath, urisx);
+ return false;
+ }
+
+ return addSearchPath(suri);
+ }
+
+ public Iterable<URI> searchPaths() {
+ return Iterables.unmodifiableIterable(this.searchPaths);
+ }
+
+ /**
+ * Takes the given path and first URI resolves it and then attempts to open
+ * it (a way of verifying its existence) against each search path and stops
+ * at the first successful test.
+ */
+ public Target resolve(String theName) {
+ URI puri = null;
+ InputStream pis = null;
+
+ //try classpath
+ URL purl = getClass().getClassLoader().getResource(theName);
+ if (purl != null) {
+ try {
+ return new Target(theName, purl.toURI());
+ }
+ catch (URISyntaxException urisx) {
+ errLogger.log(LogLevel.ERROR, this.getClass().getName(), "The file {} wasn't found {}", theName, urisx);
+ }
+ }
+
+ //try absolute
+ try {
+ puri = new URI(theName);
+ if (puri.isAbsolute()) {
+ pis = getPathInputStream(puri,theName);
+ if (pis == null){
+ return null;
+ }
+ }
+ }
+ catch(URISyntaxException urisx) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "TargetResolver failed attempting {} {}", theName, urisx);
+ //keep it silent but what are the chances ..
+ }
+
+ //try relative to the search paths
+ for (URI suri: searchPaths) {
+ try {
+ puri = suri.resolve(theName);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "TargetResolver trying {}", puri);
+ pis = puri.toURL().openStream();
+ return new Target(theName, puri.normalize());
+ }
+ catch (Exception x) {
+ debugLogger.log(LogLevel.ERROR, this.getClass().getName(), "TargetResolver failed attempting {} {}", puri, x);
+ }
+ finally {
+ if (pis!= null) {
+ try {
+ pis.close();
+ }
+ catch (IOException iox) {
+ debugLogger.log(LogLevel.ERROR, this.getClass().getName(),"Error closing input stream {}", iox);
+ }
+ }
+ }
+ }
+
+ return null;
+ }
+
+ private InputStream getPathInputStream(URI puri, String theName){
+ InputStream res = null;
+ try (InputStream pis = puri.toURL().openStream()){
+ res = pis;
+ }
+ catch (IOException iox) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "The path {} is an absolute uri but it cannot be opened {}", theName, iox);
+ }
+ return res;
+ }
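
Note that the try-with-resources block above closes pis before getPathInputStream returns it, so the stream resolve() receives for an absolute URI is already closed and the call effectively only probes whether the URI can be opened. If an open stream were genuinely needed, a sketch along these lines (caller closes, as resolve()'s finally block already does) would be one option; openIfAbsolute is a hypothetical method of CommonLocator, not part of the commit:

    // illustrative alternative only: hand back the stream still open and let the caller close it
    private InputStream openIfAbsolute(URI puri, String theName) {
        try {
            return puri.toURL().openStream();
        } catch (IOException iox) {
            errLogger.log(LogLevel.WARN, this.getClass().getName(),
                    "The path {} is an absolute uri but it cannot be opened {}", theName, iox);
            return null;
        }
    }
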
+
+
+ public String toString() {
+ return "CommonLocator(" + this.searchPaths + ")";
+ }
+
+
+ public static void main(String[] theArgs) {
+ TargetLocator tl = new CommonLocator();
+ tl.addSearchPath(java.nio.file.Paths.get("").toUri());
+ tl.addSearchPath("file:///");
+ debugLogger.log(LogLevel.DEBUG, CommonLocator.class.getName(), tl.resolve(theArgs[0]).toString());
+ }
}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java
index b05cff9..76dfca3 100644
--- a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java
@@ -8,15 +8,15 @@ package org.onap.sdc.dcae.checker;
*/
public enum Construct {
Data,
- Requirement,
+ Requirement,
Capability,
Relationship,
Artifact,
Interface,
Node,
- Group,
- Policy,
- Workflow
+ Group,
+ Policy,
+ Workflow
}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java
index 70552bb..879e5dc 100644
--- a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java
@@ -14,45 +14,33 @@ import java.util.regex.PatternSyntaxException;
import com.google.common.collect.Table;
import com.google.common.collect.HashBasedTable;
import org.onap.sdc.common.onaplog.OnapLoggerDebug;
-import org.onap.sdc.common.onaplog.OnapLoggerError;
import org.onap.sdc.common.onaplog.Enums.LogLevel;
/*
* String -- 'primitive tosca type' converters, used in verifying valuations
*/
public class Data {
- private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
private Data() {
}
-
- /*
- */
+
+
@FunctionalInterface
public static interface Evaluator {
-
public boolean eval(Object theExpr, Map theDef, Checker.CheckContext theCtx);
}
/* data type processing */
- private static Map<String,Type> typesByName = new HashMap<String,Type>();
+ private static Map<String,Type> typesByName = new HashMap<>();
static {
- //CoreType.String.toString();
- //CoreFunction.concat.toString();
- //Constraint.equal.toString();
}
-
public static Data.Type typeByName(String theName) {
return typesByName.getOrDefault(theName, userType);
}
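
typeByName falls back to userType for any name that is not a registered primitive, so callers never get null back. A hedged sketch of how the evaluator plumbing is typically driven, assuming the helper lives alongside the checker classes; checkValue is a hypothetical name, and the Checker.CheckContext is taken as a parameter because constructing one is outside the scope of this sketch:

    // hypothetical helper, not part of the commit: validate one value against a
    // property definition map of the shape {type: ..., constraints: [...]}
    static boolean checkValue(Object value, java.util.Map propDef, Checker.CheckContext ctx) {
        Data.Type type = Data.typeByName((String) propDef.get("type"));
        boolean ok = type.evaluator().eval(value, propDef, ctx);
        if (ok && type.constraintsEvaluator() != null) {
            ok = type.constraintsEvaluator().eval(value, propDef, ctx);
        }
        return ok;
    }
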
-/*
- public static Evaluator getTypeEvaluator(Type theType) {
- }
-*/
/* Needs a better name ?? RValue??
* This is not an rvalue (C def) per se but the construct whose instances
@@ -101,7 +89,7 @@ public class Data {
(expr,def,ctx) -> Data.valueOf(ctx, expr, Boolean.class),
Data::evalScalarConstraints),
Null("null",
- (expr,def,ctx) -> expr.equals("null"),
+ (expr,def,ctx) -> "null".equals(expr),
null),
Timestamp("timestamp",
(expr,def,ctx) -> timestampRegex.matcher(expr.toString()).matches(),
@@ -114,7 +102,7 @@ public class Data {
/* use a scanner and check that the upper bound is indeed greater than
* the lower bound */
Range("range",
- (expr,def,ctx) -> { return rangeRegex.matcher(expr.toString()).matches();},
+ (expr,def,ctx) -> rangeRegex.matcher(expr.toString()).matches(),
null ),
Size("scalar-unit.size",
(expr,def,ctx) -> sizeRegex.matcher(expr.toString()).matches(),
@@ -128,20 +116,25 @@ public class Data {
private String toscaName;
- private Evaluator valueEvaluator,
- constraintsEvaluator;
+ private Evaluator valueEvaluator, constraintsEvaluator;
+
+
private CoreType(String theName, Evaluator theValueEvaluator, Evaluator theConstraintsEvaluator) {
this.toscaName = theName;
this.valueEvaluator = theValueEvaluator;
this.constraintsEvaluator = theConstraintsEvaluator;
- if (typesByName == null)
+ if (typesByName == null) {
throw new RuntimeException("No type index available!");
+ }
typesByName.put(this.toscaName, this);
}
+
+
+ @Override
public String toString() {
return this.toscaName;
}
@@ -260,8 +253,9 @@ public class Data {
Checker.CheckContext theCtx) {
Data.Type entryType = null;
Map entryTypeDef = (Map)theDef.get("entry_schema");
- if (null != entryTypeDef)
- entryType = typeByName((String)entryTypeDef.get("type"));
+ if (null != entryTypeDef) {
+ entryType = typeByName((String) entryTypeDef.get("type"));
+ }
boolean res = true;
for (Object val: theVals) {
@@ -271,39 +265,32 @@ public class Data {
f.evaluator().eval(val, entryTypeDef, theCtx)) {
res = false;
}
- else if (entryType != null &&
- !entryType.evaluator().eval(val, entryTypeDef, theCtx)) {
- res= false;
- //the error should hav been reported by the particular evaluator
- //theCtx.addError("Value " + val + " failed evaluation", null);
+ else if (entryType != null && !entryType.evaluator().eval(val, entryTypeDef, theCtx)) {
+ res = false;
}
}
return res;
}
- public static boolean evalListConstraints(Object theVal,
- Map theDef,
- Checker.CheckContext theCtx) {
+ public static boolean evalListConstraints(Object theVal, Map theDef, Checker.CheckContext theCtx) {
return evalCollectionConstraints((List)theVal, theDef, theCtx);
}
- public static boolean evalMapConstraints(Object theVal,
- Map theDef,
- Checker.CheckContext theCtx) {
+ public static boolean evalMapConstraints(Object theVal, Map theDef, Checker.CheckContext theCtx) {
return evalCollectionConstraints(((Map)theVal).values(), theDef, theCtx);
}
- private static boolean evalCollectionConstraints(Collection theVals,
- Map theDef,
- Checker.CheckContext theCtx) {
+ private static boolean evalCollectionConstraints(Collection theVals, Map theDef, Checker.CheckContext theCtx) {
//should check overall constraints
- if (theVals == null)
+ if (theVals == null) {
return true;
+ }
Map entryTypeDef = (Map)theDef.get("entry_schema");
- if (null == entryTypeDef)
+ if (null == entryTypeDef) {
return true;
+ }
String entryTypeName = (String)entryTypeDef.get("type");
Data.Type entryType = typeByName(entryTypeName);
@@ -311,11 +298,8 @@ public class Data {
boolean res = true;
for (Object val: theVals) {
Evaluator entryEvaluator = entryType.constraintsEvaluator();
- if (entryEvaluator != null &&
- !entryEvaluator.eval(val, entryTypeDef, theCtx)) {
+ if (entryEvaluator != null && !entryEvaluator.eval(val, entryTypeDef, theCtx)) {
res= false;
- //the constraints evaluator should have already added an error, but it also adds some context
- //theCtx.addError("Value " + val + " failed evaluation", null);
}
}
return res;
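
Both evalCollectionEntries and evalCollectionConstraints read the element type of a list or map property from its entry_schema block, mirroring the TOSCA YAML shape. A small sketch of that structure as the Java maps the code expects; the constraint values are illustrative:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    public class EntrySchemaShape {
        public static void main(String[] args) {
            // TOSCA YAML:
            //   type: list
            //   entry_schema:
            //     type: integer
            //     constraints:
            //       - in_range: [ 1, 65535 ]
            Map<String, Object> entrySchema = new HashMap<>();
            entrySchema.put("type", "integer");
            entrySchema.put("constraints",
                    Arrays.asList(Collections.singletonMap("in_range", Arrays.asList(1, 65535))));

            Map<String, Object> listDef = new HashMap<>();
            listDef.put("type", "list");
            listDef.put("entry_schema", entrySchema);

            // Data.evalListConstraints(value, listDef, ctx) checks each element of the
            // value list against entrySchema's type and constraints
            System.out.println(listDef);
        }
    }
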
@@ -371,16 +355,12 @@ public class Data {
if (propVal != null) {
Data.Type propType = typeByName((String)propDef.get("type"));
- if (propType.constraintsEvaluator() != null &&
- !propType.constraintsEvaluator().eval(propVal, propDef, theCtx)) {
+ if (propType.constraintsEvaluator() != null && !propType.constraintsEvaluator().eval(propVal, propDef, theCtx)) {
res= false;
- //the constraints evaluator should have already added an error
- //theCtx.addError("Property " + propEntry.getKey() + " failed evaluation for " + propVal, null);
}
}
else {
- if (Boolean.TRUE == (Boolean)propDef.getOrDefault("required", Boolean.FALSE) &&
- !propDef.containsKey("default")) {
+ if (Boolean.TRUE == propDef.getOrDefault("required", Boolean.FALSE) && !propDef.containsKey("default")) {
theCtx.addError("Property " + propEntry.getKey() + " failed 'required' constraint; definition is " + propDef, null);
res = false;
}
@@ -402,26 +382,6 @@ public class Data {
return false;
}
-/*
- private static boolean valueOf(Class theTarget,
- String theExpr,
- Checker.CheckContext theCtx) {
- try {
- theTarget.getMethod("valueOf", new Class[] {String.class})
- .invoke(null, theExpr);
- return true;
- }
- catch (InvocationTargetException itx) {
- theCtx.addError("Failed to parse " + theExpr + " as a " + theTarget.getName(), itx.getCause());
- return false;
- }
- catch (Exception x) {
- theCtx.addError("Failed to valueOf " + theExpr + " as a " + theTarget.getName(), x);
- return false;
- }
- }
-*/
-
/*
* Function e(valuation)
* ?
@@ -499,18 +459,15 @@ public class Data {
}
}
- private static boolean evalConcat(
- Object theVal, Map theDef, Checker.CheckContext theCtx) {
+ private static boolean evalConcat(Object theVal, Map theDef, Checker.CheckContext theCtx) {
return true;
}
- private static boolean evalToken(
- Object theVal, Map theDef, Checker.CheckContext theCtx) {
+ private static boolean evalToken(Object theVal, Map theDef, Checker.CheckContext theCtx) {
return true;
}
- private static boolean evalGetInput(
- Object theVal, Map theDef, Checker.CheckContext theCtx) {
+ private static boolean evalGetInput(Object theVal, Map theDef, Checker.CheckContext theCtx) {
Map val = (Map)theVal;
Map.Entry entry = (Map.Entry)val.entrySet().iterator().next();
@@ -527,8 +484,9 @@ public class Data {
return false;
}
- if (theDef == null)
+ if (theDef == null) {
return true;
+ }
//the output must be type compatible with the input
String targetType = (String)theDef.get("type");
@@ -554,24 +512,24 @@ public class Data {
Object theVal, Map theDef,
EnumSet<Facet> theFacets, Checker.CheckContext theCtx) {
- Map val = (Map)theVal;
- Map.Entry entry = (Map.Entry)val.entrySet().iterator().next();
+ Map val = (Map) theVal;
+ Map.Entry entry = (Map.Entry) val.entrySet().iterator().next();
if (!(entry.getValue() instanceof List)) {
- theCtx.addError("get_property: argument must be a List" ,null);
+ theCtx.addError("get_property: argument must be a List", null);
return false;
}
- List args = (List)entry.getValue();
+ List args = (List) entry.getValue();
if (args.size() < 2) {
theCtx.addError("'get_property' has at least 2 arguments", null);
return false;
}
//the first argument is a node or relationship template
- String tmpl = (String)args.get(0);
- Construct tmplConstruct = null;
- Map tmplSpec = null;
+ String tmpl = (String) args.get(0);
+ Construct tmplConstruct;
+ Map tmplSpec;
if ("SELF".equals(tmpl)) {
tmpl = theCtx.enclosingConstruct(Construct.Node);
@@ -580,27 +538,23 @@ public class Data {
if (tmpl == null) {
theCtx.addError("'get_property' invalid SELF reference: no node or relationship template in scope at " + theCtx.getPath(), null);
return false;
- }
- else {
+ } else {
tmplConstruct = Construct.Relationship;
}
- }
- else {
+ } else {
tmplConstruct = Construct.Node;
}
tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), tmplConstruct, tmpl);
- }
- else if ("SOURCE".equals("tmpl")) {
+ } else if ("SOURCE".equals("tmpl")) {
//we are in the scope of a relationship template and this is the source node template.
tmpl = theCtx.enclosingConstruct(Construct.Relationship);
if (tmpl == null) {
theCtx.addError("'get_property' invalid SOURCE reference: no relationship template in scope at " + theCtx.getPath(), null);
return false;
}
-
+
return true;
- }
- else if ("TARGET".equals("tmpl")) {
+ } else if ("TARGET".equals("tmpl")) {
//we are in the scope of a relationship template and this is the target node template.
tmpl = theCtx.enclosingConstruct(Construct.Relationship);
if (tmpl == null) {
@@ -609,8 +563,7 @@ public class Data {
}
return true;
- }
- else if ("HOST".equals("tmpl")) {
+ } else if ("HOST".equals("tmpl")) {
tmpl = theCtx.enclosingConstruct(Construct.Node);
if (tmpl == null) {
theCtx.addError("'get_property' invalid HOST reference: no node template in scope at " + theCtx.getPath(), null);
@@ -618,8 +571,7 @@ public class Data {
}
return true;
- }
- else {
+ } else {
//try node template first
tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), Construct.Node, tmpl);
if (tmplSpec == null) {
@@ -628,20 +580,18 @@ public class Data {
if (tmplSpec == null) {
theCtx.addError("'get_data' invalid template reference '" + tmpl + "': no node or relationship template with this name", null);
return false;
- }
- else {
+ } else {
tmplConstruct = Construct.Relationship;
}
- }
- else {
+ } else {
tmplConstruct = Construct.Node;
}
}
int facetNameIndex = 1;
Construct facetConstruct = tmplConstruct; //who's construct the facet is supposed to belong to
- Map facetConstructSpec = null;
- String facetConstructType = null;
+ Map facetConstructSpec = null;
+ String facetConstructType = null;
if (tmplConstruct.equals(Construct.Node) &&
args.size() > 2) {
@@ -654,62 +604,56 @@ public class Data {
//while the spec does not make it explicit this can only take place
//if the first argument turned out to be a node template (as relationship
//templates/types do not have capabilities/requirements
- String secondArg = (String)args.get(1);
+ String secondArg = (String) args.get(1);
if ((facetConstructSpec = theCtx.catalog().getFacetDefinition(
- tmplConstruct,
- (String)tmplSpec.get("type"),
- Facet.capabilities,
- secondArg)) != null) {
+ tmplConstruct,
+ (String) tmplSpec.get("type"),
+ Facet.capabilities,
+ secondArg)) != null) {
facetNameIndex = 2;
facetConstruct = Construct.Capability;
- facetConstructType = (String)facetConstructSpec.get("type");
- }
- else if ((facetConstructSpec = theCtx.catalog().getRequirementDefinition(
- tmplConstruct,
- (String)tmplSpec.get("type"),
- secondArg)) != null) {
+ facetConstructType = (String) facetConstructSpec.get("type");
+ } else if ((facetConstructSpec = theCtx.catalog().getRequirementDefinition(
+ tmplConstruct,
+ (String) tmplSpec.get("type"),
+ secondArg)) != null) {
facetNameIndex = 2;
facetConstruct = Construct.Capability;
-
+
//find the spec of the capability this requirement points to
//TODO: check, can the capability reference be anything else but a capability type?
- facetConstructType = (String)facetConstructSpec.get("capability");
+ facetConstructType = (String) facetConstructSpec.get("capability");
}
- }
- else {
+ } else {
//we'll attempt to handle it as a property of the node template
facetConstruct = Construct.Node;
facetConstructSpec = tmplSpec;
- facetConstructType = (String)facetConstructSpec.get("type");
+ facetConstructType = (String) facetConstructSpec.get("type");
}
-
+
//validate the facet name
Map facetSpec = null;
- {
- String facetName = (String)args.get(facetNameIndex);
- for (Facet facet: theFacets) {
- facetSpec = theCtx.catalog()
- .getFacetDefinition(
- facetConstruct,
- facetConstructType,
- facet,
- facetName);
- if (facetSpec != null)
- break;
- }
- if (facetSpec == null) {
-//TODO: not the greatest message if the call strated with a requirement ..
- theCtx.addError("'get_data' invalid reference, '" + facetConstruct + "' " + facetConstructType + " has no " + theFacets + " with name " + facetName, null);
- return false;
+ String facetName = (String) args.get(facetNameIndex);
+ for (Facet facet : theFacets) {
+ facetSpec = theCtx.catalog()
+ .getFacetDefinition(
+ facetConstruct,
+ facetConstructType,
+ facet,
+ facetName);
+ if (facetSpec != null) {
+ break;
}
}
- //the rest of the arguments have to resolve to a field of the property's
- //data type; the propertySpec contains the type specification
- for (int i = facetNameIndex + 1; i < args.size(); i++) {
+ if (facetSpec == null) {
+//TODO: not the greatest message if the call started with a requirement ..
+ theCtx.addError("'get_data' invalid reference, '" + facetConstruct + "' " + facetConstructType + " has no " + theFacets + " with name " + facetName, null);
+ return false;
}
+
return true;
}
@@ -777,13 +721,15 @@ public class Data {
private static Object getConstraintValue(Map theDef,
Constraint theConstraint) {
List<Map> constraints = (List<Map>)theDef.get("constraints");
- if (null == constraints)
+ if (null == constraints) {
return null;
+ }
for(Map constraint: constraints) {
Object val = constraint.get(theConstraint.toString());
- if (val != null)
+ if (val != null) {
return val;
+ }
}
return null;
}
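
getConstraintValue scans the definition's constraints block, which (as in TOSCA YAML) is a list of single-entry maps keyed by constraint name. A small illustration of that shape and of the lookup it performs, with invented values:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ConstraintsShape {
        public static void main(String[] args) {
            // TOSCA YAML:
            //   type: string
            //   constraints:
            //     - valid_values: [ http, https ]
            //     - min_length: 4
            Map<String, Object> def = new HashMap<>();
            def.put("type", "string");
            def.put("constraints", Arrays.asList(
                    Collections.singletonMap("valid_values", Arrays.asList("http", "https")),
                    Collections.singletonMap("min_length", 4)));

            // the same scan getConstraintValue(def, Constraint.valid_values) performs:
            for (Object c : (List) def.get("constraints")) {
                Object val = ((Map) c).get("valid_values");
                if (val != null) {
                    System.out.println("valid_values -> " + val);   // [http, https]
                }
            }
        }
    }
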
@@ -802,24 +748,20 @@ public class Data {
pattern;
}
-
/* hold the constraint evaluators for pairs of type/constraint.
* If a pair is not present than the given constraint does not apply
* to the type.
*/
- private static Table<Type,Constraint,Evaluator> typeConstraintEvaluator =null;
+ private static Table<Type,Constraint,Evaluator> typeConstraintEvaluator = null;
- public static Evaluator
- getTypeConstraintEvaluator(Type theType, Constraint theConstraint) {
+ public static Evaluator getTypeConstraintEvaluator(Type theType, Constraint theConstraint) {
if (typeConstraintEvaluator == null) {
typeConstraintEvaluator = HashBasedTable.create();
typeConstraintEvaluator.put(CoreType.String, Constraint.equal,
(val,def,ctx) -> val.equals(getConstraintValue(def,Constraint.equal)));
typeConstraintEvaluator.put(CoreType.String, Constraint.valid_values,
- (val,def,ctx) -> {
- return ((List)getConstraintValue(def,Constraint.valid_values)).contains(val);
- });
+ (val,def,ctx) -> ((List)getConstraintValue(def,Constraint.valid_values)).contains(val));
typeConstraintEvaluator.put(CoreType.String, Constraint.length,
(val,def,ctx) -> ((String)val).length() == ((Number)getConstraintValue(def,Constraint.length)).intValue());
typeConstraintEvaluator.put(CoreType.String, Constraint.min_length,