summaryrefslogtreecommitdiffstats
path: root/dcaedt_validator
diff options
context:
space:
mode:
authorStone, Avi (as206k) <as206k@att.com>2018-05-23 11:21:11 +0300
committerStone, Avi (as206k) <as206k@att.com>2018-05-23 11:30:13 +0300
commit3e4c18770957b55e2f80da32c3a32caa908f1386 (patch)
tree8a94c656300e75e38febfe9826ad36fc54fe14f5 /dcaedt_validator
parentda9db1b89e8c9199da4791a2ccd26d1628120a08 (diff)
Upgrade dt-be-main
Update sources for dcae-dt-be-main to latest version Change-Id: I3d58a2dc32611c0ca90f1c97e1294a17d5748623 Issue-ID: SDC-1359 Signed-off-by: Stone, Avi (as206k) <as206k@att.com>
Diffstat (limited to 'dcaedt_validator')
-rw-r--r--dcaedt_validator/Dockerfile28
-rw-r--r--dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java722
-rw-r--r--dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java89
-rw-r--r--dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java245
-rw-r--r--dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java8
-rw-r--r--dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java234
-rw-r--r--dcaedt_validator/kwalify/src/main/java/kwalify/Defaultable.java4
-rw-r--r--dcaedt_validator/kwalify/src/main/java/kwalify/DefaultableHashMap.java9
-rw-r--r--dcaedt_validator/kwalify/src/main/java/kwalify/Messages.java20
-rw-r--r--dcaedt_validator/kwalify/src/main/java/kwalify/MetaValidator.java525
-rw-r--r--dcaedt_validator/kwalify/src/main/java/kwalify/PlainYamlParser.java114
-rw-r--r--dcaedt_validator/kwalify/src/main/java/kwalify/Rule.java135
-rw-r--r--dcaedt_validator/kwalify/src/main/java/kwalify/Validator.java1
-rw-r--r--dcaedt_validator/kwalify/src/main/java/kwalify/YamlParser.java2
14 files changed, 1050 insertions, 1086 deletions
diff --git a/dcaedt_validator/Dockerfile b/dcaedt_validator/Dockerfile
new file mode 100644
index 0000000..bcb4074
--- /dev/null
+++ b/dcaedt_validator/Dockerfile
@@ -0,0 +1,28 @@
+#
+# Oracle Java 8 Dockerfile
+#
+#
+# Image built by Thomas Nelson See Docker Github
+# Pull base image.
+FROM jdk-8u101_ubuntu:16.04
+
+# Define commonly used ENV variables
+#ENV JAVA_HOME /usr/lib/jvm/java-8-oracle
+#ENV PATH $PATH:$JAVA_HOME/bin:/opt/apache-tomcat-8.0.37/bin
+ENV INSDIR /opt/app/validator
+ENV LOGS ${INSDIR}/logs
+
+RUN mkdir -p $INSDIR
+
+RUN mkdir ${INSDIR}/ssl && \
+ mkdir $LOGS
+COPY service/target/ASC-Validator-*.jar ${INSDIR}/ASC-Validator.jar
+COPY service/target/classes/application.properties ${INSDIR}/
+COPY configure-and-run.sh /usr/local/bin/
+# Define working directory.
+WORKDIR $INSDIR
+VOLUME $LOGS
+
+# Define default command.
+CMD ["/usr/local/bin/configure-and-run.sh"]
+
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java
index 1512e56..bdddce3 100644
--- a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java
@@ -23,7 +23,6 @@ import com.google.common.collect.Iterators;
import com.google.common.collect.Table;
import com.google.common.collect.HashBasedTable;
import org.onap.sdc.common.onaplog.OnapLoggerDebug;
-import org.onap.sdc.common.onaplog.OnapLoggerError;
import org.onap.sdc.common.onaplog.Enums.LogLevel;
/*
@@ -33,250 +32,249 @@ import org.onap.sdc.common.onaplog.Enums.LogLevel;
*/
public class Catalog {
- private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
- private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+ private static final String DERIVED_FROM = "derived_from";
+ private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
- /* Type hierarchies are stored as maps from a type name to its definition
+
+ /* tracks imports, i.e.targets */
+ private LinkedHashMap<URI, Target> targets =
+ new LinkedHashMap<>();
+ /* tracks dependencies between targets, i.e. the 'adjency' matrix defined by
+ * the 'import' relationship */
+ private Table<Target,Target,Boolean> imports = HashBasedTable.create();
+
+
+ /* Type hierarchies are stored as maps from a type name to its definition
* Not the best but easy to follow hierarchies towards their root ..
*/
private EnumMap<Construct, Map<String,Map>> types =
- new EnumMap<Construct, Map<String,Map>>(Construct.class);
- /* track templates: we track templates (tye instances) first per target then per contruct.
- * This allows us to share the catalog among multiple templates sharign the same type set
+ new EnumMap<>(Construct.class);
+ /* track templates: we track templates (tye instances) first per target then per contruct.
+ * This allows us to share the catalog among multiple templates sharign the same type set
*/
private Map<Target, EnumMap<Construct, Map<String,Map>>> templates =
- new HashMap<Target, EnumMap<Construct, Map<String,Map>>>();
-
- private Catalog parent;
-
- public Catalog(Catalog theParent) {
- this.parent = theParent;
- /* there are no requirement types, they are the same as capability types */
- types.put(Construct.Data, new LinkedHashMap<String, Map>());
- types.put(Construct.Capability, new LinkedHashMap<String, Map>());
- types.put(Construct.Relationship, new LinkedHashMap<String, Map>());
- types.put(Construct.Artifact, new LinkedHashMap<String, Map>());
- types.put(Construct.Interface, new LinkedHashMap<String, Map>());
- types.put(Construct.Node, new LinkedHashMap<String, Map>());
- types.put(Construct.Group, new LinkedHashMap<String, Map>());
- types.put(Construct.Policy, new LinkedHashMap<String, Map>());
+ new HashMap<>();
+
+ private Catalog parent;
+
+ public Catalog(Catalog theParent) {
+ this.parent = theParent;
+ /* there are no requirement types, they are the same as capability types */
+ types.put(Construct.Data, new LinkedHashMap<>());
+ types.put(Construct.Capability, new LinkedHashMap<>());
+ types.put(Construct.Relationship, new LinkedHashMap<>());
+ types.put(Construct.Artifact, new LinkedHashMap<>());
+ types.put(Construct.Interface, new LinkedHashMap<>());
+ types.put(Construct.Node, new LinkedHashMap<>());
+ types.put(Construct.Group, new LinkedHashMap<>());
+ types.put(Construct.Policy, new LinkedHashMap<>());
- }
-
- public Catalog() {
- this(null);
- }
-
- public boolean addType(Construct theConstruct, String theName, Map theDef) {
- if (hasType(theConstruct, theName)) {
- return false;
- }
- getConstructTypes(theConstruct).put(theName, theDef);
- return true;
+ }
+
+ public Catalog() {
+ this(null);
+ }
+
+ public boolean addType(Construct theConstruct, String theName, Map theDef) {
+ if (hasType(theConstruct, theName)) {
+ return false;
+ }
+ getConstructTypes(theConstruct).put(theName, theDef);
+ return true;
}
- public Map getTypeDefinition(Construct theConstruct, String theName) {
- Map<String, Map> constructTypes = getConstructTypes(theConstruct);
- Map typeDef = constructTypes.get(theName);
- if (typeDef == null && this.parent != null) {
- return this.parent.getTypeDefinition(theConstruct, theName);
- }
- return typeDef;
- }
+ public Map getTypeDefinition(Construct theConstruct, String theName) {
+ Map<String, Map> constructTypes = getConstructTypes(theConstruct);
+ Map typeDef = constructTypes.get(theName);
+ if (typeDef == null && this.parent != null) {
+ return this.parent.getTypeDefinition(theConstruct, theName);
+ }
+ return typeDef;
+ }
public boolean hasType(Construct theConstruct, String theName) {
- Map<String, Map> constructTypes = getConstructTypes(theConstruct);
- boolean res = constructTypes.containsKey(theName);
- if (!res && this.parent != null) {
- res = this.parent.hasType(theConstruct, theName);
- }
- return res;
- }
-
- protected Map<String, Map> getConstructTypes(Construct theConstruct) {
- Map<String, Map> constructTypes = this.types.get(theConstruct);
- if (null == constructTypes) {
- throw new RuntimeException("Something worse is cooking here!",
- new CatalogException("No types for construct " + theConstruct));
- }
- return constructTypes;
- }
-
- protected Iterator<Map.Entry<String,Map>>
- typesIterator(Construct theConstruct) {
- List<Map.Entry<String,Map>> constructTypes =
- new ArrayList<Map.Entry<String,Map>>(
- this.types.get(theConstruct).entrySet());
- Collections.reverse(constructTypes);
- return (this.parent == null)
- ? constructTypes.iterator()
- : Iterators.concat(constructTypes.iterator(),
- this.parent.typesIterator(theConstruct));
- }
-
- /* this will iterate through the type hierarchy for the given type, included.
- */
- public Iterator<Map.Entry<String,Map>>
- hierarchy(Construct theConstruct, final String theName) {
- return Iterators.filter(typesIterator(theConstruct),
+ Map<String, Map> constructTypes = getConstructTypes(theConstruct);
+ boolean res = constructTypes.containsKey(theName);
+ if (!res && this.parent != null) {
+ res = this.parent.hasType(theConstruct, theName);
+ }
+ return res;
+ }
+
+ protected Map<String, Map> getConstructTypes(Construct theConstruct) {
+ Map<String, Map> constructTypes = this.types.get(theConstruct);
+ if (null == constructTypes) {
+ throw new RuntimeException("Something worse is cooking here!",
+ new CatalogException("No types for construct " + theConstruct));
+ }
+ return constructTypes;
+ }
+
+ private Iterator<Map.Entry<String,Map>>
+ typesIterator(Construct theConstruct) {
+ List<Map.Entry<String,Map>> constructTypes =
+ new ArrayList<>(
+ this.types.get(theConstruct).entrySet());
+ Collections.reverse(constructTypes);
+ return (this.parent == null)
+ ? constructTypes.iterator()
+ : Iterators.concat(constructTypes.iterator(),
+ this.parent.typesIterator(theConstruct));
+ }
+
+
+ // this will iterate through the type hierarchy for the given type, included.
+ public Iterator<Map.Entry<String,Map>>
+ hierarchy(Construct theConstruct, final String theName) {
+ return Iterators.filter(typesIterator(theConstruct),
new Predicate<Map.Entry<String,Map>>() {
Object next = theName;
public boolean apply(Map.Entry<String,Map> theEntry) {
if (next != null && next.equals(theEntry.getKey())) {
- next = theEntry.getValue().get("derived_from");
+ next = theEntry.getValue().get(DERIVED_FROM);
return true;
+ } else {
+ return false;
}
- else
- return false;
}
});
}
- public boolean isDerivedFrom(Construct theConstruct, String theType, String theBaseType) {
-
- Iterator<Map.Entry<String,Map>> hierachyIterator =
- hierarchy(theConstruct, theType);
- while (hierachyIterator.hasNext()) {
- Map.Entry<String,Map> typeDef = hierachyIterator.next();
-
- if (typeDef.getKey().equals(theBaseType)) {
- return true;
- }
- }
- return false;
- }
-
- /* We go over the type hierarchy and retain only an iterator over the
- * elements of the given facet for each type in the hierarchy.
- * We concatenate these iterators and filter out duplicates.
- * TODO: cannot just filter out duplicates - a redefinition can refine the one in the base construct so we
- * should merge them!
- */
- public Iterator<Map.Entry> facets(Construct theConstruct,
- final Facet theFacet,
- final String theName) {
- return
- Iterators.filter(
- Iterators.concat(
- Iterators.transform(
- hierarchy(theConstruct, theName),
- new Function<Map.Entry<String,Map>, Iterator<Map.Entry>>() {
- public Iterator<Map.Entry> apply(Map.Entry<String,Map> theEntry) {
- Map m = (Map)theEntry.getValue().get(theFacet.name());
- return m == null
- ? Collections.emptyIterator()
- : m.entrySet().iterator();
- }
- }
- )
- ),
+ public boolean isDerivedFrom(Construct theConstruct, String theType, String theBaseType) {
+
+ Iterator<Map.Entry<String,Map>> hierachyIterator =
+ hierarchy(theConstruct, theType);
+ while (hierachyIterator.hasNext()) {
+ Map.Entry<String,Map> typeDef = hierachyIterator.next();
+
+ if (typeDef.getKey().equals(theBaseType)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /* We go over the type hierarchy and retain only an iterator over the
+ * elements of the given facet for each type in the hierarchy.
+ * We concatenate these iterators and filter out duplicates.
+ * TODO: cannot just filter out duplicates - a redefinition can refine the one in the base construct so we
+ * should merge them!
+ */
+ public Iterator<Map.Entry> facets(Construct theConstruct, final Facet theFacet, final String theName) {
+ return
+ Iterators.filter(
+ Iterators.concat(
+ Iterators.transform(
+ hierarchy(theConstruct, theName),
+ (Function<Map.Entry<String, Map>, Iterator<Map.Entry>>) theEntry -> {
+ Map m = (Map)theEntry.getValue().get(theFacet.name());
+ return m == null
+ ? Collections.emptyIterator()
+ : m.entrySet().iterator();
+ }
+ )
+ ),
new Predicate<Map.Entry>() {
- Set insts = new HashSet();
- public boolean apply(Map.Entry theEntry) {
- return !insts.contains(theEntry.getKey());
- }
- }
- );
- }
-
- //no need to specify a construct, only nodes can have requirements
- public Iterator<Map.Entry> requirements(final String theName) {
- return
- Iterators.concat(
- Iterators.transform(
- hierarchy(Construct.Node, theName),
- new Function<Map.Entry<String,Map>, Iterator<Map.Entry>>() {
- public Iterator<Map.Entry> apply(Map.Entry<String,Map> theEntry) {
- List<Map> l = (List<Map>)theEntry.getValue().get("requirements");
- return l == null
- ? Collections.emptyIterator()
- : Iterators.concat(
- Iterators.transform(
- l.iterator(),
- new Function<Map, Iterator<Map.Entry>> () {
- public Iterator<Map.Entry> apply(Map theEntry) {
- return theEntry.entrySet().iterator();
- }
- }
- )
- );
- }
- }
- )
- );
- }
-
- /* Example: find the definition of property 'port' of the node type
- * tosca.nodes.Database (properties being a facet of the node construct)
- *
- * Note: the definition of a facet is cumulative, i.e. more specialized
- * definitions contribute (by overwriting) to the
- */
- public Map getFacetDefinition(Construct theConstruct,
- String theConstructTypeName,
- Facet theFacet,
- String theName) {
- Map def = null;
- Iterator<Map.Entry<String,Map>> ti = hierarchy(theConstruct, theConstructTypeName);
- while (ti.hasNext()) {
- //this is where requirements would yield a List ..
- Map<String,Map> fset = (Map<String,Map>)ti.next().getValue().get(theFacet.name());
- if (fset != null) {
- def = def == null ? fset.get(theName)
- : mergeDefinitions(def, fset.get(theName));
- }
- }
- return def;
- }
-
- public Map getRequirementDefinition(Construct theConstruct,
- String theConstructTypeName,
- String theName) {
- Iterator<Map.Entry<String,Map>> ti = hierarchy(theConstruct, theConstructTypeName);
- while (ti.hasNext()) {
- //this is where requirements yield a List ..
- List<Map> reqs = (List<Map>)ti.next().getValue().get("requirements");
-
- if(reqs!=null){
- for (Map req: reqs) {
- Map.Entry reqe = (Map.Entry)req.entrySet().iterator().next();
- if (theName.equals(reqe.getKey())) {
- return (Map)reqe.getValue();
- }
- }
- }else{
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Avoiding adding requirment block since it doesn't exists on the template....");
- }
- }
- return null;
- }
+ Set insts = new HashSet();
+ public boolean apply(Map.Entry theEntry) {
+ return !insts.contains(theEntry.getKey());
+ }
+ }
+ );
+ }
+
+ //no need to specify a construct, only nodes can have requirements
+ public Iterator<Map.Entry> requirements(final String theName) {
+ return
+ Iterators.concat(
+ Iterators.transform(
+ hierarchy(Construct.Node, theName),
+ theEntry -> {
+ List<Map> l = (List<Map>)theEntry.getValue().get("requirements");
+ return l == null
+ ? Collections.emptyIterator()
+ : Iterators.concat(
+ Iterators.transform(
+ l.iterator(),
+ (Function<Map, Iterator<Map.Entry>>) theEntry1 -> theEntry1.entrySet().iterator()
+ )
+ );
+ }
+ )
+ );
+ }
+
+ /* Example: find the definition of property 'port' of the node type
+ * tosca.nodes.Database (properties being a facet of the node construct)
+ *
+ * Note: the definition of a facet is cumulative, i.e. more specialized
+ * definitions contribute (by overwriting) to the
+ */
+ public Map getFacetDefinition(Construct theConstruct,
+ String theConstructTypeName,
+ Facet theFacet,
+ String theName) {
+ Map def = null;
+ Iterator<Map.Entry<String,Map>> ti = hierarchy(theConstruct, theConstructTypeName);
+ while (ti.hasNext()) {
+ //this is where requirements would yield a List ..
+ Map<String,Map> fset = (Map<String,Map>)ti.next().getValue().get(theFacet.name());
+ if (fset != null) {
+ def = def == null ? fset.get(theName)
+ : mergeDefinitions(def, fset.get(theName));
+ }
+ }
+ return def;
+ }
+
+ public Map getRequirementDefinition(Construct theConstruct,
+ String theConstructTypeName,
+ String theName) {
+ Iterator<Map.Entry<String,Map>> ti = hierarchy(theConstruct, theConstructTypeName);
+ while (ti.hasNext()) {
+ //this is where requirements yield a List ..
+ List<Map> reqs = (List<Map>)ti.next().getValue().get("requirements");
+
+ if(reqs!=null) {
+ for (Map req: reqs) {
+ Map.Entry reqe = (Map.Entry)req.entrySet().iterator().next();
+ if (theName.equals(reqe.getKey())) {
+ return (Map)reqe.getValue();
+ }
+ }
+ } else {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Avoiding adding requirment block since it doesn't exists on the template....");
+ }
+ }
+ return null;
+ }
/* */
private EnumMap<Construct,Map<String,Map>> getTemplates(Target theTarget) {
- EnumMap<Construct, Map<String,Map>> targetTemplates = templates.get(theTarget);
- if (targetTemplates == null) {
- targetTemplates = new EnumMap<Construct,Map<String,Map>>(Construct.class);
- targetTemplates.put(Construct.Data, new LinkedHashMap<String, Map>());
- targetTemplates.put(Construct.Relationship, new LinkedHashMap<String, Map>());
- targetTemplates.put(Construct.Node, new LinkedHashMap<String, Map>());
- targetTemplates.put(Construct.Group, new LinkedHashMap<String, Map>());
- targetTemplates.put(Construct.Policy, new LinkedHashMap<String, Map>());
-
- templates.put(theTarget, targetTemplates);
- }
- return targetTemplates;
- }
-
- public Map<String,Map> getTargetTemplates(Target theTarget, Construct theConstruct) {
- return getTemplates(theTarget).get(theConstruct);
- }
-
- public void addTemplate(Target theTarget, Construct theConstruct, String theName, Map theDef)
- throws CatalogException {
- Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct);
- if (null == constructTemplates) {
+ EnumMap<Construct, Map<String,Map>> targetTemplates = templates.get(theTarget);
+ if (targetTemplates == null) {
+ targetTemplates = new EnumMap<>(Construct.class);
+ targetTemplates.put(Construct.Data, new LinkedHashMap<>());
+ targetTemplates.put(Construct.Relationship, new LinkedHashMap<>());
+ targetTemplates.put(Construct.Node, new LinkedHashMap<>());
+ targetTemplates.put(Construct.Group, new LinkedHashMap<>());
+ targetTemplates.put(Construct.Policy, new LinkedHashMap<>());
+
+ templates.put(theTarget, targetTemplates);
+ }
+ return targetTemplates;
+ }
+
+ public Map<String,Map> getTargetTemplates(Target theTarget, Construct theConstruct) {
+ return getTemplates(theTarget).get(theConstruct);
+ }
+
+ public void addTemplate(Target theTarget, Construct theConstruct, String theName, Map theDef)
+ throws CatalogException {
+ Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct);
+ if (null == constructTemplates) {
throw new CatalogException("No such thing as " + theConstruct + " templates");
- }
+ }
if (constructTemplates.containsKey(theName)) {
throw new CatalogException(theConstruct + " template '" + theName + "' re-declaration");
}
@@ -286,159 +284,159 @@ public class Catalog {
public boolean hasTemplate(Target theTarget, Construct theConstruct, String theName) {
Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct);
return constructTemplates != null &&
- constructTemplates.containsKey(theName);
+ constructTemplates.containsKey(theName);
}
public Map getTemplate(Target theTarget, Construct theConstruct, String theName) {
Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct);
- if (constructTemplates != null)
- return constructTemplates.get(theName);
- else
- return null;
+ if (constructTemplates != null) {
+ return constructTemplates.get(theName);
+ } else {
+ return null;
+ }
}
- public static Map mergeDefinitions(Map theAggregate, Map theIncrement) {
- if (theIncrement == null)
- return theAggregate;
-
- for(Map.Entry e: (Set<Map.Entry>)theIncrement.entrySet()) {
- theAggregate.putIfAbsent(e.getKey(), e.getValue());
- }
- return theAggregate;
- }
+ public static Map mergeDefinitions(Map theAggregate, Map theIncrement) {
+ if (theIncrement == null) {
+ return theAggregate;
+ }
- /* tracks imports, i.e.targets */
- private LinkedHashMap<URI, Target> targets =
- new LinkedHashMap<URI, Target>();
- /* tracks dependencies between targets, i.e. the 'adjency' matrix defined by
- * the 'import' relationship */
- private Table<Target,Target,Boolean> imports = HashBasedTable.create();
+ for(Map.Entry e: (Set<Map.Entry>)theIncrement.entrySet()) {
+ theAggregate.putIfAbsent(e.getKey(), e.getValue());
+ }
+ return theAggregate;
+ }
- /*
+ /*
* theParent contains an 'include/import' statement pointing to the Target
*/
- public boolean addTarget(Target theTarget, Target theParent) {
- boolean cataloged = targets.containsKey(theTarget.getLocation());
-
- if(!cataloged) {
- targets.put(theTarget.getLocation(), theTarget);
- }
-
- if (theParent != null) {
- imports.put(theParent, theTarget, Boolean.TRUE);
- }
-
- return !cataloged;
- }
-
- public Target getTarget(URI theLocation) {
- return targets.get(theLocation);
- }
-
- public Collection<Target> targets() {
- return targets.values();
- }
-
- /* Targets that no other targets depend on */
- public Collection<Target> topTargets() {
- return targets.values()
- .stream()
- .filter(t -> !imports.containsColumn(t))
- .collect(Collectors.toList());
-
- }
-
- public String importString(Target theTarget) {
- return importString(theTarget, " ");
- }
-
- private String importString(Target theTarget, String thePrefix) {
- StringBuilder sb = new StringBuilder("");
- Map<Target,Boolean> parents = imports.column(theTarget);
- if (parents != null) {
- for (Target p: parents.keySet()) {
- sb.append(thePrefix)
- .append("from ")
- .append(p.getLocation())
- .append("\n")
- .append(importString(p, thePrefix + " "));
- }
- //we only keep the positive relationships
- }
- return sb.toString();
- }
-
- /* */
- private class TargetComparator implements Comparator<Target> {
-
- /* @return 1 if there is a dependency path from TargetOne to TargetTwo, -1 otherwise */
- public int compare(Target theTargetOne, Target theTargetTwo) {
- if (hasPath(theTargetTwo, theTargetOne))
- return -1;
-
- if (hasPath(theTargetOne, theTargetTwo))
- return 1;
-
- return 0;
- }
-
- public boolean hasPath(Target theStart, Target theEnd) {
- Map<Target,Boolean> deps = imports.row(theStart);
- if (deps.containsKey(theEnd))
- return true;
- for (Target dep: deps.keySet()) {
- if (hasPath(dep, theEnd))
- return true;
- }
- return false;
- }
- }
-
- public Collection<Target> sortedTargets() {
- List keys = new ArrayList(this.targets.values());
- Collections.sort(keys, new TargetComparator());
- return keys;
- }
-
- public static void main(String[] theArgs) throws Exception {
-
- Catalog cat = new Catalog();
-
- Target a = new Target("a", new URI("a")),
- b = new Target("b", new URI("b")),
- c = new Target("c", new URI("c")),
- d = new Target("d", new URI("d"));
-
- cat.addTarget(a, null);
- cat.addTarget(b, null);
- cat.addTarget(c, null);
- cat.addTarget(d, null);
-
- cat.addTarget(b, c);
- cat.addTarget(a, c);
- cat.addTarget(c, d);
- cat.addTarget(a, b);
-
- for (Target t: cat.sortedTargets())
- debugLogger.log(LogLevel.DEBUG, Catalog.class.getName(), t.toString());
-
- Catalog root = new Catalog();
- root.addType(Construct.Node, "_a", Collections.emptyMap());
- root.addType(Construct.Node, "__a", Collections.singletonMap("derived_from", "_a"));
- root.addType(Construct.Node, "___a", Collections.singletonMap("derived_from", "_a"));
-
- Catalog base = new Catalog(root);
- base.addType(Construct.Node, "_b", Collections.singletonMap("derived_from", "__a"));
- base.addType(Construct.Node, "__b", Collections.singletonMap("derived_from", "_b"));
- base.addType(Construct.Node, "__b_", Collections.singletonMap("derived_from", "_a"));
-
- if (theArgs.length > 0) {
- Iterator<Map.Entry<String, Map>> ti =
- base.hierarchy(Construct.Node, theArgs[0]);
- while (ti.hasNext()) {
+ public boolean addTarget(Target theTarget, Target theParent) {
+ boolean cataloged = targets.containsKey(theTarget.getLocation());
+
+ if(!cataloged) {
+ targets.put(theTarget.getLocation(), theTarget);
+ }
+
+ if (theParent != null) {
+ imports.put(theParent, theTarget, Boolean.TRUE);
+ }
+
+ return !cataloged;
+ }
+
+ public Target getTarget(URI theLocation) {
+ return targets.get(theLocation);
+ }
+
+ public Collection<Target> targets() {
+ return targets.values();
+ }
+
+ /* Targets that no other targets depend on */
+ public Collection<Target> topTargets() {
+ return targets.values()
+ .stream()
+ .filter(t -> !imports.containsColumn(t))
+ .collect(Collectors.toList());
+
+ }
+
+ public String importString(Target theTarget) {
+ return importString(theTarget, " ");
+ }
+
+ private String importString(Target theTarget, String thePrefix) {
+ StringBuilder sb = new StringBuilder("");
+ Map<Target,Boolean> parents = imports.column(theTarget);
+ if (parents != null) {
+ for (Target p: parents.keySet()) {
+ sb.append(thePrefix)
+ .append("from ")
+ .append(p.getLocation())
+ .append("\n")
+ .append(importString(p, thePrefix + " "));
+ }
+ //we only keep the positive relationships
+ }
+ return sb.toString();
+ }
+
+ /* */
+ private class TargetComparator implements Comparator<Target> {
+
+ /* @return 1 if there is a dependency path from TargetOne to TargetTwo, -1 otherwise */
+ public int compare(Target theTargetOne, Target theTargetTwo) {
+ if (hasPath(theTargetTwo, theTargetOne)) {
+ return -1;
+ }
+
+ if (hasPath(theTargetOne, theTargetTwo)) {
+ return 1;
+ }
+
+ return 0;
+ }
+
+ boolean hasPath(Target theStart, Target theEnd) {
+ Map<Target,Boolean> deps = imports.row(theStart);
+ if (deps.containsKey(theEnd)) {
+ return true;
+ }
+ for (Target dep: deps.keySet()) {
+ if (hasPath(dep, theEnd)) {
+ return true;
+ }
+ }
+ return false;
+ }
+ }
+
+ public Collection<Target> sortedTargets() {
+ List keys = new ArrayList(this.targets.values());
+ Collections.sort(keys, new TargetComparator());
+ return keys;
+ }
+
+ public static void main(String[] theArgs) throws Exception {
+
+ Catalog cat = new Catalog();
+
+ Target a = new Target("a", new URI("a")),
+ b = new Target("b", new URI("b")),
+ c = new Target("c", new URI("c")),
+ d = new Target("d", new URI("d"));
+
+ cat.addTarget(a, null);
+ cat.addTarget(b, null);
+ cat.addTarget(c, null);
+ cat.addTarget(d, null);
+
+ cat.addTarget(b, c);
+ cat.addTarget(a, c);
+ cat.addTarget(c, d);
+ cat.addTarget(a, b);
+
+ for (Target t: cat.sortedTargets()) {
+ debugLogger.log(LogLevel.DEBUG, Catalog.class.getName(), t.toString());
+ }
+
+ Catalog root = new Catalog();
+ root.addType(Construct.Node, "_a", Collections.emptyMap());
+ root.addType(Construct.Node, "__a", Collections.singletonMap(DERIVED_FROM, "_a"));
+ root.addType(Construct.Node, "___a", Collections.singletonMap(DERIVED_FROM, "_a"));
+
+ Catalog base = new Catalog(root);
+ base.addType(Construct.Node, "_b", Collections.singletonMap(DERIVED_FROM, "__a"));
+ base.addType(Construct.Node, "__b", Collections.singletonMap(DERIVED_FROM, "_b"));
+ base.addType(Construct.Node, "__b_", Collections.singletonMap(DERIVED_FROM, "_a"));
+
+ if (theArgs.length > 0) {
+ Iterator<Map.Entry<String, Map>> ti =
+ base.hierarchy(Construct.Node, theArgs[0]);
+ while (ti.hasNext()) {
debugLogger.log(LogLevel.DEBUG, Catalog.class.getName(), "> {}", ti.next().getKey());
- }
- }
- }
+ }
+ }
+ }
}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java
index fee617f..bf1843e 100644
--- a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java
@@ -87,6 +87,7 @@ public class Checker {
private static final String WAS_DEFINED_FOR_THE_NODE_TYPE = " was defined for the node type ";
private static final String UNKNOWN = "Unknown ";
private static final String TYPE = " type ";
+ public static final String IMPORTED_FROM = "',imported from ";
private Target target = null; //what we're validating at the moment
@@ -2404,9 +2405,7 @@ public class Checker {
return false;
}
- for (Iterator<Map.Entry<String, Map>> ai = augs.entrySet().iterator(); ai.hasNext(); ) {
- Map.Entry<String, Map> ae = ai.next();
-
+ for (Map.Entry<String, Map> ae : augs.entrySet()) {
//make sure it was declared by the type
Map facetDef = catalog.getFacetDefinition(theConstruct, theSpecType, theFacet, ae.getKey());
if (facetDef == null) {
@@ -2715,7 +2714,7 @@ public class Checker {
private String patchWhitespaces(String thePath) {
String[] elems = thePath.split("/");
- StringBuffer path = new StringBuffer();
+ StringBuilder path = new StringBuilder();
for (int i = 0; i < elems.length; i++) {
if (spacePattern.matcher(elems[i]).find()) {
path.append("[@name='")
@@ -2836,7 +2835,7 @@ public class Checker {
hookHandler = Invokable.from(m);
} catch (NoSuchMethodException nsmx) {
//that's ok, not every rule has to have a handler
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "That's ok, not every rule has to have a handler. Method name =", theHookName);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "That's ok, not every rule has to have a handler. Method name is:{}. Exception:{}", theHookName,nsmx);
}
if (hookHandler != null) {
@@ -3120,7 +3119,7 @@ substitute the canonical form for the short form so that checking does not have
debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "entering range_definition {}",
theContext.getPath());
- assert theRule.getType().equals("seq");
+ assert "seq".equals(theRule.getType());
List bounds = (List) theValue;
if (bounds.size() != 2) {
@@ -3148,10 +3147,10 @@ substitute the canonical form for the short form so that checking does not have
* early processing (validation time) of the imports allows us to catalog
* their types before those declared in the main document.
*/
- protected void imports_post_validation_handler(Object theValue, Rule theRule,
- Validator.ValidationContext theContext) {
+ protected void imports_post_validation_handler(Object theValue, Rule theRule, Validator.ValidationContext theContext) {
debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering imports {}", theContext.getPath());
- assert theRule.getType().equals("seq");
+
+ assert "seq".equals(theRule.getType());
Target tgt = ((TOSCAValidator) theContext.getValidator()).getTarget();
@@ -3179,8 +3178,9 @@ substitute the canonical form for the short form so that checking does not have
try {
List<Target> tgtis = parseTarget(tgti);
- if (tgtis.isEmpty())
- continue;
+ if (tgtis.isEmpty()) {
+ continue;
+ }
if (tgtis.size() > 1) {
theContext.addError(
@@ -3191,21 +3191,20 @@ substitute the canonical form for the short form so that checking does not have
tgti = tgtis.get(0);
- // tgti = parseTarget(tgti);
if (tgt.getReport().hasErrors()) {
- theContext.addError("Failure parsing import '" + tgti + "',imported from " + tgt, theRule, null,
+ theContext.addError("Failure parsing import '" + tgti + IMPORTED_FROM + tgt, theRule, null,
null);
continue;
}
validateTarget(tgti);
if (tgt.getReport().hasErrors()) {
- theContext.addError("Failure validating import '" + tgti + "',imported from " + tgt, theRule,
+ theContext.addError("Failure validating import '" + tgti + IMPORTED_FROM + tgt, theRule,
null, null);
continue;
}
} catch (CheckerException cx) {
- theContext.addError("Failure validating import '" + tgti + "',imported from " + tgt, theRule, cx,
+ theContext.addError("Failure validating import '" + tgti + IMPORTED_FROM + tgt, theRule, cx,
null);
}
}
@@ -3222,7 +3221,7 @@ substitute the canonical form for the short form so that checking does not have
Validator.ValidationContext theContext) {
debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering node_templates_post_validation_handler {}",
theContext.getPath());
- assert theRule.getType().equals("map");
+ assert "map".equals(theRule.getType());
Map<String, Map> nodeTemplates = (Map<String, Map>) theValue;
for (Iterator<Map.Entry<String, Map>> i = nodeTemplates.entrySet().iterator(); i.hasNext();) {
Map.Entry<String, Map> node = i.next();
@@ -3267,17 +3266,18 @@ substitute the canonical form for the short form so that checking does not have
private void process(String theProcessorSpec) throws CheckerException {
String[] spec = theProcessorSpec.split(" ");
- if (spec.length == 0)
- throw new IllegalArgumentException("Incomplete processor specification");
+ if (spec.length == 0) {
+ throw new IllegalArgumentException("Incomplete processor specification");
+ }
- Class processorClass = null;
+ Class processorClass;
try {
processorClass = Class.forName(spec[0]);
} catch (ClassNotFoundException cnfx) {
throw new CheckerException("Cannot find processor implementation", cnfx);
}
- Processor proc = null;
+ Processor proc;
try {
proc = (Processor) ConstructorUtils.invokeConstructor(processorClass,
Arrays.copyOfRange(spec, 1, spec.length));
@@ -3296,8 +3296,9 @@ substitute the canonical form for the short form so that checking does not have
return;
}
// check artifact type
- if (!checkType(Construct.Artifact, theDef, theContext))
- return;
+ if (!checkType(Construct.Artifact, theDef, theContext)) {
+ return;
+ }
} finally {
theContext.exit();
}
@@ -3312,8 +3313,8 @@ substitute the canonical form for the short form so that checking does not have
return;
}
- if (theDefinition.containsKey("properties")) {
- check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+ if (theDefinition.containsKey(PROPERTIES)) {
+ check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
checkTypeConstructFacet(Construct.Policy, theName, theDefinition, Facet.properties, theContext);
}
@@ -3347,8 +3348,8 @@ substitute the canonical form for the short form so that checking does not have
return;
}
- if (theDefinition.containsKey("properties")) {
- check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+ if (theDefinition.containsKey(PROPERTIES)) {
+ check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
checkTypeConstructFacet(Construct.Group, theName, theDefinition, Facet.properties, theContext);
}
@@ -3385,8 +3386,8 @@ substitute the canonical form for the short form so that checking does not have
return;
}
- if (theDefinition.containsKey("properties")) {
- check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+ if (theDefinition.containsKey(PROPERTIES)) {
+ check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
checkTypeConstructFacet(Construct.Node, theName, theDefinition, Facet.properties, theContext);
}
@@ -3401,8 +3402,8 @@ substitute the canonical form for the short form so that checking does not have
}
// capabilities
- if (theDefinition.containsKey("capabilities")) {
- check_capabilities((Map<String, Map>) theDefinition.get("capabilities"), theContext);
+ if (theDefinition.containsKey(CAPABILITIES)) {
+ check_capabilities((Map<String, Map>) theDefinition.get(CAPABILITIES), theContext);
}
// interfaces:
@@ -3470,8 +3471,8 @@ substitute the canonical form for the short form so that checking does not have
return;
}
- if (theDefinition.containsKey("properties")) {
- check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+ if (theDefinition.containsKey(PROPERTIES)) {
+ check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, Facet.properties, theContext);
}
@@ -3490,9 +3491,9 @@ substitute the canonical form for the short form so that checking does not have
theContext.exit();
}
- if (theDefinition.containsKey("valid_target_types")) {
+ if (theDefinition.containsKey(VALID_TARGET_TYPES)) {
checkTypeReference(Construct.Capability, theContext,
- ((List<String>) theDefinition.get("valid_target_types")).toArray(EMPTY_STRING_ARRAY));
+ ((List<String>) theDefinition.get(VALID_TARGET_TYPES)).toArray(EMPTY_STRING_ARRAY));
}
} finally {
theContext.exit();
@@ -3508,8 +3509,8 @@ substitute the canonical form for the short form so that checking does not have
return;
}
- if (theDefinition.containsKey("properties")) {
- check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+ if (theDefinition.containsKey(PROPERTIES)) {
+ check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
checkTypeConstructFacet(Construct.Capability, theName, theDefinition, Facet.properties, theContext);
}
@@ -3539,8 +3540,8 @@ substitute the canonical form for the short form so that checking does not have
return;
}
- if (theDefinition.containsKey("properties")) {
- check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
+ if (theDefinition.containsKey(PROPERTIES)) {
+ check_properties((Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
checkTypeConstructFacet(Construct.Data, theName, theDefinition, Facet.properties, theContext);
}
} finally {
@@ -3594,8 +3595,9 @@ substitute the canonical form for the short form so that checking does not have
public void check_attributes(Map<String, Map> theDefinitions, CheckContext theContext) {
theContext.enter("attributes");
try {
- if (!checkDefinition("attributes", theDefinitions, theContext))
- return;
+ if (!checkDefinition("attributes", theDefinitions, theContext)) {
+ return;
+ }
for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext();) {
Map.Entry<String, Map> e = i.next();
@@ -3625,10 +3627,11 @@ substitute the canonical form for the short form so that checking does not have
}
public void check_properties(Map<String, Map> theDefinitions, CheckContext theContext) {
- theContext.enter("properties");
+ theContext.enter(PROPERTIES);
try {
- if (!checkDefinition("properties", theDefinitions, theContext))
- return;
+ if (!checkDefinition(PROPERTIES, theDefinitions, theContext)) {
+ return;
+ }
for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext();) {
Map.Entry<String, Map> e = i.next();
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java
index 295a1f2..acc0a4a 100644
--- a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java
@@ -20,125 +20,128 @@ import org.onap.sdc.common.onaplog.Enums.LogLevel;
public class CommonLocator implements TargetLocator {
- private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
- private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
-
- private Set<URI> searchPaths = new LinkedHashSet();
-
- /* will create a locator with 2 default search paths: the file directory
- * from where the app was and the jar from which this checker (actually this
- * class) was loaded */
- public CommonLocator() {
- addSearchPath(
- Paths.get(".").toAbsolutePath().normalize().toUri());
- }
-
- public CommonLocator(String... theSearchPaths) {
- for (String path: theSearchPaths) {
- addSearchPath(path);
- }
- }
-
- public boolean addSearchPath(URI theURI) {
-
- if (!theURI.isAbsolute()) {
- errLogger.log(LogLevel.WARN, this.getClass().getName(), "Search paths must be absolute uris: {}", theURI);
- return false;
- }
-
- return searchPaths.add(theURI);
- }
-
- public boolean addSearchPath(String thePath) {
- URI suri = null;
- try {
- suri = new URI(thePath);
- }
- catch(URISyntaxException urisx) {
- errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid search path: {} {}", thePath, urisx);
- return false;
- }
-
- return addSearchPath(suri);
- }
-
- public Iterable<URI> searchPaths() {
- return Iterables.unmodifiableIterable(this.searchPaths);
- }
-
- /**
- * Takes the given path and first URI resolves it and then attempts to open
- * it (a way of verifying its existence) against each search path and stops
- * at the first succesful test.
- */
- public Target resolve(String theName) {
- URI puri = null;
- InputStream pis = null;
-
- //try classpath
- URL purl = getClass().getClassLoader().getResource(theName);
- if (purl != null) {
- try {
- return new Target(theName, purl.toURI());
- }
- catch (URISyntaxException urisx) {
- errLogger.log(LogLevel.ERROR, this.getClass().getName(), "The file {} wasn't found {}", theName, urisx);
- }
- }
-
- //try absolute
- try {
- puri = new URI(theName);
- if (puri.isAbsolute()) {
- try {
- pis = puri.toURL().openStream();
- }
- catch (IOException iox) {
- errLogger.log(LogLevel.WARN, this.getClass().getName(), "The path {} is an absolute uri but it cannot be opened {}", theName, iox);
- return null;
- }
- }
- }
- catch(URISyntaxException urisx) {
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "TargetResolver failed attempting {} {}", puri, urisx);
- //keep it silent but what are the chances ..
- }
-
- //try relative to the search paths
- for (URI suri: searchPaths) {
- try {
- puri = suri.resolve(theName);
- debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "TargetResolver trying {}", puri);
- pis = puri.toURL().openStream();
- return new Target(theName, puri.normalize());
- }
- catch (Exception x) {
- debugLogger.log(LogLevel.ERROR, this.getClass().getName(), "TargetResolver failed attempting {} {}", puri, x);
- continue;
- }
- finally {
- if (pis!= null) {
- try {
- pis.close();
- }
- catch (IOException iox) {
- }
- }
- }
- }
-
- return null;
- }
-
- public String toString() {
- return "CommonLocator(" + this.searchPaths + ")";
- }
-
-
- public static void main(String[] theArgs) {
- TargetLocator tl = new CommonLocator();
- tl.addSearchPath(java.nio.file.Paths.get("").toUri());
- tl.addSearchPath("file:///");
- debugLogger.log(LogLevel.DEBUG, CommonLocator.class.getName(), tl.resolve(theArgs[0]).toString());
- }
+ private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+ private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+ private Set<URI> searchPaths = new LinkedHashSet<>();
+
+ /* will create a locator with 2 default search paths: the file directory
+ * from where the app was launched and the jar from which this checker (actually this
+ * class) was loaded */
+ CommonLocator() {
+ addSearchPath(
+ Paths.get(".").toAbsolutePath().normalize().toUri());
+ }
+
+ public boolean addSearchPath(URI theURI) {
+
+ if (!theURI.isAbsolute()) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "Search paths must be absolute uris: {}", theURI);
+ return false;
+ }
+
+ return searchPaths.add(theURI);
+ }
+
+ public boolean addSearchPath(String thePath) {
+ URI suri;
+ try {
+ suri = new URI(thePath);
+ }
+ catch(URISyntaxException urisx) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid search path: {} {}", thePath, urisx);
+ return false;
+ }
+
+ return addSearchPath(suri);
+ }
+
+ public Iterable<URI> searchPaths() {
+ return Iterables.unmodifiableIterable(this.searchPaths);
+ }
+
+ /**
+ * Takes the given path and first URI resolves it and then attempts to open
+ * it (a way of verifying its existence) against each search path and stops
+ * at the first successful test.
+ */
+ public Target resolve(String theName) {
+ URI puri = null;
+ InputStream pis = null;
+
+ //try classpath
+ URL purl = getClass().getClassLoader().getResource(theName);
+ if (purl != null) {
+ try {
+ return new Target(theName, purl.toURI());
+ }
+ catch (URISyntaxException urisx) {
+ errLogger.log(LogLevel.ERROR, this.getClass().getName(), "The file {} wasn't found {}", theName, urisx);
+ }
+ }
+
+ //try absolute
+ try {
+ puri = new URI(theName);
+ if (puri.isAbsolute()) {
+ pis = getPathInputStream(puri,theName);
+ if (pis == null){
+ return null;
+ }
+ }
+ }
+ catch(URISyntaxException urisx) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "TargetResolver failed attempting {} {}", theName, urisx);
+ //keep it silent but what are the chances ..
+ }
+
+ //try relative to the search paths
+ for (URI suri: searchPaths) {
+ try {
+ puri = suri.resolve(theName);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "TargetResolver trying {}", puri);
+ pis = puri.toURL().openStream();
+ return new Target(theName, puri.normalize());
+ }
+ catch (Exception x) {
+ debugLogger.log(LogLevel.ERROR, this.getClass().getName(), "TargetResolver failed attempting {} {}", puri, x);
+ }
+ finally {
+ if (pis!= null) {
+ try {
+ pis.close();
+ }
+ catch (IOException iox) {
+ debugLogger.log(LogLevel.ERROR, this.getClass().getName(),"Error closing input stream {}", iox);
+ }
+ }
+ }
+ }
+
+ return null;
+ }
+
+ private InputStream getPathInputStream(URI puri, String theName){
+ InputStream res = null;
+ try (InputStream pis = puri.toURL().openStream()){
+ res = pis;
+ }
+ catch (IOException iox) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "The path {} is an absolute uri but it cannot be opened {}", theName, iox);
+ }
+ return res;
+ }
+
+
+ public String toString() {
+ return "CommonLocator(" + this.searchPaths + ")";
+ }
+
+
+ public static void main(String[] theArgs) {
+ TargetLocator tl = new CommonLocator();
+ tl.addSearchPath(java.nio.file.Paths.get("").toUri());
+ tl.addSearchPath("file:///");
+ debugLogger.log(LogLevel.DEBUG, CommonLocator.class.getName(), tl.resolve(theArgs[0]).toString());
+ }
}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java
index b05cff9..76dfca3 100644
--- a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java
@@ -8,15 +8,15 @@ package org.onap.sdc.dcae.checker;
*/
public enum Construct {
Data,
- Requirement,
+ Requirement,
Capability,
Relationship,
Artifact,
Interface,
Node,
- Group,
- Policy,
- Workflow
+ Group,
+ Policy,
+ Workflow
}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java
index 70552bb..879e5dc 100644
--- a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java
@@ -14,45 +14,33 @@ import java.util.regex.PatternSyntaxException;
import com.google.common.collect.Table;
import com.google.common.collect.HashBasedTable;
import org.onap.sdc.common.onaplog.OnapLoggerDebug;
-import org.onap.sdc.common.onaplog.OnapLoggerError;
import org.onap.sdc.common.onaplog.Enums.LogLevel;
/*
* String -- 'primitive tosca type' converters, used in verifying valuations
*/
public class Data {
- private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
private Data() {
}
-
- /*
- */
+
+
@FunctionalInterface
public static interface Evaluator {
-
public boolean eval(Object theExpr, Map theDef, Checker.CheckContext theCtx);
}
/* data type processing */
- private static Map<String,Type> typesByName = new HashMap<String,Type>();
+ private static Map<String,Type> typesByName = new HashMap<>();
static {
- //CoreType.String.toString();
- //CoreFunction.concat.toString();
- //Constraint.equal.toString();
}
-
public static Data.Type typeByName(String theName) {
return typesByName.getOrDefault(theName, userType);
}
-/*
- public static Evaluator getTypeEvaluator(Type theType) {
- }
-*/
/* Needs a better name ?? RValue??
* This is not an rvalue (C def) per se but the construct who's instances
@@ -101,7 +89,7 @@ public class Data {
(expr,def,ctx) -> Data.valueOf(ctx, expr, Boolean.class),
Data::evalScalarConstraints),
Null("null",
- (expr,def,ctx) -> expr.equals("null"),
+ (expr,def,ctx) -> "null".equals(expr),
null),
Timestamp("timestamp",
(expr,def,ctx) -> timestampRegex.matcher(expr.toString()).matches(),
@@ -114,7 +102,7 @@ public class Data {
/* use a scanner and check that the upper bound is indeed greater than
* the lower bound */
Range("range",
- (expr,def,ctx) -> { return rangeRegex.matcher(expr.toString()).matches();},
+ (expr,def,ctx) -> rangeRegex.matcher(expr.toString()).matches(),
null ),
Size("scalar-unit.size",
(expr,def,ctx) -> sizeRegex.matcher(expr.toString()).matches(),
@@ -128,20 +116,25 @@ public class Data {
private String toscaName;
- private Evaluator valueEvaluator,
- constraintsEvaluator;
+ private Evaluator valueEvaluator, constraintsEvaluator;
+
+
private CoreType(String theName, Evaluator theValueEvaluator, Evaluator theConstraintsEvaluator) {
this.toscaName = theName;
this.valueEvaluator = theValueEvaluator;
this.constraintsEvaluator = theConstraintsEvaluator;
- if (typesByName == null)
+ if (typesByName == null) {
throw new RuntimeException("No type index available!");
+ }
typesByName.put(this.toscaName, this);
}
+
+
+ @Override
public String toString() {
return this.toscaName;
}
@@ -260,8 +253,9 @@ public class Data {
Checker.CheckContext theCtx) {
Data.Type entryType = null;
Map entryTypeDef = (Map)theDef.get("entry_schema");
- if (null != entryTypeDef)
- entryType = typeByName((String)entryTypeDef.get("type"));
+ if (null != entryTypeDef) {
+ entryType = typeByName((String) entryTypeDef.get("type"));
+ }
boolean res = true;
for (Object val: theVals) {
@@ -271,39 +265,32 @@ public class Data {
f.evaluator().eval(val, entryTypeDef, theCtx)) {
res = false;
}
- else if (entryType != null &&
- !entryType.evaluator().eval(val, entryTypeDef, theCtx)) {
- res= false;
- //the error should hav been reported by the particular evaluator
- //theCtx.addError("Value " + val + " failed evaluation", null);
+ else if (entryType != null && !entryType.evaluator().eval(val, entryTypeDef, theCtx)) {
+ res = false;
}
}
return res;
}
- public static boolean evalListConstraints(Object theVal,
- Map theDef,
- Checker.CheckContext theCtx) {
+ public static boolean evalListConstraints(Object theVal, Map theDef, Checker.CheckContext theCtx) {
return evalCollectionConstraints((List)theVal, theDef, theCtx);
}
- public static boolean evalMapConstraints(Object theVal,
- Map theDef,
- Checker.CheckContext theCtx) {
+ public static boolean evalMapConstraints(Object theVal, Map theDef, Checker.CheckContext theCtx) {
return evalCollectionConstraints(((Map)theVal).values(), theDef, theCtx);
}
- private static boolean evalCollectionConstraints(Collection theVals,
- Map theDef,
- Checker.CheckContext theCtx) {
+ private static boolean evalCollectionConstraints(Collection theVals, Map theDef, Checker.CheckContext theCtx) {
//should check overall constraints
- if (theVals == null)
+ if (theVals == null) {
return true;
+ }
Map entryTypeDef = (Map)theDef.get("entry_schema");
- if (null == entryTypeDef)
+ if (null == entryTypeDef) {
return true;
+ }
String entryTypeName = (String)entryTypeDef.get("type");
Data.Type entryType = typeByName(entryTypeName);
@@ -311,11 +298,8 @@ public class Data {
boolean res = true;
for (Object val: theVals) {
Evaluator entryEvaluator = entryType.constraintsEvaluator();
- if (entryEvaluator != null &&
- !entryEvaluator.eval(val, entryTypeDef, theCtx)) {
+ if (entryEvaluator != null && !entryEvaluator.eval(val, entryTypeDef, theCtx)) {
res= false;
- //the constraints evaluator should have already added an error, but it also adds some context
- //theCtx.addError("Value " + val + " failed evaluation", null);
}
}
return res;
@@ -371,16 +355,12 @@ public class Data {
if (propVal != null) {
Data.Type propType = typeByName((String)propDef.get("type"));
- if (propType.constraintsEvaluator() != null &&
- !propType.constraintsEvaluator().eval(propVal, propDef, theCtx)) {
+ if (propType.constraintsEvaluator() != null && !propType.constraintsEvaluator().eval(propVal, propDef, theCtx)) {
res= false;
- //the constraints evaluator should have already added an error
- //theCtx.addError("Property " + propEntry.getKey() + " failed evaluation for " + propVal, null);
}
}
else {
- if (Boolean.TRUE == (Boolean)propDef.getOrDefault("required", Boolean.FALSE) &&
- !propDef.containsKey("default")) {
+ if (Boolean.TRUE == propDef.getOrDefault("required", Boolean.FALSE) && !propDef.containsKey("default")) {
theCtx.addError("Property " + propEntry.getKey() + " failed 'required' constraint; definition is " + propDef, null);
res = false;
}
@@ -402,26 +382,6 @@ public class Data {
return false;
}
-/*
- private static boolean valueOf(Class theTarget,
- String theExpr,
- Checker.CheckContext theCtx) {
- try {
- theTarget.getMethod("valueOf", new Class[] {String.class})
- .invoke(null, theExpr);
- return true;
- }
- catch (InvocationTargetException itx) {
- theCtx.addError("Failed to parse " + theExpr + " as a " + theTarget.getName(), itx.getCause());
- return false;
- }
- catch (Exception x) {
- theCtx.addError("Failed to valueOf " + theExpr + " as a " + theTarget.getName(), x);
- return false;
- }
- }
-*/
-
/*
* Function e(valuation)
* ?
@@ -499,18 +459,15 @@ public class Data {
}
}
- private static boolean evalConcat(
- Object theVal, Map theDef, Checker.CheckContext theCtx) {
+ private static boolean evalConcat(Object theVal, Map theDef, Checker.CheckContext theCtx) {
return true;
}
- private static boolean evalToken(
- Object theVal, Map theDef, Checker.CheckContext theCtx) {
+ private static boolean evalToken(Object theVal, Map theDef, Checker.CheckContext theCtx) {
return true;
}
- private static boolean evalGetInput(
- Object theVal, Map theDef, Checker.CheckContext theCtx) {
+ private static boolean evalGetInput(Object theVal, Map theDef, Checker.CheckContext theCtx) {
Map val = (Map)theVal;
Map.Entry entry = (Map.Entry)val.entrySet().iterator().next();
@@ -527,8 +484,9 @@ public class Data {
return false;
}
- if (theDef == null)
+ if (theDef == null) {
return true;
+ }
//the output must be type compatible with the input
String targetType = (String)theDef.get("type");
@@ -554,24 +512,24 @@ public class Data {
Object theVal, Map theDef,
EnumSet<Facet> theFacets, Checker.CheckContext theCtx) {
- Map val = (Map)theVal;
- Map.Entry entry = (Map.Entry)val.entrySet().iterator().next();
+ Map val = (Map) theVal;
+ Map.Entry entry = (Map.Entry) val.entrySet().iterator().next();
if (!(entry.getValue() instanceof List)) {
- theCtx.addError("get_property: argument must be a List" ,null);
+ theCtx.addError("get_property: argument must be a List", null);
return false;
}
- List args = (List)entry.getValue();
+ List args = (List) entry.getValue();
if (args.size() < 2) {
theCtx.addError("'get_property' has at least 2 arguments", null);
return false;
}
//the first argument is a node or relationship template
- String tmpl = (String)args.get(0);
- Construct tmplConstruct = null;
- Map tmplSpec = null;
+ String tmpl = (String) args.get(0);
+ Construct tmplConstruct;
+ Map tmplSpec;
if ("SELF".equals(tmpl)) {
tmpl = theCtx.enclosingConstruct(Construct.Node);
@@ -580,27 +538,23 @@ public class Data {
if (tmpl == null) {
theCtx.addError("'get_property' invalid SELF reference: no node or relationship template in scope at " + theCtx.getPath(), null);
return false;
- }
- else {
+ } else {
tmplConstruct = Construct.Relationship;
}
- }
- else {
+ } else {
tmplConstruct = Construct.Node;
}
tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), tmplConstruct, tmpl);
- }
- else if ("SOURCE".equals("tmpl")) {
+ } else if ("SOURCE".equals("tmpl")) {
//we are in the scope of a relationship template and this is the source node template.
tmpl = theCtx.enclosingConstruct(Construct.Relationship);
if (tmpl == null) {
theCtx.addError("'get_property' invalid SOURCE reference: no relationship template in scope at " + theCtx.getPath(), null);
return false;
}
-
+
return true;
- }
- else if ("TARGET".equals("tmpl")) {
+ } else if ("TARGET".equals("tmpl")) {
//we are in the scope of a relationship template and this is the target node template.
tmpl = theCtx.enclosingConstruct(Construct.Relationship);
if (tmpl == null) {
@@ -609,8 +563,7 @@ public class Data {
}
return true;
- }
- else if ("HOST".equals("tmpl")) {
+ } else if ("HOST".equals("tmpl")) {
tmpl = theCtx.enclosingConstruct(Construct.Node);
if (tmpl == null) {
theCtx.addError("'get_property' invalid HOST reference: no node template in scope at " + theCtx.getPath(), null);
@@ -618,8 +571,7 @@ public class Data {
}
return true;
- }
- else {
+ } else {
//try node template first
tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), Construct.Node, tmpl);
if (tmplSpec == null) {
@@ -628,20 +580,18 @@ public class Data {
if (tmplSpec == null) {
theCtx.addError("'get_data' invalid template reference '" + tmpl + "': no node or relationship template with this name", null);
return false;
- }
- else {
+ } else {
tmplConstruct = Construct.Relationship;
}
- }
- else {
+ } else {
tmplConstruct = Construct.Node;
}
}
int facetNameIndex = 1;
Construct facetConstruct = tmplConstruct; //whose construct the facet is supposed to belong to
- Map facetConstructSpec = null;
- String facetConstructType = null;
+ Map facetConstructSpec = null;
+ String facetConstructType = null;
if (tmplConstruct.equals(Construct.Node) &&
args.size() > 2) {
@@ -654,62 +604,56 @@ public class Data {
//while the spec does not make it explicit this can only take place
//if the first argument turned out to be a node template (as relationship
//templates/types do not have capabilities/requirements
- String secondArg = (String)args.get(1);
+ String secondArg = (String) args.get(1);
if ((facetConstructSpec = theCtx.catalog().getFacetDefinition(
- tmplConstruct,
- (String)tmplSpec.get("type"),
- Facet.capabilities,
- secondArg)) != null) {
+ tmplConstruct,
+ (String) tmplSpec.get("type"),
+ Facet.capabilities,
+ secondArg)) != null) {
facetNameIndex = 2;
facetConstruct = Construct.Capability;
- facetConstructType = (String)facetConstructSpec.get("type");
- }
- else if ((facetConstructSpec = theCtx.catalog().getRequirementDefinition(
- tmplConstruct,
- (String)tmplSpec.get("type"),
- secondArg)) != null) {
+ facetConstructType = (String) facetConstructSpec.get("type");
+ } else if ((facetConstructSpec = theCtx.catalog().getRequirementDefinition(
+ tmplConstruct,
+ (String) tmplSpec.get("type"),
+ secondArg)) != null) {
facetNameIndex = 2;
facetConstruct = Construct.Capability;
-
+
//find the spec of the capability this requirement points to
//TODO: check, can the capability reference be anything else but a capability type?
- facetConstructType = (String)facetConstructSpec.get("capability");
+ facetConstructType = (String) facetConstructSpec.get("capability");
}
- }
- else {
+ } else {
//we'll attempt to handle it as a property of the node template
facetConstruct = Construct.Node;
facetConstructSpec = tmplSpec;
- facetConstructType = (String)facetConstructSpec.get("type");
+ facetConstructType = (String) facetConstructSpec.get("type");
}
-
+
//validate the facet name
Map facetSpec = null;
- {
- String facetName = (String)args.get(facetNameIndex);
- for (Facet facet: theFacets) {
- facetSpec = theCtx.catalog()
- .getFacetDefinition(
- facetConstruct,
- facetConstructType,
- facet,
- facetName);
- if (facetSpec != null)
- break;
- }
- if (facetSpec == null) {
-//TODO: not the greatest message if the call strated with a requirement ..
- theCtx.addError("'get_data' invalid reference, '" + facetConstruct + "' " + facetConstructType + " has no " + theFacets + " with name " + facetName, null);
- return false;
+ String facetName = (String) args.get(facetNameIndex);
+ for (Facet facet : theFacets) {
+ facetSpec = theCtx.catalog()
+ .getFacetDefinition(
+ facetConstruct,
+ facetConstructType,
+ facet,
+ facetName);
+ if (facetSpec != null) {
+ break;
}
}
- //the rest of the arguments have to resolve to a field of the property's
- //data type; the propertySpec contains the type specification
- for (int i = facetNameIndex + 1; i < args.size(); i++) {
+ if (facetSpec == null) {
+//TODO: not the greatest message if the call started with a requirement ..
+ theCtx.addError("'get_data' invalid reference, '" + facetConstruct + "' " + facetConstructType + " has no " + theFacets + " with name " + facetName, null);
+ return false;
}
+
return true;
}
@@ -777,13 +721,15 @@ public class Data {
private static Object getConstraintValue(Map theDef,
Constraint theConstraint) {
List<Map> constraints = (List<Map>)theDef.get("constraints");
- if (null == constraints)
+ if (null == constraints) {
return null;
+ }
for(Map constraint: constraints) {
Object val = constraint.get(theConstraint.toString());
- if (val != null)
+ if (val != null) {
return val;
+ }
}
return null;
}
@@ -802,24 +748,20 @@ public class Data {
pattern;
}
-
/* hold the constraint evaluators for pairs of type/constraint.
* If a pair is not present than the given constraint does not apply
* to the type.
*/
- private static Table<Type,Constraint,Evaluator> typeConstraintEvaluator =null;
+ private static Table<Type,Constraint,Evaluator> typeConstraintEvaluator = null;
- public static Evaluator
- getTypeConstraintEvaluator(Type theType, Constraint theConstraint) {
+ public static Evaluator getTypeConstraintEvaluator(Type theType, Constraint theConstraint) {
if (typeConstraintEvaluator == null) {
typeConstraintEvaluator = HashBasedTable.create();
typeConstraintEvaluator.put(CoreType.String, Constraint.equal,
(val,def,ctx) -> val.equals(getConstraintValue(def,Constraint.equal)));
typeConstraintEvaluator.put(CoreType.String, Constraint.valid_values,
- (val,def,ctx) -> {
- return ((List)getConstraintValue(def,Constraint.valid_values)).contains(val);
- });
+ (val,def,ctx) -> ((List)getConstraintValue(def,Constraint.valid_values)).contains(val));
typeConstraintEvaluator.put(CoreType.String, Constraint.length,
(val,def,ctx) -> ((String)val).length() == ((Number)getConstraintValue(def,Constraint.length)).intValue());
typeConstraintEvaluator.put(CoreType.String, Constraint.min_length,
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/Defaultable.java b/dcaedt_validator/kwalify/src/main/java/kwalify/Defaultable.java
index f1de3fc..a2acfee 100644
--- a/dcaedt_validator/kwalify/src/main/java/kwalify/Defaultable.java
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/Defaultable.java
@@ -8,6 +8,6 @@ package kwalify;
* interface to have default value
*/
public interface Defaultable {
- Object getDefault();
- void setDefault(Object value);
+ Rule getDefault();
+ void setDefault(Rule value);
}
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/DefaultableHashMap.java b/dcaedt_validator/kwalify/src/main/java/kwalify/DefaultableHashMap.java
index c2c625c..5846f1b 100644
--- a/dcaedt_validator/kwalify/src/main/java/kwalify/DefaultableHashMap.java
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/DefaultableHashMap.java
@@ -4,7 +4,6 @@
package kwalify;
-import java.io.Serializable;
import java.util.HashMap;
/**
@@ -14,18 +13,18 @@ public class DefaultableHashMap extends HashMap implements Defaultable {
private static final long serialVersionUID = -5224819562023897380L;
- private Object defaultValue = null;
+ private Rule defaultValue;
public DefaultableHashMap() {
super();
}
- public Object getDefault() { return defaultValue; }
+ public Rule getDefault() { return defaultValue; }
- public void setDefault(Object value) { defaultValue = value; }
+ public void setDefault(Rule value) { defaultValue = value; }
@Override
public Object get(Object key) {
- return containsKey(key) ? super.get(key) : defaultValue;
+ return containsKey(key) ? (Rule)super.get(key) : defaultValue;
}
}
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/Messages.java b/dcaedt_validator/kwalify/src/main/java/kwalify/Messages.java
index b77f04b..e0bafb1 100644
--- a/dcaedt_validator/kwalify/src/main/java/kwalify/Messages.java
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/Messages.java
@@ -1,25 +1,22 @@
/*
- * @(#)Messages.java $Rev: 4 $ $Release: 0.5.1 $
- *
* copyright(c) 2005 kuwata-lab all rights reserved.
*/
package kwalify;
import java.util.ResourceBundle;
-//import java.util.Locale;
/**
* set of utility methods around messages.
*
- * @revision $Rev: 4 $
- * @release $Release: 0.5.1 $
*/
public class Messages {
- private static final String __basename = "kwalify.messages";
- private static ResourceBundle __messages = ResourceBundle.getBundle(__basename);
- //private static ResourceBundle __messages = ResourceBundle.getBundle(__basename, Locale.getDefault());
+ private static final String KWALIFY_MESSAGES = "kwalify.messages";
+ private static ResourceBundle __messages = ResourceBundle.getBundle(KWALIFY_MESSAGES);
+
+ // So that no one instantiates Messages, and to make sonar happy
+ private Messages(){}
public static String message(String key) {
return __messages.getString(key);
@@ -31,10 +28,9 @@ public class Messages {
public static String buildMessage(String key, Object value, Object[] args) {
String msg = message(key);
- assert msg != null;
if (args != null) {
- for (int i = 0; i < args.length; i++) { // don't use MessageFormat
- msg = msg.replaceFirst("%[sd]", escape(args[i]));
+ for (Object arg : args) { // don't use MessageFormat
+ msg = msg.replaceFirst("%[sd]", escape(arg));
}
}
if (value != null && !Types.isCollection(value)) {
@@ -44,8 +40,6 @@ public class Messages {
}
private static String escape(Object obj) {
- //return obj.toString().replaceAll("\\", "\\\\").replace("\n", "\\n"); // J2SK1.4 doesn't support String#replace(CharSequence, CharSequence)!
return obj.toString().replaceAll("\\\\", "\\\\\\\\").replaceAll("\\n", "\\\\n");
}
-
}
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/MetaValidator.java b/dcaedt_validator/kwalify/src/main/java/kwalify/MetaValidator.java
index c8c21a7..8c4d6a2 100644
--- a/dcaedt_validator/kwalify/src/main/java/kwalify/MetaValidator.java
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/MetaValidator.java
@@ -1,126 +1,138 @@
/*
- * @(#)MetaValidator.java $Rev: 4 $ $Release: 0.5.1 $
- *
* copyright(c) 2005 kuwata-lab all rights reserved.
*/
package kwalify;
-import org.onap.sdc.common.onaplog.OnapLoggerDebug;
import org.onap.sdc.common.onaplog.OnapLoggerError;
import org.onap.sdc.common.onaplog.Enums.LogLevel;
import java.util.Map;
import java.util.List;
-import java.util.Iterator;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import java.util.regex.PatternSyntaxException;
/**
* meta validator to validate schema definition
- *
- * @revision $Rev: 4 $
- * @release $Release: 0.5.1 $
*/
public class MetaValidator extends Validator {
+ private static final String RANGE = "range";
+ private static final String MAX_EX = "max-ex";
+ private static final String MIN_EX = "min-ex";
+ private static final String LENGTH = "length";
+ private static final String SEQUENCE = "sequence";
+ private static final String ENUM_CONFLICT = "enum.conflict";
+ private static final String SCALAR_CONFLICT = "scalar.conflict";
+ private static final String IDENT = "ident";
+ private static final String IDENT1 = "ident:";
+ private static final String MAPPING = "mapping";
+ private static final String PATTERN = "pattern:";
+ private static final String PATTERN1 = "pattern";
+ private static final String TYPE_MAP = " type: map\n";
+ private static final String TYPE_STR = " type: str\n";
+ private static final String TYPE_BOOL = " type: bool\n";
+ private static final String MAPPING1 = " mapping:\n";
+ private static final String TYPE_SCALAR = " type: scalar\n";
+ private static final String TYPE_INT = " type: int\n";
+
private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
- private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
- public static final String META_SCHEMA = ""
- + "name: MAIN\n"
- + "type: map\n"
- + "required: yes\n"
- + "mapping: &main-rule\n"
- + " \"name\":\n"
- + " type: str\n"
- + " \"desc\":\n"
- + " type: str\n"
- + " \"type\":\n"
- + " type: str\n"
- + " #required: yes\n"
- + " enum:\n"
- + " - seq\n"
- + " #- sequence\n"
- + " #- list\n"
- + " - map\n"
- + " #- mapping\n"
- + " #- hash\n"
- + " - str\n"
- + " #- string\n"
- + " - int\n"
- + " #- integer\n"
- + " - float\n"
- + " - number\n"
- + " #- numeric\n"
- + " - bool\n"
- + " #- boolean\n"
- + " - text\n"
- + " - date\n"
- + " - time\n"
- + " - timestamp\n"
- + " #- object\n"
- + " - any\n"
- + " - scalar\n"
- + " #- collection\n"
- + " \"required\":\n"
- + " type: bool\n"
- + " \"enum\":\n"
- + " type: seq\n"
- + " sequence:\n"
- + " - type: scalar\n"
- + " unique: yes\n"
- + " \"pattern\":\n"
- + " type: str\n"
- + " \"assert\":\n"
- + " type: str\n"
- + " pattern: /\\bval\\b/\n"
- + " \"range\":\n"
- + " type: map\n"
- + " mapping:\n"
- + " \"max\":\n"
- + " type: scalar\n"
- + " \"min\":\n"
- + " type: scalar\n"
- + " \"max-ex\":\n"
- + " type: scalar\n"
- + " \"min-ex\":\n"
- + " type: scalar\n"
- + " \"length\":\n"
- + " type: map\n"
- + " mapping:\n"
- + " \"max\":\n"
- + " type: int\n"
- + " \"min\":\n"
- + " type: int\n"
- + " \"max-ex\":\n"
- + " type: int\n"
- + " \"min-ex\":\n"
- + " type: int\n"
- + " \"ident\":\n"
- + " type: bool\n"
- + " \"unique\":\n"
- + " type: bool\n"
- + " \"sequence\":\n"
- + " name: SEQUENCE\n"
- + " type: seq\n"
- + " sequence:\n"
- + " - type: map\n"
- + " mapping: *main-rule\n"
- + " name: MAIN\n"
- + " #required: yes\n"
- + " \"mapping\":\n"
- + " name: MAPPING\n"
- + " type: map\n"
- + " mapping:\n"
- + " =:\n"
- + " type: map\n"
- + " mapping: *main-rule\n"
- + " name: MAIN\n"
- + " #required: yes\n"
- ;
+ private static final String META_SCHEMA = new StringBuilder().
+ append("").
+ append("name: MAIN\n").
+ append("type: map\n").
+ append("required: yes\n").
+ append("mapping: &main-rule\n").
+ append(" \"name\":\n").
+ append(TYPE_STR).
+ append(" \"desc\":\n").
+ append(TYPE_STR).
+ append(" \"type\":\n").
+ append(TYPE_STR).
+ append(" #required: yes\n").
+ append(" enum:\n").
+ append(" - seq\n").
+ append(" #- sequence\n").
+ append(" #- list\n").
+ append(" - map\n").
+ append(" #- mapping\n").
+ append(" #- hash\n").
+ append(" - str\n").
+ append(" #- string\n").
+ append(" - int\n").
+ append(" #- integer\n").
+ append(" - float\n").
+ append(" - number\n").
+ append(" #- numeric\n").
+ append(" - bool\n").
+ append(" #- boolean\n").
+ append(" - text\n").
+ append(" - date\n").
+ append(" - time\n").
+ append(" - timestamp\n").
+ append(" #- object\n").
+ append(" - any\n").
+ append(" - scalar\n").
+ append(" #- collection\n").
+ append(" \"required\":\n").
+ append(TYPE_BOOL).
+ append(" \"enum\":\n").
+ append(" type: seq\n").
+ append(" sequence:\n").
+ append(" - type: scalar\n").
+ append(" unique: yes\n").
+ append(" \"pattern\":\n").
+ append(TYPE_STR).
+ append(" \"assert\":\n").
+ append(TYPE_STR).
+ append(" pattern: /\\bval\\b/\n").
+ append(" \"range\":\n").
+ append(TYPE_MAP).
+ append(MAPPING1).
+ append(" \"max\":\n").
+ append(TYPE_SCALAR).
+ append(" \"min\":\n").
+ append(TYPE_SCALAR).
+ append(" \"max-ex\":\n").
+ append(TYPE_SCALAR).
+ append(" \"min-ex\":\n").
+ append(TYPE_SCALAR).
+ append(" \"length\":\n").
+ append(TYPE_MAP).
+ append(MAPPING1).
+ append(" \"max\":\n").
+ append(TYPE_INT).
+ append(" \"min\":\n").
+ append(TYPE_INT).
+ append(" \"max-ex\":\n").
+ append(TYPE_INT).
+ append(" \"min-ex\":\n").
+ append(TYPE_INT).
+ append(" \"ident\":\n").
+ append(TYPE_BOOL).
+ append(" \"unique\":\n").
+ append(TYPE_BOOL).
+ append(" \"sequence\":\n").
+ append(" name: SEQUENCE\n").
+ append(" type: seq\n").
+ append(" sequence:\n").
+ append(" - type: map\n").
+ append(" mapping: *main-rule\n").
+ append(" name: MAIN\n").
+ append(" #required: yes\n").
+ append(" \"mapping\":\n").
+ append(" name: MAPPING\n").
+ append(TYPE_MAP).
+ append(MAPPING1).
+ append(" =:\n").
+ append(" type: map\n").
+ append(" mapping: *main-rule\n").
+ append(" name: MAIN\n").
+ append(" #required: yes\n").
+ toString();
/**
*
@@ -140,6 +152,11 @@ public class MetaValidator extends Validator {
private static Validator __instance;
+ private MetaValidator(Map schema) {
+ super(schema);
+ }
+
+
public static Validator instance() {
synchronized (MetaValidator.class) {
if (__instance == null) {
@@ -147,6 +164,7 @@ public class MetaValidator extends Validator {
Map schema = (Map) YamlUtil.load(META_SCHEMA);
__instance = new MetaValidator(schema);
} catch (SyntaxException ex) {
+ errLogger.log(LogLevel.INFO,"MetaValidator","Failed validating schema: {}",ex);
assert false;
}
}
@@ -155,291 +173,224 @@ public class MetaValidator extends Validator {
return __instance;
}
- private MetaValidator(Map schema) {
- super(schema);
- }
-
+ @Override
public void postValidationHook(Object value, Rule rule, ValidationContext theContext) {
if (value == null) {
- return; // realy?
+ return; // really?
}
if (! "MAIN".equals(rule.getName())) {
return;
}
- //
assert value instanceof Map;
Map map = (Map)value;
String type = (String)map.get("type");
if (type == null) {
type = Types.getDefaultType();
}
- //Class type_class = Types.typeClass(type);
- //if (type_class == null) {
- // theContext.addError(validationError("type.unknown", rule, path + "/type", type, null));
- //}
- //
- //String pattern;
- //if ((pattern = (String)map.get("pattern")) != null) {
- if (map.containsKey("pattern")) {
- String pattern = (String)map.get("pattern");
+
+ if (map.containsKey(PATTERN1)) {
+ String pattern = (String)map.get(PATTERN1);
Matcher m = Util.matcher(pattern, "\\A\\/(.*)\\/([mi]?[mi]?)\\z");
String pat = m.find() ? m.group(1) : pattern;
try {
Pattern.compile(pat);
} catch (PatternSyntaxException ex) {
- theContext.addError("pattern.syntaxerr", rule, "pattern", pattern, null);
+ errLogger.log(LogLevel.INFO,"MetaValidator","pattern.syntaxerr: {}",ex);
+ theContext.addError("pattern.syntaxerr", rule, PATTERN1, pattern, null);
}
}
- //
- //List enum_list;
- //if ((enum_list = (List)map.get("enum")) != null) {
if (map.containsKey("enum")) {
- List enum_list = (List)map.get("enum");
+ List enumList = (List)map.get("enum");
if (Types.isCollectionType(type)) {
theContext.addError("enum.notscalar", rule, "enum:", (Object[])null);
} else {
- for (Iterator it = enum_list.iterator(); it.hasNext(); ) {
- Object elem = it.next();
- if (! Types.isCorrectType(elem, type)) {
- theContext.addError("enum.type.unmatch", rule, "enum", elem, new Object[] { Types.typeName(type) });
- }
- }
+ checkEnum(rule, theContext, type, enumList);
}
}
- //
- //String assert_str;
- //if ((assert_str = (String)map.get("assert")) != null) {
if (map.containsKey("assert")) {
errLogger.log(LogLevel.ERROR, this.getClass().getName(), "*** warning: sorry, 'assert:' is not supported in current version of Kwalify-java.");
- //String assert_str = (String)map.get("assert");
- //if (! Util.matches(assert_str, "\\bval\\b")) {
- // theContext.addError(validationError("assert.noval", rule, path + "/assert", assert_str, null);
- //}
- //try {
- // Expression.parse(assert_str);
- //} catch (InvalidExpressionException ex) {
- // theContext.addError(validationError("assert.syntaxerr", rule, path + "/assert", assert_str, null));
- //}
+
}
- //
- //Map range;
- //if ((range = (Map)map.get("range")) != null) {
- if (map.containsKey("range")) {
- Map range = (Map)map.get("range");
- //if (! (range instanceof Map)) {
- // theContext.addError(validtionError("range.notmap", rule, path + "/range", range, null));
- //} else
- if (Types.isCollectionType(type) || type.equals("bool") || type.equals("any")) {
+
+ if (map.containsKey(RANGE)) {
+ Map range = (Map)map.get(RANGE);
+
+ if (Types.isCollectionType(type) || "bool".equals(type) || "any".equals(type)) {
theContext.addError("range.notscalar", rule, "range:", null, null);
} else {
- for (Iterator it = range.keySet().iterator(); it.hasNext(); ) {
- String k = (String)it.next();
- Object v = range.get(k);
- if (! Types.isCorrectType(v, type)) {
- theContext.addError("range.type.unmatch", rule, "range/" + k, v, new Object[] { Types.typeName(type) });
- }
- }
+ rangeCheck(rule, theContext, type, range);
}
- if (range.containsKey("max") && range.containsKey("max-ex")) {
- theContext.addError("range.twomax", rule, "range", null, null);
+ if (range.containsKey("max") && range.containsKey(MAX_EX)) {
+ theContext.addError("range.twomax", rule, RANGE, null, null);
}
- if (range.containsKey("min") && range.containsKey("min-ex")) {
- theContext.addError("range.twomin", rule, "range", null, null);
+ if (range.containsKey("min") && range.containsKey(MIN_EX)) {
+ theContext.addError("range.twomin", rule, RANGE, null, null);
}
Object max = range.get("max");
Object min = range.get("min");
- Object max_ex = range.get("max-ex");
- Object min_ex = range.get("min-ex");
- Object[] args = null;
- //String error_symbol = null;
+ Object maxEx = range.get(MAX_EX);
+ Object minEx = range.get(MIN_EX);
+ Object[] args;
if (max != null) {
if (min != null && Util.compareValues(max, min) < 0) {
args = new Object[] { max, min };
- theContext.addError("range.maxltmin", rule, "range", null, args);
- } else if (min_ex != null && Util.compareValues(max, min_ex) <= 0) {
- args = new Object[] { max, min_ex };
- theContext.addError("range.maxleminex", rule, "range", null, args);
+ theContext.addError("range.maxltmin", rule, RANGE, null, args);
+ } else if (minEx != null && Util.compareValues(max, minEx) <= 0) {
+ args = new Object[] { max, minEx };
+ theContext.addError("range.maxleminex", rule, RANGE, null, args);
}
- } else if (max_ex != null) {
- if (min != null && Util.compareValues(max_ex, min) <= 0) {
- args = new Object[] { max_ex, min };
- theContext.addError("range.maxexlemin", rule, "range", null, args);
- } else if (min_ex != null && Util.compareValues(max_ex, min_ex) <= 0) {
- args = new Object[] { max_ex, min_ex };
- theContext.addError("range.maxexleminex", rule, "range", null, args);
+ } else if (maxEx != null) {
+ if (min != null && Util.compareValues(maxEx, min) <= 0) {
+ args = new Object[] { maxEx, min };
+ theContext.addError("range.maxexlemin", rule, RANGE, null, args);
+ } else if (minEx != null && Util.compareValues(maxEx, minEx) <= 0) {
+ args = new Object[] { maxEx, minEx };
+ theContext.addError("range.maxexleminex", rule, RANGE, null, args);
}
}
}
- //
- //Map length;
- //if ((length = (Map)map.get("length")) != null) {
- if (map.containsKey("length")) {
- Map length = (Map)map.get("length");
- //if (! (length instanceof Map)) {
- // theContext.addError(validtionError("length.notmap", rule, path + "/length", length, null));
- //} else
- if (! (type.equals("str") || type.equals("text"))) {
+ if (map.containsKey(LENGTH)) {
+ Map length = (Map)map.get(LENGTH);
+
+ if (! ("str".equals(type) || "text".equals(type))) {
theContext.addError("length.nottext", rule, "length:", (Object[])null);
}
- //for (Iterator it = length.keySet().iterator(); it.hasNext(); ) {
- // String k = (String)it.next();
- // Object v = length.get(k);
- // if (k == null || ! (k.equals("max") || k.equals("min") || k.equals("max-ex") || k.equals("min-ex"))) {
- // theContext.addError(validationError("length.undefined", rule, path + "/length/" + k, "" + k + ":", null));
- // } else if (! (v instanceof Integer)) {
- // theContext.addError(validationError("length.notint", rule, path + "/length/" + k, v, null));
- // }
- //}
- if (length.containsKey("max") && length.containsKey("max-ex")) {
- theContext.addError("length.twomax", rule, "length", (Object[])null);
+
+ if (length.containsKey("max") && length.containsKey(MAX_EX)) {
+ theContext.addError("length.twomax", rule, LENGTH, (Object[])null);
}
- if (length.containsKey("min") && length.containsKey("min-ex")) {
- theContext.addError("length.twomin", rule, "length", (Object[])null);
+ if (length.containsKey("min") && length.containsKey(MIN_EX)) {
+ theContext.addError("length.twomin", rule, LENGTH, (Object[])null);
}
Integer max = (Integer)length.get("max");
Integer min = (Integer)length.get("min");
- Integer max_ex = (Integer)length.get("max-ex");
- Integer min_ex = (Integer)length.get("min-ex");
- Object[] args = null;
- //String error_symbol = null;
+ Integer maxEx = (Integer)length.get(MAX_EX);
+ Integer minEx = (Integer)length.get(MIN_EX);
+ Object[] args;
if (max != null) {
if (min != null && max.compareTo(min) < 0) {
args = new Object[] { max, min };
- theContext.addError("length.maxltmin", rule, "length", null, args);
- } else if (min_ex != null && max.compareTo(min_ex) <= 0) {
- args = new Object[] { max, min_ex };
- theContext.addError("length.maxleminex", rule, "length", null, args);
+ theContext.addError("length.maxltmin", rule, LENGTH, null, args);
+ } else if (minEx != null && max.compareTo(minEx) <= 0) {
+ args = new Object[] { max, minEx };
+ theContext.addError("length.maxleminex", rule, LENGTH, null, args);
}
- } else if (max_ex != null) {
- if (min != null && max_ex.compareTo(min) <= 0) {
- args = new Object[] { max_ex, min };
- theContext.addError("length.maxexlemin", rule, "length", null, args);
- } else if (min_ex != null && max_ex.compareTo(min_ex) <= 0) {
- args = new Object[] { max_ex, min_ex };
- theContext.addError("length.maxexleminex", rule, "length", null, args);
+ } else if (maxEx != null) {
+ if (min != null && maxEx.compareTo(min) <= 0) {
+ args = new Object[] { maxEx, min };
+ theContext.addError("length.maxexlemin", rule, LENGTH, null, args);
+ } else if (minEx != null && maxEx.compareTo(minEx) <= 0) {
+ args = new Object[] { maxEx, minEx };
+ theContext.addError("length.maxexleminex", rule, LENGTH, null, args);
}
}
}
- //
- //Boolean unique;
- //if ((unique = (Boolean)map.get("unique")) != null) {
+
if (map.containsKey("unique")) {
Boolean unique = (Boolean)map.get("unique");
- if (unique.booleanValue() == true && Types.isCollectionType(type)) {
+ if (unique && Types.isCollectionType(type)) {
theContext.addError("unique.notscalar", rule, "unique:", (Object[])null);
}
if (theContext.getPath().length() == 0) {
theContext.addError("unique.onroot", rule, "", "unique:", null);
}
}
- //
- //Boolean ident;
- //if ((ident = (Boolean)map.get("ident")) != null) {
- if (map.containsKey("ident")) {
- Boolean ident = (Boolean)map.get("ident");
- if (ident.booleanValue() == true && Types.isCollectionType(type)) {
- theContext.addError("ident.notscalar", rule, "ident:", (Object[])null);
+
+ if (map.containsKey(IDENT)) {
+ Boolean ident = (Boolean)map.get(IDENT);
+ if (ident && Types.isCollectionType(type)) {
+ theContext.addError("ident.notscalar", rule, IDENT1, (Object[])null);
}
if (theContext.getPath().length() == 0) {
- theContext.addError("ident.onroot", rule, "/", "ident:", (Object[])null);
+ theContext.addError("ident.onroot", rule, "/", IDENT1, null);
}
}
- //
- //List seq;
- //if ((seq = (List)map.get("sequence")) != null) {
- if (map.containsKey("sequence")) {
- List seq = (List)map.get("sequence");
- //if (! (seq instanceof List)) {
- // theContext.addError(validationError("sequence.notseq", rule, path + "/sequence", seq, null));
- //} else
- if (seq == null || seq.size() == 0) {
- theContext.addError("sequence.noelem", rule, "sequence", seq, null);
+
+ if (map.containsKey(SEQUENCE)) {
+ List seq = (List)map.get(SEQUENCE);
+
+ if (seq == null || seq.isEmpty()) {
+ theContext.addError("sequence.noelem", rule, SEQUENCE, seq, null);
} else if (seq.size() > 1) {
- theContext.addError("sequence.toomany", rule, "sequence", seq, null);
+ theContext.addError("sequence.toomany", rule, SEQUENCE, seq, null);
} else {
Object item = seq.get(0);
assert item instanceof Map;
Map m = (Map)item;
- Boolean ident2 = (Boolean)m.get("ident");
- if (ident2 != null && ident2.booleanValue() == true && ! "map".equals(m.get("type"))) {
- theContext.addError("ident.notmap", null, "sequence/0", "ident:", null);
+ Boolean ident2 = (Boolean)m.get(IDENT);
+ if (ident2 != null && ident2 && ! "map".equals(m.get("type"))) {
+ theContext.addError("ident.notmap", null, "sequence/0", IDENT1, null);
}
}
}
- //
- //Map mapping;
- //if ((mapping = (Map)map.get("mapping")) != null) {
- if (map.containsKey("mapping")) {
- Map mapping = (Map)map.get("mapping");
- //if (mapping != null && ! (mapping instanceof Map)) {
- // theContext.addError(validationError("mapping.notmap", rule, path + "/mapping", mapping, null));
- //} else
- Object default_value = null;
+ if (map.containsKey(MAPPING)) {
+ Map mapping = (Map)map.get(MAPPING);
+
+ Object defaultValue = null;
if (mapping != null && mapping instanceof Defaultable) {
- default_value = ((Defaultable)mapping).getDefault();
+ defaultValue = ((Defaultable)mapping).getDefault();
}
- if (mapping == null || (mapping.size() == 0 && default_value == null)) {
- theContext.addError("mapping.noelem", rule, "mapping", mapping, null);
+ if (mapping == null || (mapping.size() == 0 && defaultValue == null)) {
+ theContext.addError("mapping.noelem", rule, MAPPING, mapping, null);
}
}
- //
- if (type.equals("seq")) {
- if (! map.containsKey("sequence")) {
+ if ("seq".equals(type)) {
+ if (! map.containsKey(SEQUENCE)) {
theContext.addError("seq.nosequence", rule, null, (Object[])null);
}
- //if (map.containsKey("enum")) {
- // theContext.addError(validationError("seq.conflict", rule, path, "enum:", null));
- //}
- if (map.containsKey("pattern")) {
- theContext.addError("seq.conflict", rule, "pattern:", (Object[])null);
+ if (map.containsKey(PATTERN1)) {
+ theContext.addError("seq.conflict", rule, PATTERN, (Object[])null);
}
- if (map.containsKey("mapping")) {
+ if (map.containsKey(MAPPING)) {
theContext.addError("seq.conflict", rule, "mapping:", (Object[])null);
}
- //if (map.containsKey("range")) {
- // theContext.addError(validationError("seq.conflict", rule, path, "range:", null));
- //}
- //if (map.containsKey("length")) {
- // theContext.addError(validationError("seq.conflict", rule, path, "length:", null));
- //}
- } else if (type.equals("map")) {
- if (! map.containsKey("mapping")) {
+ } else if ("map".equals(type)) {
+ if (! map.containsKey(MAPPING)) {
theContext.addError("map.nomapping", rule, null, (Object[])null);
}
- //if (map.containsKey("enum")) {
- // theContext.addError(validationError("map.conflict", rule, path, "enum:", null));
- //}
- if (map.containsKey("pattern")) {
- theContext.addError("map.conflict", rule, "pattern:", (Object[])null);
+ if (map.containsKey(PATTERN1)) {
+ theContext.addError("map.conflict", rule, PATTERN, (Object[])null);
}
- if (map.containsKey("sequence")) {
+ if (map.containsKey(SEQUENCE)) {
theContext.addError("map.conflict", rule, "sequence:", (Object[])null);
}
- //if (map.containsKey("range")) {
- // theContext.addError(validationError("map.conflict", rule, path, "range:", null));
- //}
- //if (map.containsKey("length")) {
- // theContext.addError(validationError("map.conflict", rule, path, "length:", null));
- //}
} else {
- if (map.containsKey("sequence")) {
- theContext.addError("scalar.conflict", rule, "sequence:", (Object[])null);
+ if (map.containsKey(SEQUENCE)) {
+ theContext.addError(SCALAR_CONFLICT, rule, "sequence:", (Object[])null);
}
- if (map.containsKey("mapping")) {
- theContext.addError("scalar.conflict", rule, "mapping:", (Object[])null);
+ if (map.containsKey(MAPPING)) {
+ theContext.addError(SCALAR_CONFLICT, rule, "mapping:", (Object[])null);
}
if (map.containsKey("enum")) {
- if (map.containsKey("range")) {
- theContext.addError("enum.conflict", rule, "range:", (Object[])null);
+ if (map.containsKey(RANGE)) {
+ theContext.addError(ENUM_CONFLICT, rule, "range:", (Object[])null);
}
- if (map.containsKey("length")) {
- theContext.addError("enum.conflict", rule, "length:", (Object[])null);
+ if (map.containsKey(LENGTH)) {
+ theContext.addError(ENUM_CONFLICT, rule, "length:", (Object[])null);
}
- if (map.containsKey("pattern")) {
- theContext.addError("enum.conflict", rule, "pattern:", (Object[])null);
+ if (map.containsKey(PATTERN1)) {
+ theContext.addError(ENUM_CONFLICT, rule, PATTERN, (Object[])null);
}
}
}
}
+ private void checkEnum(Rule rule, ValidationContext theContext, String type, List enumList) {
+ for (Object elem : enumList) {
+ if (!Types.isCorrectType(elem, type)) {
+ theContext.addError("enum.type.unmatch", rule, "enum", elem, new Object[]{Types.typeName(type)});
+ }
+ }
+ }
+
+ private void rangeCheck(Rule rule, ValidationContext theContext, String type, Map range) {
+ for (Object o : range.keySet()) {
+ String k = (String) o;
+ Object v = range.get(k);
+ if (!Types.isCorrectType(v, type)) {
+ theContext.addError("range.type.unmatch", rule, "range/" + k, v, new Object[]{Types.typeName(type)});
+ }
+ }
+ }
+
}
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/PlainYamlParser.java b/dcaedt_validator/kwalify/src/main/java/kwalify/PlainYamlParser.java
index 5f23a19..dd44403 100644
--- a/dcaedt_validator/kwalify/src/main/java/kwalify/PlainYamlParser.java
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/PlainYamlParser.java
@@ -85,17 +85,13 @@ public class PlainYamlParser implements Parser {
seq.set(index, value);
}
- Map createMapping() {
- return new DefaultableHashMap();
- }
-
private void setMappingValueWith(Map map, Object key, Object value) {
map.put(key, value);
}
void setMappingDefault(Map map, Object value) {
if (map instanceof Defaultable) {
- ((Defaultable)map).setDefault(value);
+ ((Defaultable)map).setDefault((Rule)value);
}
}
@@ -316,14 +312,14 @@ public class PlainYamlParser implements Parser {
private Map parseFlowMapping(int depth) throws SyntaxException {
assert currentChar() == '{';
- Map map = createMapping();
+ Map map = new DefaultableHashMap();
int ch = getChar();
if (ch != '}') {
Object[] pair = parseFlowMappingItem(depth + 1);
Object key = pair[0];
Object value = pair[1];
setMappingValueWith(map, key, value);
- while ((ch = currentChar()) == ',') {
+ while ((currentChar()) == ',') {
ch = getChar();
if (ch == '}') {
throw syntaxError("mapping item required (or last comma is extra).");
@@ -368,7 +364,8 @@ public class PlainYamlParser implements Parser {
scalar = sb.toString();
} else {
sb.append((char)ch);
- while ((ch = getCurrentCharacter()) >= 0 && ch != ':' && ch != ',' && ch != ']' && ch != '}') {
+ String lookup = ":,]}";
+ while ((ch = getCurrentCharacter()) >= 0 && lookup.indexOf(ch) == -1) {
sb.append((char)ch);
}
scalar = toScalar(sb.toString().trim());
@@ -543,15 +540,7 @@ public class PlainYamlParser implements Parser {
} else if (slen < indent) {
throw syntaxError("invalid indent in block text.");
} else {
- if (n > 0) {
- if (blockChar == '>' && sb.length() > 0) {
- sb.deleteCharAt(sb.length() - 1);
- }
- for (int i = 0; i < n; i++) {
- sb.append('\n');
- }
- n = 0;
- }
+ n = indentHandler(blockChar, sb, n);
str = currentLine.substring(indent);
}
}
@@ -563,6 +552,11 @@ public class PlainYamlParser implements Parser {
if (currentLine != null && Util.matches(currentLine, "^ *#")) {
getLine();
}
+ processIndicator(blockChar, indicator, sep, sb, n);
+ return createScalar(text + sb.toString());
+ }
+
+ private void processIndicator(char blockChar, char indicator, char sep, StringBuilder sb, int n) {
switch (indicator) {
case '+':
handlePlus(blockChar, sb, n);
@@ -575,7 +569,19 @@ public class PlainYamlParser implements Parser {
sb.setCharAt(sb.length() - 1, '\n');
}
}
- return createScalar(text + sb.toString());
+ }
+
+ private int indentHandler(char blockChar, StringBuilder sb, int indent) {
+ if (indent > 0) {
+ if (blockChar == '>' && sb.length() > 0) {
+ sb.deleteCharAt(sb.length() - 1);
+ }
+ for (int i = 0; i < indent; i++) {
+ sb.append('\n');
+ }
+ return 0;
+ }
+ return indent;
}
private void handleMinus(char sep, StringBuilder sb) {
@@ -637,7 +643,7 @@ public class PlainYamlParser implements Parser {
private Map parseMapping(int column, String value) throws SyntaxException {
assert Util.matches(value, REGEXP2);
- Map map = createMapping();
+ Map map = new DefaultableHashMap();
while (true) {
Matcher m = Util.matcher(value, REGEXP2);
if (! m.find()) {
@@ -670,16 +676,23 @@ public class PlainYamlParser implements Parser {
Matcher m2 = Util.matcher(currentLine, REGEXP1);
m2.find();
int indent = m2.group(1).length();
- if (indent < column) {
+ if (checkIndent(column, indent)) {
break;
- } else if (indent > column) {
- throw syntaxError("invalid indent of mapping.");
}
value = m2.group(2);
}
return map;
}
+ private boolean checkIndent(int column, int indent) throws SyntaxException {
+ if (indent < column) {
+ return true;
+ } else if (indent > column) {
+ throw syntaxError("invalid indent of mapping.");
+ }
+ return false;
+ }
+
private Object parseScalar(String value) {
Object data = createScalar(toScalar(value));
@@ -690,38 +703,66 @@ public class PlainYamlParser implements Parser {
private Object toScalar(String value) {
Matcher m;
- if ((m = Util.matcher(value, "^\"(.*)\"([ \t]*#.*$)?")).find()) {
+ m = Util.matcher(value, "^\"(.*)\"([ \t]*#.*$)?");
+ if (m.find()) {
return m.group(1);
- } else if ((m = Util.matcher(value, "^'(.*)'([ \t]*#.*$)?")).find()) {
+ }
+
+ m = Util.matcher(value, "^'(.*)'([ \t]*#.*$)?");
+ if (m.find()) {
return m.group(1);
- } else if ((m = Util.matcher(value, "^(.*\\S)[ \t]*#")).find()) {
+ }
+
+ m = Util.matcher(value, "^(.*\\S)[ \t]*#");
+ if (m.find()) {
value = m.group(1);
}
if (Util.matches(value, "^-?0x\\d+$")) {
return Integer.parseInt(value, 16);
- } else if (Util.matches(value, "^-?0\\d+$")) {
+ }
+
+ if (Util.matches(value, "^-?0\\d+$")) {
return Integer.parseInt(value, 8);
- } else if (Util.matches(value, "^-?\\d+$")) {
+ }
+
+ if (Util.matches(value, "^-?\\d+$")) {
return Integer.parseInt(value, 10);
- } else if (Util.matches(value, "^-?\\d+\\.\\d+$")) {
+ }
+
+ if (Util.matches(value, "^-?\\d+\\.\\d+$")) {
return Double.parseDouble(value);
- } else if (Util.matches(value, "^(true|yes|on)$")) {
+ }
+
+ if (Util.matches(value, "^(true|yes|on)$")) {
return Boolean.TRUE;
- } else if (Util.matches(value, "^(false|no|off)$")) {
+ }
+
+ if (Util.matches(value, "^(false|no|off)$")) {
return Boolean.FALSE;
- } else if (Util.matches(value, "^(null|~)$")){
+ }
+
+ if (Util.matches(value, "^(null|~)$")){
return null;
- } else if (Util.matches(value, "^:(\\w+)$")) {
+ }
+
+ if (Util.matches(value, "^:(\\w+)$")) {
return value;
- } else if ((m = Util.matcher(value, "^(\\d\\d\\d\\d)-(\\d\\d)-(\\d\\d)$")).find()) {
+ }
+
+ m = Util.matcher(value, "^(\\d\\d\\d\\d)-(\\d\\d)-(\\d\\d)$");
+ if (m.find()) {
int year = Integer.parseInt(m.group(1));
int month = Integer.parseInt(m.group(2));
int day = Integer.parseInt(m.group(3));
Calendar cal = Calendar.getInstance();
+ //noinspection MagicConstant
cal.set(year, month, day, 0, 0, 0);
return cal.getTime();
- } else if ((m = Util.matcher(value, "^(\\d\\d\\d\\d)-(\\d\\d)-(\\d\\d)(?:[Tt]|[ \t]+)(\\d\\d?):(\\d\\d):(\\d\\d)(\\.\\d*)?(?:Z|[ \t]*([-+]\\d\\d?)(?::(\\d\\d))?)?$")).find()) {
+ }
+
+ m = Util.matcher(value, "^(\\d\\d\\d\\d)-(\\d\\d)-(\\d\\d)(?:[Tt]|[ \t]+)(\\d\\d?):(\\d\\d):(\\d\\d)(\\.\\d*)?(?:Z|[ \t]*([-+]\\d\\d?)(?::(\\d\\d))?)?$");
+ if (m.find()) {
int year = Integer.parseInt(m.group(1));
int month = Integer.parseInt(m.group(2));
int day = Integer.parseInt(m.group(3));
@@ -731,12 +772,13 @@ public class PlainYamlParser implements Parser {
String timezone = "GMT" + m.group(8) + ":" + m.group(9);
Calendar cal = Calendar.getInstance();
+ //noinspection MagicConstant
cal.set(year, month, day, hour, min, sec);
cal.setTimeZone(TimeZone.getTimeZone(timezone));
return cal.getTime();
- } else {
- return value;
}
+
+ return value;
}
}
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/Rule.java b/dcaedt_validator/kwalify/src/main/java/kwalify/Rule.java
index 8dbe0b7..29a0fb5 100644
--- a/dcaedt_validator/kwalify/src/main/java/kwalify/Rule.java
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/Rule.java
@@ -5,12 +5,7 @@
package kwalify;
import java.io.Serializable;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.IdentityHashMap;
-import java.util.Iterator;
+import java.util.*;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import java.util.regex.PatternSyntaxException;
@@ -88,32 +83,32 @@ public class Rule implements Serializable{
private static final String UNIQUE3 = "unique: ";
private static final String ENUM2 = "enum:\n";
private static final String RANGE3 = "range: { ";
- private static final String NAME = "name";
- private static final String DESC = "desc";
+ private static final String NAME_CONSTANT = "name";
+ private static final String DESC_CONSTANT = "desc";
private static final String SHORT = "short";
- private static final String REQUIRED = "required";
+ private static final String REQUIRED_CONSTANT = "required";
private static final String TYPE = "type";
- private static final String PATTERN = "pattern";
- private static final String SEQUENCE = "sequence";
+ private static final String PATTERN_CONSTANT = "pattern";
+ private static final String SEQUENCE_CONSTANT = "sequence";
private static final String MAPPING = "mapping";
private static final String ASSERT = "assert";
- private static final String RANGE = "range";
- private static final String LENGTH = "length";
- private static final String IDENT = "ident";
- private static final String UNIQUE = "unique";
+ private static final String RANGE_CONSTANT = "range";
+ private static final String LENGTH_CONSTANT = "length";
+ private static final String IDENT_CONSTANT = "ident";
+ private static final String UNIQUE_CONSTANT = "unique";
private static final String ENUM = "enum:";
private static final String ENUM1 = "/enum";
- public static final String MAX = "max";
- public static final String MIN = "min";
+ private static final String MAX = "max";
+ private static final String MIN = "min";
private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
private Rule parent;
private String name = null;
private String desc = null;
- private String _short = null; //added by jora: only used for map types
+ private String shortValue = null; //added by jora: only used for map types
private boolean required = false;
- private String _type = null;
+ private String typeValue = null;
private Class typeClass = null;
private String pattern = null;
private Pattern patternRegexp = null;
@@ -126,20 +121,20 @@ public class Rule implements Serializable{
private boolean ident = false;
private boolean unique = false;
- private static final int CODE_NAME = NAME.hashCode();
- private static final int CODE_DESC = DESC.hashCode();
+ private static final int CODE_NAME = NAME_CONSTANT.hashCode();
+ private static final int CODE_DESC = DESC_CONSTANT.hashCode();
private static final int CODE_SHORT = SHORT.hashCode();
- private static final int CODE_REQUIRED = REQUIRED.hashCode();
+ private static final int CODE_REQUIRED = REQUIRED_CONSTANT.hashCode();
private static final int CODE_TYPE = TYPE.hashCode();
- private static final int CODE_PATTERN = PATTERN.hashCode();
- private static final int CODE_LENGTH = LENGTH.hashCode();
- private static final int CODE_RANGE = RANGE.hashCode();
+ private static final int CODE_PATTERN = PATTERN_CONSTANT.hashCode();
+ private static final int CODE_LENGTH = LENGTH_CONSTANT.hashCode();
+ private static final int CODE_RANGE = RANGE_CONSTANT.hashCode();
private static final int CODE_ASSERT = ASSERT.hashCode();
- private static final int CODE_IDENT = IDENT.hashCode();
- private static final int CODE_UNIQUE = UNIQUE.hashCode();
+ private static final int CODE_IDENT = IDENT_CONSTANT.hashCode();
+ private static final int CODE_UNIQUE = UNIQUE_CONSTANT.hashCode();
private static final int CODE_ENUM = ENUM.hashCode();
private static final int CODE_MAPPING = MAPPING.hashCode();
- private static final int CODE_SEQUENCE = SEQUENCE.hashCode();
+ private static final int CODE_SEQUENCE = SEQUENCE_CONSTANT.hashCode();
public Rule(Object schema, Rule parent) {
if (schema != null) {
@@ -175,21 +170,23 @@ public class Rule implements Serializable{
public String getName() { return name; }
public void setName(String name) { this.name = name; }
- public String getShort() { return _short; }
- public void setShort(String key) { _short = key; }
+ public String getShort() { return shortValue; }
+ public void setShort(String key) { shortValue = key; }
public boolean isRequired() { return required; }
public void setRequired(boolean required) { this.required = required; }
- public String getType() { return _type; }
- public void setType(String type) { this._type = type; }
+ public String getType() { return typeValue; }
+ public void setType(String type) { this.typeValue = type; }
public String getPattern() { return pattern; }
public void setPattern(String pattern) { this.pattern = pattern; }
public Pattern getPatternRegexp() { return patternRegexp; }
- public List getEnum() { return enumList; }
+ public List getEnum() {
+ return enumList;
+ }
public void setEnum(List enumList) { this.enumList = enumList; }
public List getSequence() { return sequence; }
@@ -245,29 +242,29 @@ public class Rule implements Serializable{
if (code == CODE_TYPE && key.equals(TYPE)) {
// done
- } else if (code == CODE_NAME && key.equals(NAME)) {
+ } else if (code == CODE_NAME && key.equals(NAME_CONSTANT)) {
initNameValue(value);
- } else if (code == CODE_DESC && key.equals(DESC)) {
+ } else if (code == CODE_DESC && key.equals(DESC_CONSTANT)) {
initDescValue(value);
} else if (code == CODE_SHORT && key.equals(SHORT)) {
initShortValue(value, rule, path);
- } else if (code == CODE_REQUIRED && key.equals(REQUIRED)) {
+ } else if (code == CODE_REQUIRED && key.equals(REQUIRED_CONSTANT)) {
initRequiredValue(value, rule, path);
- } else if (code == CODE_PATTERN && key.equals(PATTERN)) {
+ } else if (code == CODE_PATTERN && key.equals(PATTERN_CONSTANT)) {
initPatternValue(value, rule, path);
} else if (code == CODE_ENUM && key.equals(ENUM)) {
initEnumValue(value, rule, path);
} else if (code == CODE_ASSERT && key.equals(ASSERT)) {
initAssertValue(value, rule, path);
- } else if (code == CODE_RANGE && key.equals(RANGE)) {
+ } else if (code == CODE_RANGE && key.equals(RANGE_CONSTANT)) {
initRangeValue(value, rule, path);
- } else if (code == CODE_LENGTH && key.equals(LENGTH)) {
+ } else if (code == CODE_LENGTH && key.equals(LENGTH_CONSTANT)) {
initLengthValue(value, rule, path);
- } else if (code == CODE_IDENT && key.equals(IDENT)) {
+ } else if (code == CODE_IDENT && key.equals(IDENT_CONSTANT)) {
initIdentValue(value, rule, path);
- } else if (code == CODE_UNIQUE && key.equals(UNIQUE)) {
+ } else if (code == CODE_UNIQUE && key.equals(UNIQUE_CONSTANT)) {
initUniqueValue(value, rule, path);
- } else if (code == CODE_SEQUENCE && key.equals(SEQUENCE)) {
+ } else if (code == CODE_SEQUENCE && key.equals(SEQUENCE_CONSTANT)) {
rule = initSequenceValue(value, rule, path, ruleTable);
} else if (code == CODE_MAPPING && key.equals(MAPPING)) {
rule = initMappingValue(value, rule, path, ruleTable);
@@ -282,12 +279,12 @@ public class Rule implements Serializable{
value = Types.getDefaultType();
}
if (! (value instanceof String)) {
- throw schemaError(TYPE_NOTSTR, rule, path + TYPE1, _type, null);
+ throw schemaError(TYPE_NOTSTR, rule, path + TYPE1, typeValue, null);
}
- _type = (String)value;
- typeClass = Types.typeClass(_type);
- if (! Types.isBuiltinType(_type)) {
- throw schemaError(TYPE_UNKNOWN, rule, path + TYPE1, _type, null);
+ typeValue = (String)value;
+ typeClass = Types.typeClass(typeValue);
+ if (! Types.isBuiltinType(typeValue)) {
+ throw schemaError(TYPE_UNKNOWN, rule, path + TYPE1, typeValue, null);
}
}
@@ -305,12 +302,12 @@ public class Rule implements Serializable{
//the short form specification is to be interpreted as key if the type is a map or as an
//index if the target is a sequence (as index 0 actually)
- if (!Types.isCollectionType(_type)) {
+ if (!Types.isCollectionType(typeValue)) {
throw schemaError("range.notcollection", rule, path + "/short", value, null);
}
//we should also verify that it points to a declared key of the mapping .. not really, as it would
//fail the overall grammar
- _short = value.toString();
+ shortValue = value.toString();
}
private void initRequiredValue(Object value, Rule rule, String path) {
@@ -353,14 +350,14 @@ public class Rule implements Serializable{
throw schemaError("enum.notseq", rule, path + ENUM1, value, null);
}
enumList = (List)value;
- if (Types.isCollectionType(_type)) {
+ if (Types.isCollectionType(typeValue)) {
throw schemaError("enum.notscalar", rule, path, ENUM, null);
}
Map elemTable = new HashMap();
for (Iterator it = enumList.iterator(); it.hasNext(); ) {
Object elem = it.next();
if (! Util.isInstanceOf(elem, typeClass)) {
- throw schemaError("enum.type.unmatch", rule, path + ENUM1, elem, new Object[] { Types.typeName(_type) });
+ throw schemaError("enum.type.unmatch", rule, path + ENUM1, elem, new Object[] { Types.typeName(typeValue) });
}
if (elemTable.containsKey(elem)) {
throw schemaError("enum.duplicate", rule, path + ENUM1, elem, null);
@@ -385,7 +382,7 @@ public class Rule implements Serializable{
if (! (value instanceof Map)) {
throw schemaError("range.notmap", rule, path + RANGE1, value, null);
}
- if (Types.isCollectionType(_type) || "bool".equals(_type)) {
+ if (Types.isCollectionType(typeValue) || "bool".equals(typeValue)) {
throw schemaError("range.notscalar", rule, path, RANGE2, null);
}
range = (Map)value;
@@ -394,7 +391,7 @@ public class Rule implements Serializable{
Object rval = range.get(rkey);
if (MAX.equals(rkey) || MIN.equals(rkey) || rkey.equals(MAX_EX) || rkey.equals(MIN_EX)) {
if (! Util.isInstanceOf(rval, typeClass)) {
- String typename = Types.typeName(_type);
+ String typename = Types.typeName(typeValue);
throw schemaError("range.type.unmatch", rule, path + "/range/" + rkey, rval, new Object[] { typename });
}
} else {
@@ -439,7 +436,7 @@ public class Rule implements Serializable{
throw schemaError("length.notmap", rule, path + LENGTH1, value, null);
}
length = (Map)value;
- if (! ("str".equals(_type) || "text".equals(_type))) {
+ if (! ("str".equals(typeValue) || "text".equals(typeValue))) {
throw schemaError("length.nottext", rule, path, LENGTH2, null);
}
for (String k : length.keySet()) {
@@ -490,7 +487,7 @@ public class Rule implements Serializable{
}
ident = (Boolean) value;
required = true;
- if (Types.isCollectionType(_type)) {
+ if (Types.isCollectionType(typeValue)) {
throw schemaError(IDENT_NOTSCALAR, rule, path, IDENT1, null);
}
if (EMPTY_STRING.equals(path)) {
@@ -507,7 +504,7 @@ public class Rule implements Serializable{
throw schemaError(UNIQUE_NOTBOOL, rule, path + UNIQUE2, value, null);
}
unique = (Boolean) value;
- if (Types.isCollectionType(_type)) {
+ if (Types.isCollectionType(typeValue)) {
throw schemaError(UNIQUE_NOTSCALAR, rule, path, UNIQUE1, null);
}
if (path.equals(EMPTY_STRING)) {
@@ -557,6 +554,7 @@ public class Rule implements Serializable{
}
// create hash of rule
_mapping = new DefaultableHashMap();
+
if (defaultValue != null) {
rule = (Rule)ruleTable.get(defaultValue);
if (rule == null) {
@@ -565,15 +563,20 @@ public class Rule implements Serializable{
}
_mapping.setDefault(rule);
}
+
// put rules into _mapping
- Map map = (Map)value;
- for (Iterator it = map.keySet().iterator(); it.hasNext(); ) {
- Object k = it.next();
- Object v = map.get(k); // DefaultableHashMap
+ rule = putRulesIntoMap((Map) value, rule, path, ruleTable);
+ return rule;
+ }
+
+ private Rule putRulesIntoMap(Map value, Rule rule, String path, Map ruleTable) {
+ Map map = value;
+ for (Object k : map.keySet()) {
+ Object v = map.get(k);
if (v == null) {
v = new DefaultableHashMap();
}
- rule = (Rule)ruleTable.get(v);
+ rule = (Rule) ruleTable.get(v);
if (rule == null) {
rule = new Rule(null, this);
rule.init(v, path + MAPPING4 + k, ruleTable);
@@ -589,8 +592,8 @@ public class Rule implements Serializable{
private void checkConfliction(Map hash, Rule rule, String path) {
- if ("seq".equals(_type)) {
- if (! hash.containsKey(SEQUENCE)) {
+ if ("seq".equals(typeValue)) {
+ if (! hash.containsKey(SEQUENCE_CONSTANT)) {
throw schemaError("seq.nosequence", rule, path, null, null);
}
if (enumList != null) {
@@ -608,7 +611,7 @@ public class Rule implements Serializable{
if (length != null) {
throw schemaError(SEQ_CONFLICT, rule, path, LENGTH2, null);
}
- } else if (_type.equals(MAP)) {
+ } else if (typeValue.equals(MAP)) {
if (! hash.containsKey(MAPPING)) {
throw schemaError("map.nomapping", rule, path, null, null);
}
@@ -665,8 +668,8 @@ public class Rule implements Serializable{
if (desc != null) {
sb.append(indent).append(DESC1).append(desc).append("\n");
}
- if (_type != null) {
- sb.append(indent).append(TYPE2).append(_type).append("\n");
+ if (typeValue != null) {
+ sb.append(indent).append(TYPE2).append(typeValue).append("\n");
}
if (required) {
sb.append(indent).append(REQUIRED2).append(required).append("\n");
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/Validator.java b/dcaedt_validator/kwalify/src/main/java/kwalify/Validator.java
index e0f5af0..9129c53 100644
--- a/dcaedt_validator/kwalify/src/main/java/kwalify/Validator.java
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/Validator.java
@@ -77,6 +77,7 @@ public class Validator {
}
protected void postValidationHook(Object value, Rule rule, ValidationContext context) {
+ // nothing
}
private void _validateRule(Object value, Rule rule, ValidationContext context) {
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/YamlParser.java b/dcaedt_validator/kwalify/src/main/java/kwalify/YamlParser.java
index b5789d3..7192e5d 100644
--- a/dcaedt_validator/kwalify/src/main/java/kwalify/YamlParser.java
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/YamlParser.java
@@ -94,7 +94,7 @@ public class YamlParser extends PlainYamlParser {
}
protected Map createMapping() {
- Map map = super.createMapping();
+ Map map = new DefaultableHashMap();
linenumsTable.put(map, new HashMap());
return map;
}