Diffstat (limited to 'javatoscachecker/checker/src/main/java/org/onap/tosca')
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CSARRepository.java | 282
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Canonicals.java | 200
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Catalog.java | 459
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CatalogException.java | 29
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Checker.java | 3661
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CheckerException.java | 30
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CommonLocator.java | 156
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Construct.java | 34
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Data.java | 923
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Facet.java | 49
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Messages.java | 54
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Paths.java | 96
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Report.java | 115
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Repository.java | 62
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Stage.java | 33
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Target.java | 109
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetError.java | 55
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetLocator.java | 34
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Workflows.java | 287
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Catalogs.java | 49
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Checks.java | 42
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Validates.java | 41
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/package-info.java | 47
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifact.java | 32
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactType.java | 42
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactTypes.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifacts.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attribute.java | 34
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attributes.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/AttributesAssignments.java | 21
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capabilities.java | 22
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilitiesAssignments.java | 22
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capability.java | 61
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityAssignment.java | 39
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityType.java | 36
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityTypes.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraint.java | 41
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraints.java | 19
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataType.java | 40
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataTypes.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/EntrySchema.java | 27
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Group.java | 42
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupType.java | 56
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupTypes.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Groups.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Import.java | 31
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Imports.java | 19
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Input.java | 43
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Inputs.java | 19
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceType.java | 58
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceTypes.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Metadata.java | 20
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Models.java | 86
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeFilter.java | 28
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplate.java | 60
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplates.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeType.java | 55
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTypes.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operation.java | 32
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operations.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Outputs.java | 19
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Parameter.java | 45
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policies.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policy.java | 42
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyType.java | 89
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyTypes.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Properties.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PropertiesAssignments.java | 22
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Property.java | 41
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Range.java | 32
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplate.java | 45
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplates.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipType.java | 48
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTypes.java | 18
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repositories.java | 19
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repository.java | 55
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirement.java | 50
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementAssignment.java | 55
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirements.java | 19
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementsAssignments.java | 19
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ServiceTemplate.java | 70
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Status.java | 23
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Substitution.java | 54
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAMap.java | 25
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAObject.java | 63
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAProxy.java | 179
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCASeq.java | 24
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterface.java | 80
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterfaces.java | 19
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TopologyTemplate.java | 44
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterface.java | 47
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterfaces.java | 19
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/package-info.java | 30
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/package-info.java | 142
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/JSP.java | 659
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Process.java | 45
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessBuilder.java | 38
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Processor.java | 25
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessorException.java | 43
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/TargetInfo.java | 32
-rw-r--r--  javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/package-info.java | 23
101 files changed, 10404 insertions, 0 deletions
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CSARRepository.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CSARRepository.java
new file mode 100644
index 0000000..6375185
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CSARRepository.java
@@ -0,0 +1,282 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.io.Reader;
+import java.io.StringReader;
+import java.io.InputStream;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.InputStreamReader;
+import java.io.BufferedReader;
+
+import java.net.URI;
+import java.net.URL;
+import java.net.MalformedURLException;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Properties;
+import java.util.Collections;
+
+import java.util.zip.ZipInputStream;
+import java.util.zip.ZipEntry;
+
+import java.util.function.BiFunction;
+
+import org.apache.commons.io.IOUtils;
+
+/**
+ * Handles targets located within the same CSAR file.
+ * This is where the knowledge about the structure of a CSAR file should be placed.
+ * Compliant with the TOSCA 1.0 CSAR archive layout.
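+ * A minimal usage sketch (illustrative only: the archive location is hypothetical and the archive is assumed
+ * to declare an Entry-Definitions entry in its TOSCA-Metadata/TOSCA.meta):
+ * <pre>{@code
+ *   CSARRepository repo = new CSARRepository("my_csar", URI.create("file:/tmp/my.csar"));
+ *   Target main = repo.mainTarget(); //the template named by Entry-Definitions
+ *   for (Target entry: repo.targets())
+ *     System.out.println(entry.getLocation());
+ * }</pre>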
+ */
+public class CSARRepository extends Repository {
+
+
+ private String metaEntryName = "TOSCA-Metadata/TOSCA.meta";
+
+ private byte[] data;
+ private Properties meta = new Properties();
+ private Map<URI, Target> entries = null;
+
+ public CSARRepository(String theName, URI theRoot) throws IOException {
+ super(theName, theRoot);
+ load();
+ }
+
+ private void load() throws IOException {
+ InputStream is = null;
+ try {
+ is = this.getRoot().toURL().openStream();
+ this.data = IOUtils.toByteArray(is);
+ }
+ finally {
+ if (is != null) {
+ try {
+ is.close();
+ }
+ catch(IOException iox) {}
+ }
+ }
+ }
+
+ //one should read the meta-inf/MANIFEST.MF file before deciding that a file is text
+ private Object processData(BiFunction<ZipEntry,InputStream,Object> theProcessor) {
+
+ ZipInputStream archiveInputStream = new ZipInputStream(new ByteArrayInputStream(this.data));
+ Object result = null;
+ try {
+ ZipEntry archiveEntry = null;
+ while ((archiveEntry = archiveInputStream.getNextEntry()) != null) {
+ result = theProcessor.apply(archiveEntry, archiveInputStream);
+ if (null != result)
+ return result;
+ archiveInputStream.closeEntry();
+ }
+ }
+ catch (IOException iox) {
+ log.log(Level.WARNING, "Failed to read archive", iox);
+ }
+ finally {
+ try {
+ archiveInputStream.close();
+ }
+ catch (IOException iox) {
+ }
+ }
+ return result;
+ }
+
+ /* this will be useful when processing new style meta information .. */
+ private Object processMeta() {
+ return
+ processData( (entry,stream) -> {
+ if (!entry.getName().equals(this.metaEntryName))
+ return null;
+
+ return readMeta(stream);
+ });
+ }
+
+ private Boolean readMeta(InputStream theStream) {
+ BufferedReader reader = null;
+ try {
+ reader = new BufferedReader(new InputStreamReader(theStream, "UTF-8"));
+ this.meta.load(reader);
+ return Boolean.TRUE;
+ }
+ catch(IOException iox) {
+ log.log(Level.WARNING, "Failed to read archive meta entry", iox);
+ return Boolean.FALSE;
+ }
+ finally {
+ /*
+ if (reader != null) {
+ try {
+ reader.close();
+ }
+ catch (IOException iox) {
+ }
+ }
+ */
+ //!!Do not close as it is used with processData which does the entry close itself
+ }
+ }
+
+ /*
+ private Boolean readMeta() {
+ BufferedReader reader = null;
+ try {
+ reader = new BufferedReader(new InputStreamReader(stream, "UTF-8"));
+ String line = null;
+ while ((line = reader.readLine()) != null) {
+ //TODO:
+ }
+ return Boolean.TRUE;
+ }
+ catch (IOException iox) {
+ log.log(Level.WARNING, "Failed to read archive meta entry", iox);
+ return Boolean.FALSE;
+ }
+ finally {
+ if (reader != null) {
+ try {
+ reader.close();
+ }
+ catch (IOException iox) {
+ }
+ }
+ //!!Do not close as it is used with processData which does the entry close itself
+ }
+ }
+ */
+
+ private Map<URI,Target> entries() {
+ if (this.entries == null) {
+ this.entries = new HashMap<URI, Target>();
+ processData( (entry,stream) -> {
+ URI entryURI = this.rootURI.resolve(entry.getName());
+ this.entries.put(entryURI, new CsarTarget(entry.getName(), entryURI));
+
+ if (entry.getName().equals(this.metaEntryName))
+ readMeta(stream);
+
+ return null;
+ });
+ }
+ return this.entries;
+ }
+
+ public Target mainTarget() {
+ return entries().get(rootURI.resolve(this.meta.getProperty("Entry-Definitions")));
+ }
+
+ public Iterable<Target> targets() {
+ return entries().values();
+ }
+
+ /** */
+ public Target resolve(URI theURI) {
+ return entries().get(theURI);
+ }
+
+ public TargetLocator getTargetLocator() {
+ return new CSARTargetLocator();
+ }
+
+ /**
+ */
+ private class CSARTargetLocator implements TargetLocator {
+
+ /** */
+ public boolean addSearchPath(URI theURI) {
+ return false;
+ }
+
+ /** */
+ public boolean addSearchPath(String thePath) {
+ return false;
+ }
+
+ /** */
+ public Iterable<URI> searchPaths() {
+ return Collections.singleton(CSARRepository.this.rootURI);
+ }
+
+ /** */
+ public Target resolve(String theName) {
+ Target t = entries().get(CSARRepository.this.rootURI.resolve(theName));
+ if (t == null) {
+ //fallback: check relative to the main target folder
+ t = entries().get(CSARRepository.this.mainTarget().getLocation().resolve(theName));
+ }
+ return t;
+ }
+ }
+
+ /** */
+ private class CsarTarget extends Target {
+
+ private String content;
+
+ private CsarTarget(String theName, URI theUri) {
+ super(theName, theUri);
+ }
+
+ private String content() {
+ if (this.content == null) {
+ this.content = (String)processData( (entry,stream) -> {
+ //go over the entries and find the one with a matching name
+ ByteArrayOutputStream out = null;
+ if (entry.getName().equals(getName())) {
+ byte[] buffer = new byte[4096];
+ int len = 0;
+ out = new ByteArrayOutputStream();
+ try {
+ while ((len = stream.read(buffer)) > 0) {
+ out.write(buffer, 0, len);
+ }
+ log.info(entry.getName() + ": " + out.toString("UTF-8"));
+ }
+ catch (IOException iox) {
+ log.warning("Failed to read entry data: " + iox);
+ return out = null;
+ }
+ }
+ //!!Do not close as it is used with processData which does the entry close itself
+
+ try {
+ return (out != null) ? out.toString("UTF-8") : null;
+ }
+ catch (UnsupportedEncodingException uex) {
+ log.warning("Failed to process entry data as string: " + uex);
+ return "";
+ }
+ });
+ }
+ return this.content;
+ }
+
+ public Reader open() throws IOException {
+ return new StringReader(content());
+ }
+ }
+
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Canonicals.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Canonicals.java
new file mode 100644
index 0000000..3c12844
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Canonicals.java
@@ -0,0 +1,200 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+import java.util.List;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.TreeMap;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+import kwalify.Validator;
+import kwalify.Rule;
+import kwalify.Types;
+
+import com.google.common.collect.Table;
+import com.google.common.collect.HashBasedTable;
+
+import org.apache.commons.jxpath.JXPathContext;
+import org.apache.commons.jxpath.JXPathException;
+
+import org.onap.tosca.checker.annotations.Validates;
+
+/**
+ * Constructs and collects the canonical form during the validation step (syntax check) based on the short form
+ * indicator from the grammar specification.
+ * The TOSCA spec defines a 'short' form for most constructs, but we want the checking stage to work on the
+ * canonical form only and not have to handle the short form explicitly. This also makes for a simpler
+ * grammar specification.
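+ * For instance (an illustrative case, not tied to a particular grammar rule), an entry_schema given in short
+ * form as a plain string is expanded into its canonical map form using the rule's 'short' hint as the key:
+ * <pre>
+ *   entry_schema: string            # short form
+ *   entry_schema: {type: string}    # canonical form
+ * </pre>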
+ */
+public class Canonicals {
+
+ private Logger log = Logger.getLogger(Canonicals.class.getName());
+
+ private Table<Target, String, Object> canonicals = null; //HashBasedTable.create();
+
+ @Validates(rule="", timing=Validates.Timing.pre)
+ public boolean pre_validate_short_form(
+ Object theValue, Rule theRule, Validator.ValidationContext theContext) {
+
+ String hint = theRule.getShort();
+ if (theValue != null &&
+ hint != null) {
+
+ log.finer("Attempting canonical at " + theContext.getPath() + ", rule " + theRule.getName());
+
+ Object canonical = null;
+ //if the canonical form requires a collection
+ if (Types.isCollectionType(theRule.getType())) {
+ //and the actual value isn't one
+ if( !(theValue instanceof Map || theValue instanceof List)) {
+ //used to use singleton map/list here (was good for catching errors)
+ //but there is the possibility of short forms within short forms so
+ //the created canonicals need to accommodate other values.
+ if (Types.isMapType(theRule.getType())) {
+ canonical = new HashMap();
+ ((Map)canonical).put(hint, theValue);
+ }
+ else {
+ //the hint is irrelevant here but we should impose a value when the target is a list
+ canonical = new LinkedList();
+ ((List)canonical).add(theValue);
+ }
+ }
+ else {
+ //we can accommodate:
+ // map to list of map transformation
+ if (!Types.isMapType(theRule.getType()) /* a seq */ &&
+ theValue instanceof Map) {
+ canonical = new LinkedList();
+ ((List)canonical).add(theValue);
+ }
+ else {
+ log.fine("Grammar for rule " + theRule.getName() + " (at " + theContext.getPath() + ") would require unsupported short form transformation: " + theValue.getClass() + " to " + theRule.getType());
+ return false;
+ }
+ }
+
+ int errc = theContext.errorCount();
+ //validateRule(canonical, rule, context);
+ if (errc != theContext.errorCount()) {
+ //this would indicate that the grammar is not well specified
+ log.warning("Short notation for " + theRule.getName() + " through " + hint + " at " + theContext.getPath() + " failed validation");
+ }
+ else {
+ log.finer("Short notation for " + theRule.getName() + " through " + hint + " at " + theContext.getPath() + " passed validation. Canonical form is " + canonical);
+
+ if (this.canonicals != null)
+ this.canonicals.put(
+ ((Checker.TOSCAValidator)theContext.getValidator()).getTarget(),
+ theContext.getPath(), canonical);
+ else {
+ applyCanonical(
+ ((Checker.TOSCAValidator)theContext.getValidator()).getTarget().getTarget(),
+ theContext.getPath(), canonical);
+ }
+ return true;
+ }
+ }
+ else {
+ log.info("Grammar for rule " + theRule.getName() + " (at " + theContext.getPath() + ") would require unsupported short form transformation: " + theValue.getClass() + " to " + theRule.getType());
+ return false;
+ }
+ }
+ return false;
+ }
+
+
+ protected Object applyCanonical(Object theTarget,
+ String thePath,
+ Object theCanonical) {
+
+ //Object canonical = theCanonicals.get(thePath);
+ //if (canonical != null) {
+ String path = patchWhitespaces(
+ patchIndexes(thePath));
+ try {
+ JXPathContext.newContext(theTarget).setValue(path, theCanonical);
+ log.finer("Applied canonical form at: " + path);
+
+ // if (doRemove)
+ // theCanonicals.remove(thePath);
+ }
+ catch (JXPathException jxpx) {
+ log.log(Level.WARNING, "Failed to apply canonical to " + theTarget, jxpx);
+ }
+ //}
+ return theCanonical;
+ }
+
+ public void applyCanonicals(Target theTarget) {
+
+ if (this.canonicals == null) {
+ return;
+ }
+
+ Map<String, Object> targetCanonicals = this.canonicals.row(theTarget);
+ if (targetCanonicals != null) {
+ log.finest("applying canonicals: " + targetCanonicals);
+ for(Map.Entry<String, Object> canonicalEntry: targetCanonicals.entrySet()) {
+ applyCanonical(theTarget.getTarget(), canonicalEntry.getKey(), canonicalEntry.getValue());
+ }
+ }
+ }
+
+ /** Given that we remembered the canonical forms that were needed during
+ * validation to replace the short forms we can apply them to the target
+ * yaml.
+ * We take advantage here of the fact that the context path maintained
+ * during validation is compatible with (j)xpath, with the exception of
+ * sequence/array index notation ..
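+ * (e.g., an illustrative validation path such as /topology_template/inputs/0 is rewritten to
+ * /topology_template/inputs[1] before being handed to JXPath).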
+ */
+
+ private Pattern indexPattern = Pattern.compile("/\\p{Digit}+"),
+ spacePattern = Pattern.compile("\\s");
+
+ private String patchIndexes(CharSequence thePath) {
+ Matcher m = indexPattern.matcher(thePath);
+ StringBuffer path = new StringBuffer();
+ while (m.find()) {
+ String index = m.group();
+ index = "[" + (Integer.valueOf(index.substring(1)).intValue() + 1) + "]";
+ m.appendReplacement(path, m.quoteReplacement(index));
+ }
+ m.appendTail(path);
+ return path.toString();
+ }
+
+
+ private String patchWhitespaces(String thePath) {
+ String[] elems = thePath.split("/");
+ StringBuffer path = new StringBuffer();
+ for (int i = 0; i < elems.length; i++) {
+ if (spacePattern.matcher(elems[i]).find())
+ path.append("[@name='")
+ .append(elems[i])
+ .append("']");
+ else
+ path.append("/")
+ .append(elems[i]);
+ }
+ return path.toString();
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Catalog.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Catalog.java
new file mode 100644
index 0000000..35d0d1c
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Catalog.java
@@ -0,0 +1,459 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+import java.util.Iterator;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.Set;
+import java.util.Map;
+import java.util.List;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.ArrayList;
+import java.util.Collections;
+
+import java.util.stream.Collectors;
+
+import java.net.URI;
+
+import com.google.common.base.Predicate;
+import com.google.common.base.Function;
+import com.google.common.collect.Iterators;
+import com.google.common.collect.Table;
+import com.google.common.collect.HashBasedTable;
+
+/*
+ * Oddball: tracking inputs as data templates could be seen as rather
+ * odd but we see them as instances of data types, in the same way node
+ * templates are instances of node types.
+ */
+public class Catalog {
+
+ /* Type hierarchies are stored as maps from a type name to its definition.
+ * Not the best structure, but it makes it easy to follow a hierarchy towards its root ..
+ */
+ private EnumMap<Construct, Map<String,Map>> types =
+ new EnumMap<Construct, Map<String,Map>>(Construct.class);
+ /* track templates: we track templates (type instances) first per target, then per construct.
+ * This allows us to share the catalog among multiple templates sharing the same type set
+ */
+ private Map<Target, EnumMap<Construct, Map<String,Map>>> templates =
+ new HashMap<Target, EnumMap<Construct, Map<String,Map>>>();
+
+ private Catalog parent;
+
+ public Catalog(Catalog theParent) {
+ this.parent = theParent;
+ /* there are no requirement types, they are the same as capability types */
+ types.put(Construct.Data, new LinkedHashMap<String, Map>());
+ types.put(Construct.Capability, new LinkedHashMap<String, Map>());
+ types.put(Construct.Relationship, new LinkedHashMap<String, Map>());
+ types.put(Construct.Artifact, new LinkedHashMap<String, Map>());
+ types.put(Construct.Interface, new LinkedHashMap<String, Map>());
+ types.put(Construct.Node, new LinkedHashMap<String, Map>());
+ types.put(Construct.Group, new LinkedHashMap<String, Map>());
+ types.put(Construct.Policy, new LinkedHashMap<String, Map>());
+
+ }
+
+ public Catalog() {
+ this(null);
+ }
+
+ public boolean addType(Construct theConstruct, String theName, Map theDef) {
+ if (hasType(theConstruct, theName)) {
+ return false;
+ }
+ getConstructTypes(theConstruct).put(theName, theDef);
+ return true;
+ }
+
+ public Map getTypeDefinition(Construct theConstruct, String theName) {
+ Map<String, Map> constructTypes = getConstructTypes(theConstruct);
+ Map typeDef = constructTypes.get(theName);
+ if (typeDef == null && this.parent != null) {
+ return this.parent.getTypeDefinition(theConstruct, theName);
+ }
+ return typeDef;
+ }
+
+ public boolean hasType(Construct theConstruct, String theName) {
+ Map<String, Map> constructTypes = getConstructTypes(theConstruct);
+ boolean res = constructTypes.containsKey(theName);
+ if (!res && this.parent != null) {
+ res = this.parent.hasType(theConstruct, theName);
+ }
+ return res;
+ }
+
+ public Map<String, Map> getConstructTypes(Construct theConstruct) {
+ Map<String, Map> constructTypes = this.types.get(theConstruct);
+ if (null == constructTypes) {
+ throw new RuntimeException("Something worse is cooking here!",
+ new CatalogException("No types for construct " + theConstruct));
+ }
+ return constructTypes;
+ }
+
+ protected Iterator<Map.Entry<String,Map>>
+ typesIterator(Construct theConstruct) {
+ List<Map.Entry<String,Map>> constructTypes =
+ new ArrayList<Map.Entry<String,Map>>(
+ this.types.get(theConstruct).entrySet());
+ Collections.reverse(constructTypes);
+ return (this.parent == null)
+ ? constructTypes.iterator()
+ : Iterators.concat(constructTypes.iterator(),
+ this.parent.typesIterator(theConstruct));
+ }
+
+ /* this will iterate through the type hierarchy for the given type, included.
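+ * For instance, with the sample catalogs built in main() below (illustrative data only),
+ * hierarchy(Construct.Node, "__b") yields the entries for __b, _b, __a and _a, in that order.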
+ */
+ public Iterator<Map.Entry<String,Map>>
+ hierarchy(Construct theConstruct, final String theName) {
+ return Iterators.filter(typesIterator(theConstruct),
+ new Predicate<Map.Entry<String,Map>>() {
+ Object next = theName;
+ public boolean apply(Map.Entry<String,Map> theEntry) {
+ if (next != null && next.equals(theEntry.getKey())) {
+ next = theEntry.getValue().get("derived_from");
+ return true;
+ }
+ else
+ return false;
+ }
+ });
+ }
+
+ public boolean isDerivedFrom(Construct theConstruct, String theType, String theBaseType) {
+
+ Iterator<Map.Entry<String,Map>> hierachyIterator =
+ hierarchy(theConstruct, theType);
+ while (hierachyIterator.hasNext()) {
+ Map.Entry<String,Map> typeDef = hierachyIterator.next();
+
+ if (typeDef.getKey().equals(theBaseType)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /* We go over the type hierarchy and retain only an iterator over the
+ * elements of the given facet for each type in the hierarchy.
+ * We concatenate these iterators and filter out duplicates.
+ * TODO: cannot just filter out duplicates - a redefinition can refine the one in the base construct so we
+ * should merge them!
+ */
+ public Iterator<Map.Entry> facets(Construct theConstruct,
+ final Facet theFacet,
+ final String theName) {
+ return
+ Iterators.filter(
+ Iterators.concat(
+ Iterators.transform(
+ hierarchy(theConstruct, theName),
+ new Function<Map.Entry<String,Map>, Iterator<Map.Entry>>() {
+ public Iterator<Map.Entry> apply(Map.Entry<String,Map> theEntry) {
+ Map m = (Map)theEntry.getValue().get(theFacet.name());
+ return m == null
+ ? Collections.emptyIterator()
+ : m.entrySet().iterator();
+ }
+ }
+ )
+ ),
+ new Predicate<Map.Entry>() {
+ Set insts = new HashSet();
+ public boolean apply(Map.Entry theEntry) {
+ return !insts.contains(theEntry.getKey());
+ }
+ }
+ );
+ }
+
+ //no need to specify a construct, only nodes can have requirements
+ public Iterator<Map.Entry> requirements(final String theName) {
+ return
+ Iterators.concat(
+ Iterators.transform(
+ hierarchy(Construct.Node, theName),
+ new Function<Map.Entry<String,Map>, Iterator<Map.Entry>>() {
+ public Iterator<Map.Entry> apply(Map.Entry<String,Map> theEntry) {
+ List<Map> l = (List<Map>)theEntry.getValue().get("requirements");
+ return l == null
+ ? Collections.emptyIterator()
+ : Iterators.concat(
+ Iterators.transform(
+ l.iterator(),
+ new Function<Map, Iterator<Map.Entry>> () {
+ public Iterator<Map.Entry> apply(Map theEntry) {
+ return theEntry.entrySet().iterator();
+ }
+ }
+ )
+ );
+ }
+ }
+ )
+ );
+ }
+
+ /* Example: find the definition of property 'port' of the node type
+ * tosca.nodes.Database (properties being a facet of the node construct)
+ *
+ * Note: the definition of a facet is cumulative, i.e. more specialized
+ * definitions contribute (by overwriting) to the effective definition.
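+ * An illustrative call (assuming the corresponding Facet constant is named 'properties'):
+ *   getFacetDefinition(Construct.Node, "tosca.nodes.Database", Facet.properties, "port")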
+ */
+ public Map getFacetDefinition(Construct theConstruct,
+ String theConstructTypeName,
+ Facet theFacet,
+ String theName) {
+ Map def = null;
+ Iterator<Map.Entry<String,Map>> ti = hierarchy(theConstruct, theConstructTypeName);
+ while (ti.hasNext()) {
+ //this is where requirements would yield a List ..
+ Map<String,Map> fset = (Map<String,Map>)
+ //theFacet.iterator(
+ ti.next().getValue().get(theFacet.name());
+ if (fset != null) {
+ def = def == null ? fset.get(theName)
+ : mergeDefinitions(def, fset.get(theName));
+ }
+ }
+ return def;
+ }
+
+ public Map getRequirementDefinition(Construct theConstruct,
+ String theConstructTypeName,
+ String theName) {
+ Iterator<Map.Entry<String,Map>> ti = hierarchy(theConstruct, theConstructTypeName);
+ while (ti.hasNext()) {
+ //this is where requirements yield a List ..
+ List<Map> reqs = (List<Map>)
+ ti.next().getValue().get("requirements");
+ for (Map req: reqs) {
+ Map.Entry reqe = (Map.Entry)req.entrySet().iterator().next();
+ if (theName.equals(reqe.getKey())) {
+ return (Map)reqe.getValue();
+ }
+ }
+ }
+ return null;
+ }
+
+ /* */
+ private EnumMap<Construct,Map<String,Map>> getTemplates(Target theTarget) {
+ EnumMap<Construct, Map<String,Map>> targetTemplates = templates.get(theTarget);
+ if (targetTemplates == null) {
+ targetTemplates = new EnumMap<Construct,Map<String,Map>>(Construct.class);
+ targetTemplates.put(Construct.Data, new LinkedHashMap<String, Map>());
+ targetTemplates.put(Construct.Relationship, new LinkedHashMap<String, Map>());
+ targetTemplates.put(Construct.Node, new LinkedHashMap<String, Map>());
+ targetTemplates.put(Construct.Group, new LinkedHashMap<String, Map>());
+ targetTemplates.put(Construct.Policy, new LinkedHashMap<String, Map>());
+
+ templates.put(theTarget, targetTemplates);
+ }
+ return targetTemplates;
+ }
+
+ public Map<String,Map> getTargetTemplates(Target theTarget, Construct theConstruct) {
+ return getTemplates(theTarget).get(theConstruct);
+ }
+
+ public void addTemplate(Target theTarget, Construct theConstruct, String theName, Map theDef)
+ throws CatalogException {
+ Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct);
+ if (null == constructTemplates) {
+ throw new CatalogException("No such thing as " + theConstruct + " templates");
+ }
+ if (constructTemplates.containsKey(theName)) {
+ throw new CatalogException(theConstruct + " template '" + theName + "' re-declaration");
+ }
+ constructTemplates.put(theName, theDef);
+ }
+
+ public boolean hasTemplate(Target theTarget, Construct theConstruct, String theName) {
+ Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct);
+ return constructTemplates != null &&
+ constructTemplates.containsKey(theName);
+ }
+
+ public Map getTemplate(Target theTarget, Construct theConstruct, String theName) {
+ Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct);
+ if (constructTemplates != null)
+ return constructTemplates.get(theName);
+ else
+ return null;
+ }
+
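+ /* Merges theIncrement into theAggregate without overriding keys already present in the aggregate.
+ * An illustrative merge: mergeDefinitions({required: true}, {required: false, description: d})
+ * yields {required: true, description: d}.
+ */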
+ public static Map mergeDefinitions(Map theAggregate, Map theIncrement) {
+ if (theIncrement == null)
+ return theAggregate;
+
+ for(Map.Entry e: (Set<Map.Entry>)theIncrement.entrySet()) {
+ theAggregate.putIfAbsent(e.getKey(), e.getValue());
+ }
+ return theAggregate;
+ }
+
+ /* tracks imports, i.e. targets */
+ private LinkedHashMap<URI, Target> targets =
+ new LinkedHashMap<URI, Target>();
+ /* tracks dependencies between targets, i.e. the 'adjacency' matrix defined by
+ * the 'import' relationship */
+ private Table<Target,Target,Boolean> imports = HashBasedTable.create();
+
+
+ /*
+ * theParent contains an 'include/import' statement pointing to the Target
+ */
+ public boolean addTarget(Target theTarget, Target theParent) {
+ boolean cataloged = hasTarget(theTarget.getLocation());
+
+ if(!cataloged) {
+ targets.put(theTarget.getLocation(), theTarget);
+ }
+
+ if (theParent != null) {
+ imports.put(theParent, theTarget, Boolean.TRUE);
+ }
+
+ return !cataloged;
+ }
+
+ public boolean hasTarget(URI theLocation) {
+ return this.targets.containsKey(theLocation) ? true
+ : this.parent != null ? this.parent.hasTarget(theLocation)
+ : false;
+ }
+
+ public Target getTarget(URI theLocation) {
+ Target t = this.targets.get(theLocation);
+ if (t == null && this.parent != null) {
+ t = this.parent.getTarget(theLocation);
+ }
+ return t;
+ }
+
+ public Collection<Target> targets() {
+ return targets.values();
+ }
+
+ /* Targets that no other targets depend on */
+ public Collection<Target> topTargets() {
+ return targets.values()
+ .stream()
+ .filter(t -> !imports.containsColumn(t))
+ .collect(Collectors.toList());
+
+ }
+
+ public String importString(Target theTarget) {
+ return importString(theTarget, " ");
+ }
+
+ private String importString(Target theTarget, String thePrefix) {
+ StringBuilder sb = new StringBuilder("");
+ Map<Target,Boolean> parents = imports.column(theTarget);
+ if (parents != null) {
+ for (Target p: parents.keySet()) {
+ sb.append(thePrefix)
+ .append("from ")
+ .append(p.getLocation())
+ .append("\n")
+ .append(importString(p, thePrefix + " "));
+ }
+ //we only keep the positive relationships
+ }
+ return sb.toString();
+ }
+
+ /* */
+ private class TargetComparator implements Comparator<Target> {
+
+ /* @return 1 if there is a dependency path from theTargetOne to theTargetTwo, -1 if there is one in the opposite direction, 0 otherwise */
+ public int compare(Target theTargetOne, Target theTargetTwo) {
+ if (hasPath(theTargetTwo, theTargetOne))
+ return -1;
+
+ if (hasPath(theTargetOne, theTargetTwo))
+ return 1;
+
+ return 0;
+ }
+
+ public boolean hasPath(Target theStart, Target theEnd) {
+ Map<Target,Boolean> deps = imports.row(theStart);
+ if (deps.containsKey(theEnd))
+ return true;
+ for (Target dep: deps.keySet()) {
+ if (hasPath(dep, theEnd))
+ return true;
+ }
+ return false;
+ }
+ }
+
+ public Collection<Target> sortedTargets() {
+ List keys = new ArrayList(this.targets.values());
+ Collections.sort(keys, new TargetComparator());
+ return keys;
+ }
+
+ public static void main(String[] theArgs) throws Exception {
+
+ Catalog cat = new Catalog();
+
+ Target a = new Target("a", new URI("a")),
+ b = new Target("b", new URI("b")),
+ c = new Target("c", new URI("c")),
+ d = new Target("d", new URI("d"));
+
+ cat.addTarget(a, null);
+ cat.addTarget(b, null);
+ cat.addTarget(c, null);
+ cat.addTarget(d, null);
+
+ cat.addTarget(b, c);
+ cat.addTarget(a, c);
+ cat.addTarget(c, d);
+ cat.addTarget(a, b);
+
+ //System.out.println(cat.importString(c));
+ for (Target t: cat.sortedTargets())
+ System.out.println(t);
+
+ Catalog root = new Catalog();
+ root.addType(Construct.Node, "_a", Collections.emptyMap());
+ root.addType(Construct.Node, "__a", Collections.singletonMap("derived_from", "_a"));
+ root.addType(Construct.Node, "___a", Collections.singletonMap("derived_from", "_a"));
+
+ Catalog base = new Catalog(root);
+ base.addType(Construct.Node, "_b", Collections.singletonMap("derived_from", "__a"));
+ base.addType(Construct.Node, "__b", Collections.singletonMap("derived_from", "_b"));
+ base.addType(Construct.Node, "__b_", Collections.singletonMap("derived_from", "_a"));
+
+ if (theArgs.length > 0) {
+ Iterator<Map.Entry<String, Map>> ti =
+ base.hierarchy(Construct.Node, theArgs[0]);
+ while (ti.hasNext()) {
+ System.out.println("> " + ti.next().getKey());
+ }
+ }
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CatalogException.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CatalogException.java
new file mode 100644
index 0000000..1f6b602
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CatalogException.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+
+/**
+ *
+ */
+public class CatalogException extends Exception {
+
+ public CatalogException(String theMsg, Throwable theCause) {
+ super(theMsg, theCause);
+ }
+
+ public CatalogException(String theMsg) {
+ super(theMsg);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Checker.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Checker.java
new file mode 100644
index 0000000..9991c86
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Checker.java
@@ -0,0 +1,3661 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+import java.lang.reflect.Method;
+import java.lang.reflect.InvocationTargetException;
+
+import java.io.File;
+import java.io.InputStream;
+import java.io.FileInputStream;
+import java.io.Reader;
+import java.io.FileReader;
+import java.io.InputStreamReader;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.FileNotFoundException;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.net.MalformedURLException;
+
+import java.util.HashMap;
+import java.util.TreeMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.ListIterator;
+import java.util.Map;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.LinkedList;
+import java.util.ArrayList;
+import java.util.Set;
+import java.util.Properties;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Arrays;
+import java.util.MissingResourceException;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+import java.util.stream.Collectors;
+
+import javax.naming.CompositeName;
+import javax.naming.InvalidNameException;
+
+import org.yaml.snakeyaml.Yaml;
+
+import com.google.common.collect.Maps;
+import com.google.common.collect.MapDifference;
+import com.google.common.reflect.Invokable;
+
+import com.google.common.io.CharStreams;
+
+import com.google.common.collect.Table;
+import com.google.common.collect.HashBasedTable;
+
+import kwalify.YamlParser;
+import kwalify.Validator;
+import kwalify.Rule;
+import kwalify.Types;
+import kwalify.SchemaException;
+import kwalify.SyntaxException;
+import kwalify.ValidationException;
+
+import org.apache.commons.jxpath.JXPathContext;
+import org.apache.commons.jxpath.JXPathException;
+
+import org.apache.commons.lang.reflect.ConstructorUtils;
+
+import org.reflections.Reflections;
+import org.reflections.util.FilterBuilder;
+import org.reflections.util.ConfigurationBuilder;
+import org.reflections.scanners.TypeAnnotationsScanner;
+import org.reflections.scanners.SubTypesScanner;
+import org.reflections.scanners.MethodAnnotationsScanner;
+import org.reflections.adapters.JavaReflectionAdapter;
+
+import org.onap.tosca.checker.annotations.Checks;
+import org.onap.tosca.checker.annotations.Catalogs;
+import org.onap.tosca.checker.annotations.Validates;
+
+import static org.onap.tosca.checker.Messages.Message;
+
+
+/*
+ * To consider: model consistency checking happens now along with validation
+ * (is implemented as part of the validation hooks). It might be better to
+ * separate the 2 stages and perform all the consistency checking once
+ * validation is completed.
+ */
+public class Checker {
+
+
+ public static void main(String[] theArgs) {
+
+ if (theArgs.length == 0) {
+ System.err.println("checker resource_to_validate [processor]*");
+ return;
+ }
+
+ try {
+ Catalog cat = Checker.check(new File(theArgs[0]));
+
+ for (Target t: cat.targets()) {
+ System.err.println(t.getLocation() + "\n" + cat.importString(t) + "\n" + t.getReport());
+ }
+
+ for (Target t: cat.sortedTargets()) {
+ System.out.println(t);
+ }
+
+ }
+ catch (Exception x) {
+ x.printStackTrace();
+ }
+ }
+
+
+ private Target target = null; //what we're validating at the moment
+ private Map<String, Target> grammars = new HashMap<String, Target>(); //grammars for the different tosca versions
+
+ private CheckerConfiguration config = new CheckerConfiguration();
+ private Catalog catalog;
+ private TargetLocator locator = new CommonLocator();
+
+ private Table<String, Method, Object> handlers = HashBasedTable.create();
+ private Messages messages;
+ private Logger log = Logger.getLogger(Checker.class.getName());
+
+ private static String[] EMPTY_STRING_ARRAY = new String[0];
+
+
+ public Checker() throws CheckerException {
+ loadGrammars();
+ loadAnnotations();
+ messages = new Messages();
+ }
+
+ /* Need a proper way to indicate where the grammars are and how they should be identified
+ */
+ private final String[] grammarFiles = new String[] {"tosca/tosca_simple_yaml_1_0.grammar",
+ "tosca/tosca_simple_yaml_1_1.grammar"};
+ private void loadGrammars() throws CheckerException {
+
+ for (String grammarFile: grammarFiles) {
+ Target grammarTarget = this.locator.resolve(grammarFile);
+ if (grammarTarget == null) {
+ log.warning("Failed to locate grammar " + grammarFile);
+ continue;
+ }
+
+ parseTarget(grammarTarget);
+ if (grammarTarget.getReport().hasErrors()) {
+ log.warning("Invalid grammar " + grammarFile + ": " + grammarTarget.getReport().toString());
+ continue;
+ }
+
+ List versions = null;
+ try {
+ versions = (List)
+ ((Map)
+ ((Map)
+ ((Map)grammarTarget.getTarget())
+ .get("mapping"))
+ .get("tosca_definitions_version"))
+ .get("enum");
+ }
+ catch (Exception x) {
+ log.warning("Invalid grammar " + grammarFile + ": cannot locate tosca_definitions_versions");
+ }
+ if (versions == null || versions.isEmpty()) {
+ log.warning("Invalid grammar " + grammarFile + ": no tosca_definitions_versions specified");
+ continue;
+ }
+
+ for (Object version: versions) {
+ this.grammars.put(version.toString(), grammarTarget);
+ }
+ }
+
+ log.finer("Loaded grammars: " + this.grammars);
+ }
+
+ private void loadAnnotations() throws CheckerException {
+
+ Reflections reflections = new Reflections(
+ new ConfigurationBuilder()
+ .forPackages("org.onap.tosca")
+ .filterInputsBy(new FilterBuilder()
+ .include(".*\\.class")
+ )
+ .setScanners(new TypeAnnotationsScanner(),
+ new SubTypesScanner(),
+ new MethodAnnotationsScanner())
+ .setExpandSuperTypes(false)
+ //.setMetadataAdapter(new JavaReflectionAdapter())
+ );
+
+ Map<Class, Object> refs = new HashMap<Class, Object>();
+ Set<Method> methods = null;
+
+ //very similar, but annotations cannot be handled in a more 'generic' manner
+
+ methods = reflections.getMethodsAnnotatedWith(Checks.class);
+ for (Method method: methods) {
+ handlers.put("checks:" + method.getAnnotation(Checks.class).path(),
+ method,
+ refs.computeIfAbsent(method.getDeclaringClass(), type -> newInstance(type)));
+ }
+
+ methods = reflections.getMethodsAnnotatedWith(Catalogs.class);
+ for (Method method: methods) {
+ handlers.put("catalogs:" + method.getAnnotation(Catalogs.class).path(),
+ method,
+ refs.computeIfAbsent(method.getDeclaringClass(), type -> newInstance(type)));
+ }
+
+ methods = reflections.getMethodsAnnotatedWith(Validates.class);
+ for (Method method: methods) {
+ Validates annotation = method.getAnnotation(Validates.class);
+ handlers.put(annotation.timing() + "-validates:" + annotation.rule(),
+ method,
+ refs.computeIfAbsent(method.getDeclaringClass(), type -> newInstance(type)));
+ }
+ }
+
+
+ private Object newInstance(Class theType) {
+ try {
+ return (getClass() == theType) ? this
+ : theType.newInstance();
+ }
+ catch(Exception x) {
+ throw new RuntimeException(x);
+ }
+ }
+
+ /*
+ * Lookup one of the handlers, by handler type
+ */
+ public <T> T getHandler(Class<T> theType) {
+ return (T)handlers.values()
+ .stream()
+ .filter(h -> theType.isInstance(h))
+ .findFirst()
+ .orElse(null);
+ }
+
+ public CheckerConfiguration configuration() {
+ return this.config;
+ }
+
+ public void setTargetLocator(TargetLocator theLocator) {
+ this.locator = theLocator;
+ }
+
+ public TargetLocator getTargetLocator() {
+ return this.locator;
+ }
+
+ public Collection<Target> targets() {
+ if (this.catalog == null)
+ throw new IllegalStateException("targets are only available after check");
+
+ return this.catalog.targets();
+ }
+
+ public Catalog catalog() {
+ return this.catalog;
+ }
+
+ /* a facility for handling all files in a target directory ..
+ */
+ public static Catalog check(File theSource)
+ throws CheckerException {
+
+ Catalog catalog = new Catalog(commonsCatalog());
+ Checker checker = new Checker();
+ try {
+ if (theSource.isDirectory()) {
+ for (File f: theSource.listFiles()) {
+ if (f.isFile()) {
+ checker.check(new Target(theSource.getCanonicalPath(), f.toURI().normalize()), catalog);
+ }
+ }
+ }
+ else {
+ checker.check(new Target(theSource.getCanonicalPath(), theSource.toURI().normalize()), catalog);
+ }
+ }
+ catch (IOException iox) {
+ throw new CheckerException("Failed to initialize target", iox);
+ }
+
+ return catalog;
+ }
+
+ /**
+ * Main checking process entry point. In this case the source is passed to the locator in order to
+ * obtain a {@link org.onap.tosca.checker.Target target}, and then all other processing stages are performed.
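+ * A minimal usage sketch (illustrative; the template path is hypothetical):
+ * <pre>{@code
+ *   Checker checker = new Checker();
+ *   checker.check("tests/my_template.yaml");
+ *   for (Target t: checker.targets())
+ *     System.out.println(t.getLocation() + "\n" + t.getReport());
+ * }</pre>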
+ * @param theSource the string representation of the uri pointing to the document/template to be processed
+ * @throws CheckerException for any checker encountered error
+ */
+ public void check(String theSource)
+ throws CheckerException {
+ check(theSource, buildCatalog());
+ }
+
+ /**
+ * Main checking entry point using a pre-computed Catalog. Same as {@link org.onap.tosca.checker.Checker#check(String) check}
+ * except that the given catalog information is available, i.e. all types present in the given catalog are
+ * available and targets already present in the catalog won't be re-processed.
+ */
+ public void check(String theSource, Catalog theCatalog)
+ throws CheckerException {
+ Target tgt =
+ this.locator.resolve(theSource);
+ if (null == tgt) {
+ throw new CheckerException("Unable to locate the target " + theSource);
+ }
+
+ check(tgt, theCatalog);
+ }
+
+ /**
+ * Starts the processing after the localization phase, i.e. the Target is obtained/constructed outside the checker.
+ * @param theTarget the Target representation of the document/template to be processed. The actual target content (yaml
+ * character string) is obtained by calling {@link org.onap.tosca.checker.Target#open() open} on the target
+ * @throws CheckerException for any checker encountered error
+ */
+ public void check(Target theTarget) throws CheckerException {
+ check(theTarget, buildCatalog());
+ }
+
+ /**
+ *
+ * @param theTarget the Target representation of the document/template to be processed. The actual target content (yaml
+ * character string) is obtained by calling {@link org.onap.tosca.checker.Target#open() open} on the target
+ * @param theCatalog a default catalog providing common construct definitions
+ * @throws CheckerException for any checker encountered error
+ */
+ public void check(Target theTarget, Catalog theCatalog) throws CheckerException {
+
+ this.catalog = theCatalog;
+ this.locator.addSearchPath(theTarget.getLocation());
+
+ if (this.catalog.addTarget(theTarget, null)) {
+ List<Target> targets = parseTarget(theTarget);
+ if (theTarget.getReport().hasErrors())
+ return;
+ for (Target target: targets) {
+ this.catalog.addTarget(target, null);
+ //what about this -> this.catalog.addTarget(target, theTarget);
+ if (!validateTarget(target).getReport().hasErrors()) {
+ checkTarget(target);
+ }
+ }
+ }
+ }
+
+ /**
+ * Starts the processing after the {@link org.onap.tosca.checker.Stage#parsed parsed} stage. As such the Target must
+ * have been located (content is available) and parsed (the parsed form is stored within the Target,
+ * {@link org.onap.tosca.checker.Target#getTarget getTarget}).
+ * The target will be validated (grammar) and checked (consistency). While the checker uses snakeyaml to parse
+ * yaml documents, using this entry point allows one to use any other yaml parser, as long as it produces a
+ * compatible representation (java primitive type object representations, Strings, Maps and Lists).
+ *
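+ * A rough sketch of external parsing followed by validation (illustrative; 'uri' and 'yamlReader' stand for a
+ * previously obtained URI and Reader, and Target#setTarget/#setStage are assumed accessible to the caller):
+ * <pre>{@code
+ *   Target target = new Target("my_template", uri);
+ *   target.setTarget(new org.yaml.snakeyaml.Yaml().load(yamlReader));
+ *   target.setStage(Stage.parsed);
+ *   new Checker().validate(target);
+ * }</pre>
+ *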
+ * @param theTarget the processing subject, located and parsed.
+ * @throws CheckerException for any checker encountered error
+ */
+ public void validate(Target theTarget) throws CheckerException {
+ validate(theTarget, buildCatalog());
+ }
+
+ /**
+ *
+ * @param theTarget the processing subject, located and parsed.
+ * @param theCatalog a default catalog providing common construct definitions
+ * @throws CheckerException
+ */
+ public void validate(Target theTarget, Catalog theCatalog) throws CheckerException {
+ this.catalog = theCatalog;
+ this.locator.addSearchPath(theTarget.getLocation());
+
+ if (this.catalog.addTarget(theTarget, null)) {
+ if (!validateTarget(theTarget).getReport().hasErrors()) {
+ checkTarget(theTarget);
+ }
+ }
+ }
+
+
+ /* */
+ protected List<Target> parseTarget(final Target theTarget)
+ throws CheckerException {
+ log.entering(getClass().getName(), "parseTarget", theTarget);
+
+ Reader source = null;
+ try {
+ source = theTarget.open();
+ }
+ catch (IOException iox) {
+ throw new CheckerException("Failed to open target " + theTarget, iox);
+ }
+
+
+ List<Object> yamlRoots = new ArrayList<Object>();
+ try {
+ Yaml yaml = new Yaml();
+ for (Object yamlRoot: yaml.loadAll(source)) {
+ yamlRoots.add(yamlRoot);
+ }
+
+ //yamlRoots.add(
+ // new YamlParser(CharStreams.toString(source)).parse());
+ }
+/*
+ catch(SyntaxException sx) {
+ System.out.println(sx.getLineNumber() + ": " + sx.getMessage());
+ theTarget.report(sx);
+ }
+*/
+ catch(Exception x) {
+ theTarget.report(x);
+ return Collections.EMPTY_LIST;
+ //return Collections.singletonSet(theTarget);
+ }
+ finally {
+ try {
+ source.close();
+ }
+ catch (IOException iox) {
+ //just log it
+ }
+ }
+
+ List<Target> targets = new ArrayList<Target>(yamlRoots.size());
+ if (yamlRoots.size() == 1) {
+ //the target turned out to be a bare document
+ theTarget.setTarget(yamlRoots.get(0));
+ theTarget.setStage(Stage.parsed);
+ targets.add(theTarget);
+ }
+ else {
+ //the target turned out to be a stream containing multiple documents
+ for (int i = 0; i < yamlRoots.size(); i++) {
+/*
+!!We're changing the target below, i.e. we're changing the target implementation hence caching implementation will suffer!!
+*/
+ Target target = new Target(theTarget.getName(),
+ fragmentTargetURI(theTarget.getLocation(), String.valueOf(i)));
+ target.setTarget(yamlRoots.get(i));
+ target.setStage(Stage.parsed);
+ targets.add(target);
+ }
+ }
+
+ log.exiting(getClass().getName(), "parseTarget", theTarget);
+ return targets;
+ }
+
+ protected URI fragmentTargetURI(URI theRoot, String theFragment) {
+ try {
+ return new URI(theRoot.getScheme(),
+ theRoot.getSchemeSpecificPart(),
+ theFragment);
+ }
+ catch(URISyntaxException urisx) {
+ throw new RuntimeException();
+ }
+ }
+
+ protected Target validateTarget(Target theTarget)
+ throws CheckerException {
+ log.entering(getClass().getName(), "validateTarget", theTarget);
+
+ String version = (String)
+ ((Map)theTarget.getTarget())
+ .get("tosca_definitions_version");
+ if (version == null)
+ throw new CheckerException("Target " + theTarget + " does not specify a tosca_definitions_version");
+
+ Target grammar = this.grammars.get(version);
+ if (grammar == null)
+ throw new CheckerException("Target " + theTarget + " specifies unknown tosca_definitions_version " + version);
+
+ TOSCAValidator validator = null;
+ try {
+ validator = new TOSCAValidator(theTarget, grammar.getTarget());
+ }
+ catch (SchemaException sx) {
+ throw new CheckerException("Grammar error at: " + sx.getPath(), sx);
+ }
+
+ theTarget.getReport().addAll(
+ validator.validate(theTarget.getTarget()));
+ theTarget.setStage(Stage.validated);
+
+ if (!theTarget.getReport().hasErrors()) {
+ //applyCanonicals(theTarget.getTarget(), validator.canonicals);
+ }
+
+ log.exiting(getClass().getName(), "validateTarget", theTarget);
+ return theTarget;
+ }
+
+ /** */
+ protected Target checkTarget(Target theTarget) throws CheckerException {
+
+ log.entering(getClass().getName(), "checkTarget", theTarget);
+
+ CheckContext ctx = new CheckContext(theTarget);
+ //start at the top
+ check_service_template_definition(
+ (Map<String,Object>)theTarget.getTarget(), ctx);
+
+ theTarget.setStage(Stage.checked);
+ log.exiting(getClass().getName(), "checkTarget", theTarget);
+ return theTarget;
+ }
+
+ private String errorReport(List<Throwable> theErrors) {
+ StringBuilder sb = new StringBuilder(theErrors.size() + " errors");
+ for (Throwable x: theErrors) {
+ sb.append("\n");
+ if (x instanceof ValidationException) {
+ ValidationException vx = (ValidationException)x;
+ //.apend("at ")
+ //.append(error.getLineNumber())
+ //.append(" : ")
+ sb.append("[")
+ .append(vx.getPath())
+ .append("] ");
+ }
+ else if (x instanceof TargetError) {
+ TargetError tx = (TargetError)x;
+ sb.append("[")
+ .append(tx.getLocation())
+ .append("] ");
+ }
+ sb.append(x.getMessage());
+ if (x.getCause() != null) {
+ sb.append("\n\tCaused by:\n")
+ .append(x.getCause());
+ }
+ }
+ sb.append("\n");
+ return sb.toString();
+ }
+
+
+ protected void range_definition_post_validation_handler(Object theValue, Rule theRule, Validator.ValidationContext theContext) {
+ log.entering("", "range_definition", theContext.getPath());
+
+ assert theRule.getType().equals("seq");
+ List bounds = (List)theValue;
+
+ if (bounds.size() != 2) {
+ theContext.addError("Invalid bounds specification: exactly 2 values expected", theRule, theValue, null);
+ return;
+ }
+
+ try {
+ Double.parseDouble(bounds.get(0).toString());
+ }
+ catch(NumberFormatException nfe) {
+ theContext.addError("Lower bound not a number", theRule, theValue, null);
+ }
+
+ try {
+ Double.parseDouble(bounds.get(1).toString());
+ }
+ catch(NumberFormatException nfe) {
+ if (!"UNBOUNDED".equals(bounds.get(1).toString())) {
+ theContext.addError("Upper bound not a number or 'UNBOUNDED'", theRule, theValue, null);
+ }
+ }
+
+ }
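+
+ /* Illustrative examples of what the handler above accepts and rejects; the
+ * 'occurrences' key is just one place where such a range appears:
+ *
+ *   occurrences: [ 0, UNBOUNDED ]  #accepted: numeric lower bound, UNBOUNDED upper
+ *   occurrences: [ 1, 4 ]          #accepted: two numeric bounds
+ *   occurrences: [ 1, 2, 3 ]       #rejected: not exactly 2 values
+ *   occurrences: [ one, 2 ]        #rejected: lower bound is not a number
+ */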
+
+ public void check_properties(
+ Map<String,Map> theDefinitions, CheckContext theContext) {
+ theContext.enter("properties");
+ try {
+ if(!checkDefinition("properties", theDefinitions, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_property_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ protected void check_property_definition(
+ String theName, Map theDefinition, CheckContext theContext) {
+ theContext.enter(theName);
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+ //check the type
+ if (!checkDataType(theName, theDefinition, theContext)) {
+ return;
+ }
+ //check that the default value is compatible with the declared type
+ Object defaultValue = theDefinition.get("default");
+ if (defaultValue != null) {
+ checkDataValuation(defaultValue, theDefinition, theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
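+
+ /* Illustrative property definition as handled above (names and values are
+ * made up; the declared type is checked via checkDataType and the default
+ * value via checkDataValuation):
+ *
+ *   port:
+ *     type: integer
+ *     default: 8080
+ */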
+
+ public void check_attributes(
+ Map<String,Map> theDefinitions, CheckContext theContext) {
+ theContext.enter("attributes");
+ try {
+ if(!checkDefinition("attributes", theDefinitions, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_attribute_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ protected void check_attribute_definition(
+ String theName, Map theDefinition, CheckContext theContext) {
+ theContext.enter(theName);
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+ if (!checkDataType(theName, theDefinition, theContext)) {
+ return;
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* top level rule, we collected the whole information set.
+ * this is where checking starts
+ */
+ protected void check_service_template_definition(
+ Map<String, Object> theDef, CheckContext theContext) {
+ theContext.enter("");
+
+ if (theDef == null) {
+ theContext.addError("Empty template", null);
+ return;
+ }
+
+ catalogs("", theDef, theContext); //root
+//!!! imports need to be processed first now that cataloging takes place at check time!!
+ //first catalog whatever there is to be cataloged so that the checks can perform cross-checking
+ for (Iterator<Map.Entry<String,Object>> ri = theDef.entrySet().iterator();
+ ri.hasNext(); ) {
+ Map.Entry<String,Object> e = ri.next();
+ catalogs(e.getKey(), e.getValue(), theContext);
+ }
+
+ checks("", theDef, theContext); //root
+ for (Iterator<Map.Entry<String,Object>> ri = theDef.entrySet().iterator();
+ ri.hasNext(); ) {
+ Map.Entry<String,Object> e = ri.next();
+ checks(e.getKey(), e.getValue(), theContext);
+ }
+ theContext.exit();
+ }
+
+ @Catalogs(path="/data_types")
+ protected void catalog_data_types(
+ Map<String, Map> theDefinitions, CheckContext theContext) {
+ theContext.enter("data_types");
+ try {
+ catalogTypes(Construct.Data, theDefinitions, theContext);
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ @Checks(path="/data_types")
+ protected void check_data_types(
+ Map<String, Map> theDefinitions, CheckContext theContext) {
+ theContext.enter("data_types");
+
+ try {
+ if(!checkDefinition("data_types", theDefinitions, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_data_type_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ protected void check_data_type_definition(String theName,
+ Map theDefinition,
+ CheckContext theContext) {
+ theContext.enter(theName, Construct.Data);
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ checkTypeConstruct(
+ Construct.Data, theName, theDefinition, theContext);
+
+ if (theDefinition.containsKey("properties")) {
+ check_properties(
+ (Map<String,Map>)theDefinition.get("properties"), theContext);
+ checkTypeConstructFacet(Construct.Data, theName, theDefinition,
+ Facet.properties, theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
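+
+ /* Illustrative data_types entry as processed above (hypothetical names;
+ * 'derived_from' is presumably covered by checkTypeConstruct, 'properties'
+ * is checked and facet-verified as shown):
+ *
+ *   data_types:
+ *     org.example.datatypes.Endpoint:
+ *       derived_from: tosca.datatypes.Root
+ *       properties:
+ *         address: { type: string }
+ *         port:    { type: integer }
+ */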
+
+ @Catalogs(path="/capability_types")
+ protected void catalog_capability_types(
+ Map<String, Map> theDefinitions, CheckContext theContext) {
+ theContext.enter("capability_types");
+ try {
+ catalogTypes(Construct.Capability, theDefinitions, theContext);
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ @Checks(path="/capability_types")
+ protected void check_capability_types(
+ Map<String, Map> theTypes, CheckContext theContext) {
+ theContext.enter("capability_types");
+ try {
+ if(!checkDefinition("capability_types", theTypes, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theTypes.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_capability_type_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ protected void check_capability_type_definition(String theName,
+ Map theDefinition,
+ CheckContext theContext) {
+ theContext.enter(theName, Construct.Capability);
+
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ checkTypeConstruct(
+ Construct.Capability, theName, theDefinition, theContext);
+
+ if (theDefinition.containsKey("properties")) {
+ check_properties(
+ (Map<String,Map>)theDefinition.get("properties"), theContext);
+ checkTypeConstructFacet(Construct.Capability, theName, theDefinition,
+ Facet.properties, theContext);
+ }
+
+ if (theDefinition.containsKey("attributes")) {
+ check_attributes(
+ (Map<String,Map>)theDefinition.get("attributes"), theContext);
+ checkTypeConstructFacet(Construct.Capability, theName, theDefinition,
+ Facet.attributes, theContext);
+ }
+
+ //valid_source_types: see capability_type_definition
+ //unclear: how does the valid_source_types list definition evolve across
+ //the type hierarchy: additive, overwriting, ??
+ if (theDefinition.containsKey("valid_source_types")) {
+ checkTypeReference(Construct.Node, theContext,
+ ((List<String>)theDefinition.get("valid_source_types")).toArray(EMPTY_STRING_ARRAY));
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
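+
+ /* Illustrative capability_types entry matching the checks above (hypothetical
+ * names; valid_source_types entries must reference known node types):
+ *
+ *   capability_types:
+ *     org.example.capabilities.Api:
+ *       derived_from: tosca.capabilities.Endpoint
+ *       properties:
+ *         secure: { type: boolean }
+ *       valid_source_types: [ org.example.nodes.ApiClient ]
+ */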
+
+ @Catalogs(path="/relationship_types")
+ protected void catalog_relationship_types(
+ Map<String, Map> theDefinitions, CheckContext theContext) {
+ theContext.enter("relationship_types");
+ try {
+ catalogTypes(Construct.Relationship, theDefinitions, theContext);
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ @Checks(path="/relationship_types")
+ protected void check_relationship_types(
+ Map<String, Map> theDefinition, CheckContext theContext) {
+ theContext.enter("relationship_types");
+ try {
+ if(!checkDefinition("relationship_types", theDefinition, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_relationship_type_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ protected void check_relationship_type_definition(String theName,
+ Map theDefinition,
+ CheckContext theContext) {
+ theContext.enter(theName, Construct.Relationship);
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ checkTypeConstruct(
+ Construct.Relationship, theName, theDefinition, theContext);
+
+ if (theDefinition.containsKey("properties")) {
+ check_properties(
+ (Map<String,Map>)theDefinition.get("properties"), theContext);
+ checkTypeConstructFacet(Construct.Relationship, theName, theDefinition,
+ Facet.properties, theContext);
+ }
+
+ if (theDefinition.containsKey("attributes")) {
+ check_attributes(
+ (Map<String,Map>)theDefinition.get("attributes"), theContext);
+ checkTypeConstructFacet(Construct.Relationship, theName, theDefinition,
+ Facet.attributes, theContext);
+ }
+
+ Map<String,Map> interfaces = (Map<String,Map>)theDefinition.get("interfaces");
+ if (interfaces != null) {
+ theContext.enter("interfaces");
+ for (Iterator<Map.Entry<String,Map>> i =
+ interfaces.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_type_interface_definition(
+ e.getKey(), e.getValue(), theContext);
+ }
+ theContext.exit();
+ }
+
+ if (theDefinition.containsKey("valid_target_types")) {
+ checkTypeReference(Construct.Capability, theContext,
+ ((List<String>)theDefinition.get("valid_target_types")).toArray(EMPTY_STRING_ARRAY));
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
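+
+ /* Illustrative relationship_types entry matching the checks above
+ * (hypothetical names; valid_target_types must reference known capability
+ * types, interface entries must reference known interface types):
+ *
+ *   relationship_types:
+ *     org.example.relationships.ConnectsTo:
+ *       derived_from: tosca.relationships.ConnectsTo
+ *       valid_target_types: [ tosca.capabilities.Endpoint ]
+ *       interfaces:
+ *         Configure:
+ *           type: tosca.interfaces.relationship.Configure
+ */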
+
+ @Catalogs(path="/artifact_types")
+ protected void catalog_artifact_types(
+ Map<String, Map> theDefinitions, CheckContext theContext) {
+ theContext.enter("artifact_types");
+ try {
+ catalogTypes(Construct.Artifact, theDefinitions, theContext);
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ @Checks(path="/artifact_types")
+ protected void check_artifact_types(
+ Map<String, Map> theDefinition, CheckContext theContext) {
+ theContext.enter("artifact_types");
+ try {
+ if(!checkDefinition("artifact_types", theDefinition, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_artifact_type_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ protected void check_artifact_type_definition(String theName,
+ Map theDefinition,
+ CheckContext theContext) {
+ theContext.enter(theName, Construct.Artifact);
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ checkTypeConstruct(
+ Construct.Artifact, theName, theDefinition, theContext);
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ @Catalogs(path="/interface_types")
+ protected void catalog_interface_types(
+ Map<String, Map> theDefinitions, CheckContext theContext) {
+ theContext.enter("interface_types");
+ try {
+ catalogTypes(Construct.Interface, theDefinitions, theContext);
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ @Checks(path="/interface_types")
+ protected void check_interface_types(
+ Map<String, Map> theDefinition, CheckContext theContext) {
+ theContext.enter("interface_types");
+ try {
+ if(!checkDefinition("interface_types", theDefinition, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_interface_type_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ protected void check_interface_type_definition(String theName,
+ Map theDefinition,
+ CheckContext theContext) {
+ theContext.enter(theName, Construct.Interface);
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ checkTypeConstruct(
+ Construct.Interface, theName, theDefinition, theContext);
+
+ //not much else here: a list of operation_definitions, each with its
+ //implementation and inputs
+
+ //check that common inputs are re-defined in a compatible manner
+
+ //check that the interface operations are overwritten in a compatible manner
+ //for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet()
+
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ @Catalogs(path="/node_types")
+ protected void catalog_node_types(
+ Map<String, Map> theDefinitions, CheckContext theContext) {
+ theContext.enter("node_types");
+ try {
+ catalogTypes(Construct.Node, theDefinitions, theContext);
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ @Checks(path="/node_types")
+ protected void check_node_types(
+ Map<String, Map> theDefinition, CheckContext theContext) {
+ theContext.enter("node_types");
+ try {
+ if(!checkDefinition("node_types", theDefinition, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_node_type_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+
+ /* */
+ protected void check_node_type_definition(String theName,
+ Map theDefinition,
+ CheckContext theContext) {
+ theContext.enter(theName, Construct.Node);
+
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ checkTypeConstruct(
+ Construct.Node, theName, theDefinition, theContext);
+
+ if (theDefinition.containsKey("properties")) {
+ check_properties(
+ (Map<String,Map>)theDefinition.get("properties"), theContext);
+ checkTypeConstructFacet(Construct.Node, theName, theDefinition,
+ Facet.properties, theContext);
+ }
+
+ if (theDefinition.containsKey("attributes")) {
+ check_attributes(
+ (Map<String,Map>)theDefinition.get("attributes"), theContext);
+ checkTypeConstructFacet(Construct.Node, theName, theDefinition,
+ Facet.attributes, theContext);
+ }
+
+ //requirements
+ if (theDefinition.containsKey("requirements")) {
+ check_requirements(
+ (List<Map>)theDefinition.get("requirements"), theContext);
+ }
+
+ //capabilities
+ if (theDefinition.containsKey("capabilities")) {
+ check_capabilities(
+ (Map<String,Map>)theDefinition.get("capabilities"), theContext);
+ }
+
+ //interfaces:
+ Map<String,Map> interfaces =
+ (Map<String,Map>)theDefinition.get("interfaces");
+ if (interfaces != null) {
+ try {
+ theContext.enter("interfaces");
+ for (Iterator<Map.Entry<String,Map>> i =
+ interfaces.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_type_interface_definition(
+ e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ //artifacts
+
+ }
+ finally {
+ theContext.exit();
+ }
+ }
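+
+ /* Illustrative node_types entry touching the facets checked above
+ * (hypothetical names):
+ *
+ *   node_types:
+ *     org.example.nodes.Web:
+ *       derived_from: tosca.nodes.SoftwareComponent
+ *       properties:
+ *         port: { type: integer }
+ *       capabilities:
+ *         api:
+ *           type: org.example.capabilities.Api
+ *       requirements:
+ *         - host:
+ *             capability: tosca.capabilities.Container
+ *             node: tosca.nodes.Compute
+ *             relationship: tosca.relationships.HostedOn
+ */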
+
+ @Catalogs(path="/group_types")
+ protected void catalog_group_types(
+ Map<String, Map> theDefinitions, CheckContext theContext) {
+ theContext.enter("group_types");
+ try {
+ catalogTypes(Construct.Group, theDefinitions, theContext);
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ @Checks(path="/group_types")
+ protected void check_group_types(
+ Map<String, Map> theDefinition, CheckContext theContext) {
+ theContext.enter("group_types");
+ try {
+ if(!checkDefinition("group_types", theDefinition, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_group_type_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ protected void check_group_type_definition(String theName,
+ Map theDefinition,
+ CheckContext theContext) {
+ theContext.enter(theName, Construct.Group);
+
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ checkTypeConstruct(
+ Construct.Group, theName, theDefinition, theContext);
+
+ if (theDefinition.containsKey("properties")) {
+ check_properties(
+ (Map<String,Map>)theDefinition.get("properties"), theContext);
+ checkTypeConstructFacet(Construct.Group, theName, theDefinition,
+ Facet.properties, theContext);
+ }
+
+ if (theDefinition.containsKey("targets")) {
+ checkTypeReference(Construct.Node, theContext,
+ ((List<String>)theDefinition.get("targets")).toArray(EMPTY_STRING_ARRAY));
+ }
+
+ //interfaces
+ Map<String,Map> interfaces =
+ (Map<String,Map>)theDefinition.get("interfaces");
+ if (interfaces != null) {
+ try {
+ theContext.enter("interfaces");
+ for (Iterator<Map.Entry<String,Map>> i =
+ interfaces.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_type_interface_definition(
+ e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ @Catalogs(path="/policy_types")
+ protected void catalog_policy_types(
+ Map<String, Map> theDefinitions, CheckContext theContext) {
+ theContext.enter("policy_types");
+ try {
+ catalogTypes(Construct.Policy, theDefinitions, theContext);
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ @Checks(path="/policy_types")
+ protected void check_policy_types(
+ Map<String, Map> theDefinition, CheckContext theContext) {
+ theContext.enter("policy_types");
+ try {
+ if(!checkDefinition("policy_types", theDefinition, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_policy_type_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ protected void check_policy_type_definition(String theName,
+ Map theDefinition,
+ CheckContext theContext) {
+ theContext.enter(theName, Construct.Policy);
+
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDefinition, theContext)) {
+ return;
+ }
+
+ checkTypeConstruct(
+ Construct.Policy, theName, theDefinition, theContext);
+
+ if (theDefinition.containsKey("properties")) {
+ check_properties(
+ (Map<String,Map>)theDefinition.get("properties"), theContext);
+ checkTypeConstructFacet(Construct.Policy, theName, theDefinition,
+ Facet.properties, theContext);
+ }
+
+ //the targets can be known node types or group types
+ List<String> targets = (List<String>)theDefinition.get("targets");
+ if (targets != null) {
+ if (checkDefinition("targets", targets, theContext)) {
+ for (String target: targets) {
+ if (!(this.catalog.hasType(Construct.Node, target) ||
+ this.catalog.hasType(Construct.Group, target))) {
+ theContext.addError(
+ Message.INVALID_TYPE_REFERENCE, "targets", target, Arrays.asList(Construct.Node, Construct.Group));
+ }
+ }
+ }
+ }
+
+ }
+ finally {
+ theContext.exit();
+ }
+ }
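+
+ /* Illustrative policy_types entry matching the checks above (hypothetical
+ * names; each 'targets' entry must be a known node type or group type):
+ *
+ *   policy_types:
+ *     org.example.policies.Scaling:
+ *       derived_from: tosca.policies.Scaling
+ *       targets: [ org.example.nodes.Web ]
+ */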
+
+ //checking of actual constructs (capability, ..)
+
+ /* First, interface types do not have a hierarchical organization (no
+ * 'derived_from' in an interface type definition).
+ * So, when interfaces (with a certain type) are defined in a node
+ * or relationship type (and they can define new? operations), what
+ * is there to check:
+ * Can operations here re-define their declaration from the interface
+ * type spec?? From A.5.11.3 we are to understand that override is the
+ * default interpretation .. but it talks about sub-classing, so it is
+ * probably intended as a reference to the node or relationship type
+ * hierarchy and not the interface type (no hierarchy there).
+ * Or is this a case of augmentation where new operations can be added??
+ */
+ protected void check_type_interface_definition(
+ String theName, Map theDef, CheckContext theContext) {
+ theContext.enter(theName);
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+
+ if (!checkTypeReference(Construct.Interface, theContext, (String)theDef.get("type")))
+ return;
+
+ if (theDef.containsKey("inputs")) {
+ check_inputs((Map<String, Map>)theDef.get("inputs"), theContext);
+ }
+
+ //operations: all entries except for 'type' and 'inputs'
+ /*
+ for (Iterator<Map.Entry<String,Map>> i =
+ theDef.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ String ename = e.getKey();
+ if ("type".equals(ename) || "inputs".equals(ename)) {
+ continue;
+ }
+ ?? check_operation_definition(ename, e.getValue(), theContext);
+ }
+ */
+ }
+ finally {
+ theContext.exit();
+ }
+ }
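+
+ /* Illustrative interface definition within a node/relationship type, as
+ * handled above (hypothetical input name; 'type' must reference a known
+ * interface type, 'inputs' entries follow the input definition checks):
+ *
+ *   interfaces:
+ *     Standard:
+ *       type: tosca.interfaces.node.lifecycle.Standard
+ *       inputs:
+ *         retry_count: { type: integer, default: 3 }
+ */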
+
+ /* */
+ protected void check_capabilities(Map<String,Map> theDefinition,
+ CheckContext theContext) {
+ theContext.enter("capabilities");
+ try {
+ if(!checkDefinition("capabilities", theDefinition, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_capability_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* A capability definition appears within the context of a node type
+ */
+ protected void check_capability_definition(String theName,
+ Map theDef,
+ CheckContext theContext) {
+ theContext.enter(theName, Construct.Capability);
+
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+
+ //check capability type
+ if(!checkTypeReference(Construct.Capability, theContext, (String)theDef.get("type")))
+ return;
+
+ //check properties
+ if (!checkFacetAugmentation(
+ Construct.Capability, theDef, Facet.properties, theContext))
+ return;
+
+ //check attributes
+ if (!checkFacetAugmentation(
+ Construct.Capability, theDef, Facet.attributes, theContext))
+ return;
+
+ //valid_source_types: should point to valid template nodes
+ if (theDef.containsKey("valid_source_types")) {
+ checkTypeReference(Construct.Node, theContext,
+ ((List<String>)theDef.get("valid_source_types")).toArray(EMPTY_STRING_ARRAY));
+ //per A.6.1.4 there is an additional check to be performed here:
+ //"Any Node Type (names) provided as values for the valid_source_types keyname SHALL be type-compatible (i.e., derived from the same parent Node Type) with any Node Types defined using the same keyname in the parent Capability Type."
+ }
+ //occurrences: were verified in range_definition
+
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ protected void check_artifact_definition(String theName,
+ Map theDef,
+ CheckContext theContext) {
+ theContext.enter(theName, Construct.Artifact);
+
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+ //check artifact type
+ if(!checkTypeReference(Construct.Artifact, theContext, (String)theDef.get("type")))
+ return;
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ protected void check_requirements(List<Map> theDefinition,
+ CheckContext theContext) {
+ theContext.enter("requirements");
+ try {
+ if(!checkDefinition("requirements", theDefinition, theContext))
+ return;
+
+ for (Iterator<Map> i = theDefinition.iterator(); i.hasNext(); ) {
+ Map e = i.next();
+ Iterator<Map.Entry<String, Map>> ei =
+ (Iterator<Map.Entry<String, Map>>)e.entrySet().iterator();
+ Map.Entry<String, Map> eie = ei.next();
+ check_requirement_definition(eie.getKey(), eie.getValue(), theContext);
+ assert !ei.hasNext();
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
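+
+ /* Illustrative 'requirements' section of a node type, as iterated above:
+ * a YAML sequence in which every entry is a single-key map (hence the
+ * assert on ei.hasNext()); names are hypothetical:
+ *
+ *   requirements:
+ *     - host:
+ *         capability: tosca.capabilities.Container
+ *         node: tosca.nodes.Compute
+ *         relationship: tosca.relationships.HostedOn
+ *     - link:
+ *         capability: tosca.capabilities.network.Linkable
+ */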
+
+ protected void check_requirement_definition(String theName,
+ Map theDef,
+ CheckContext theContext) {
+ theContext.enter(theName, Construct.Requirement);
+
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+ //check capability type
+ String capabilityType = (String)theDef.get("capability");
+ if (null != capabilityType) {
+ checkTypeReference(Construct.Capability, theContext, capabilityType);
+ }
+
+ //check node type
+ String nodeType = (String)theDef.get("node");
+ if (null != nodeType) {
+ checkTypeReference(Construct.Node, theContext, nodeType);
+ }
+
+ //check relationship type
+ Map relationshipSpec = (Map)theDef.get("relationship");
+ String relationshipType = null;
+ if (null != relationshipSpec) {
+ relationshipType = (String)relationshipSpec.get("type");
+ if (relationshipType != null) { //should always be the case
+ checkTypeReference(Construct.Relationship,theContext,relationshipType);
+ }
+
+ Map<String,Map> interfaces = (Map<String,Map>)
+ relationshipSpec.get("interfaces");
+ if (interfaces != null) {
+ //augmentation (additional properties or operations) of the interfaces
+ //defined by the above relationship types
+
+ //check that the interface types are known
+ for (Map interfaceDef : interfaces.values()) {
+ checkTypeReference(Construct.Interface, theContext, (String)interfaceDef.get("type"));
+ }
+ }
+ }
+
+ //cross checks
+
+ //the capability definition might come from the capability type or from the capability definition
+ //within the node type. We might have more than one as a node might specify multiple capabilities of the
+ //same type.
+ //the goal here is to cross check the compatibility of the valid_source_types specification in the
+ //target capability definition (if that definition contains a valid_source_types entry).
+ List<Map> capabilityDefs = new LinkedList<Map>();
+ //nodeType exposes capabilityType
+ if (nodeType != null) {
+ Map<String,Map> capabilities =
+ findTypeFacetByType(Construct.Node, nodeType,
+ Facet.capabilities, capabilityType);
+ if (capabilities.isEmpty()) {
+ theContext.addError("The node type " + nodeType + " does not appear to expose a capability of a type compatible with " + capabilityType, null);
+ }
+ else {
+ for (Map.Entry<String,Map> capability: capabilities.entrySet()) {
+ //this is the capability as it was defined in the node type
+ Map capabilityDef = capability.getValue();
+ //if it defines a valid_source_types then we're working with it,
+ //otherwise we're working with the capability type it points to.
+ //The spec does not make it clear if the valid_source_types in a capability definition augments or
+ //overwrites the one from the capabilityType (it just says they must be compatible).
+ if (capabilityDef.containsKey("valid_source_types")) {
+ capabilityDefs.add(capabilityDef);
+ }
+ else {
+ capabilityDef =
+ catalog.getTypeDefinition(Construct.Capability, (String)capabilityDef.get("type"));
+ if (capabilityDef.containsKey("valid_source_types")) {
+ capabilityDefs.add(capabilityDef);
+ }
+ else {
+ //!!if there is a capability that does not have a valid_source_types entry then there is no reason to
+ //make any further verification (as there is a valid node_type/capability target for this requirement)
+ capabilityDefs.clear();
+ break;
+ }
+ }
+ }
+ }
+ }
+ else {
+ Map capabilityDef = catalog.getTypeDefinition(Construct.Capability, capabilityType);
+ if (capabilityDef.containsKey("valid_source_types")) {
+ capabilityDefs.add(capabilityDef);
+ }
+ }
+
+ //check that the node type enclosing this requirement definition
+ //is in the list of valid_source_types
+ if (!capabilityDefs.isEmpty()) {
+ String enclosingNodeType =
+ theContext.enclosingConstruct(Construct.Node).name();
+ assert enclosingNodeType != null;
+
+ if (!capabilityDefs.stream().anyMatch(
+ (Map capabilityDef)->{
+ List<String> valid_source_types =
+ (List<String>)capabilityDef.get("valid_source_types");
+ return valid_source_types.stream().anyMatch(
+ (String source_type)-> catalog.isDerivedFrom(
+ Construct.Node, enclosingNodeType, source_type));
+ })) {
+ theContext.addError("Node type: " + enclosingNodeType + " not compatible with any of the valid_source_types provided in the definition of compatible capabilities", null);
+
+ }
+
+ /*
+ boolean found = false;
+ for (Map capabilityDef: capabilityDefs) {
+
+ List<String> valid_source_types =
+ (List<String>)capabilityDef.get("valid_source_types");
+ String enclosingNodeType =
+ theContext.enclosingConstruct(Construct.Node);
+ assert enclosingNodeType != null;
+
+ //make sure enclosingNodeType is compatible (same or derived from)
+ //one valid source type
+ for (String source_type: valid_source_types) {
+ if (catalog.isDerivedFrom(
+ Construct.Node, enclosingNodeType, source_type)) {
+ found = true;
+ break;
+ }
+ }
+ }
+
+ if (!found) {
+ //the message is not great as it points to the declared
+ //capabilityType which is not necessarly where the information
+ //is coming from
+ theContext.addError("Node type: " + enclosingNodeType + " not compatible with any of the valid_source_types " + valid_source_types + " provided in the definition of capability " + capabilityType, null);
+ }
+ */
+ }
+
+ //if we have a relationship type, check if it has a valid_target_types
+ //if it does, make sure that the capability type is compatible with one
+ //of them
+ if (relationshipType != null) { //should always be the case
+ Map relationshipTypeDef = catalog.getTypeDefinition(
+ Construct.Relationship, relationshipType);
+ if (relationshipTypeDef != null) {
+ List<String> valid_target_types =
+ (List<String>)relationshipTypeDef.get("valid_target_types");
+ if (valid_target_types != null) {
+ boolean found = false;
+ for (String target_type: valid_target_types) {
+ if (catalog.isDerivedFrom(
+ Construct.Capability, capabilityType, target_type)) {
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ theContext.addError("Capability type: " + capabilityType + " not compatible with any of the valid_target_types " + valid_target_types + " provided in the definition of relationship type " + relationshipType, null);
+ }
+ }
+ }
+ }
+
+ //relationship declares the capabilityType in its valid_target_types set
+ //in A.6.9 'Relationship Type' the spec does not indicate how inheritance
+ //is to be applied to the valid_target_types spec: cumulative, overwrites,
+ //so we treat it as an overwrite.
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ //topology_template_definition and sub-rules
+ /* */
+ @Checks(path="/topology_template")
+ protected void check_topology_template(
+ Map<String,Map> theDef, final CheckContext theContext) {
+
+ theContext.enter("topology_template");
+
+ try {
+ theDef.entrySet().stream()
+ .forEach(e -> catalogs(e.getKey(), e.getValue(), theContext));
+
+ theDef.entrySet().stream()
+ .forEach(e -> checks(e.getKey(), e.getValue(), theContext));
+/*
+ for (Iterator<Map.Entry<String,Object>> ri = theDef.entrySet().iterator();
+ ri.hasNext(); ) {
+ Map.Entry<String,Object> e = ri.next();
+ checks(e.getKey(), e.getValue(), theContext);
+ }
+*/
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /*
+ * Once the syntax of the imports section is validated, parse/validate/catalog
+ * all the imported template information.
+ */
+ @Checks(path="/imports")
+ protected void check_imports(List theImports, CheckContext theContext) {
+ theContext.enter("imports");
+
+ for (ListIterator li = theImports.listIterator(); li.hasNext(); ) {
+ Object importEntry = li.next(),
+ importFile = ((Map)mapEntry(importEntry).getValue()).get("file");
+ Target tgt = null;
+ try {
+ tgt = catalog.getTarget( (URI)importFile );
+ }
+ catch (ClassCastException ccx) {
+ log.log(Level.FINE, "Unexpected import file specification: " + importFile, ccx);
+ }
+
+ if (tgt == null) {
+ //malfunction
+ theContext.addError("Checking import '" + importFile + "': failed at a previous stage", null);
+ return;
+ }
+
+ if (tgt.getReport().hasErrors()) {
+ //import failed parsing or validation, we skip it
+ continue;
+ }
+
+ if (tgt.getStage() == Stage.checked) {
+ //been here before, this target had already been processed
+ continue;
+ }
+
+ //import should have been fully processed by now ???
+ log.log(Level.FINE, "Processing import " + tgt + ".");
+ try {
+ checkTarget(tgt);
+ }
+ catch (CheckerException cx) {
+ theContext.addError("Failure checking import '" + tgt + "'", cx);
+ }
+
+ }
+ theContext.exit();
+ }
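+
+ /* Illustrative 'imports' section as consumed above: each list entry is
+ * expected to be a single-key map whose value carries a 'file' entry that
+ * was resolved to a catalog Target at an earlier stage (the exact import
+ * grammar depends on the supported tosca_definitions_version; names are
+ * hypothetical):
+ *
+ *   imports:
+ *     - common_definitions:
+ *         file: common_types.yaml
+ */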
+
+ /* */
+ @Checks(path="/topology_template/substitution_mappings")
+ protected void check_substitution_mappings(Map<String, Object> theSub,
+ CheckContext theContext) {
+ theContext.enter("substitution_mappings");
+ try {
+ //type is mandatory
+ String type = (String)theSub.get("node_type");
+ if (!checkTypeReference(Construct.Node, theContext, type)) {
+ return;
+ }
+
+ Map<String,List> capabilities = (Map<String,List>)theSub.get("capabilities");
+ if (null != capabilities) {
+ for (Map.Entry<String,List> ce: capabilities.entrySet()) {
+ //the key must be a capability of the type
+ if (null == findTypeFacetByName(Construct.Node, type,
+ Facet.capabilities, ce.getKey())) {
+ theContext.addError("Unknown node type capability: " + ce.getKey() + ", type " + type, null);
+ }
+ //the value is a 2 element list: first is a local node,
+ //second is the name of one of its capabilities
+ List target = ce.getValue();
+ if (target.size() != 2) {
+ theContext.addError("Invalid capability mapping: " + target + ", expecting 2 elements", null);
+ continue;
+ }
+
+ String targetNode = (String)target.get(0),
+ targetCapability = (String)target.get(1);
+
+ Map<String,Object> targetNodeDef = (Map<String,Object>)
+ this.catalog.getTemplate(theContext.target(), Construct.Node, targetNode);
+ if (null == targetNodeDef) {
+ theContext.addError("Invalid capability mapping node template: " + targetNode, null);
+ continue;
+ }
+
+ String targetNodeType = (String)targetNodeDef.get("type");
+ if (null == findTypeFacetByName(Construct.Node, targetNodeType,
+ Facet.capabilities, targetCapability)) {
+ theContext.addError("Invalid capability mapping capability: " + targetCapability + ". No such capability found for node template " + targetNode + ", of type " + targetNodeType, null);
+ }
+ }
+ }
+
+ Map<String,List> requirements = (Map<String,List>)theSub.get("requirements");
+ if (null != requirements) {
+ for (Map.Entry<String,List> re: requirements.entrySet()) {
+ //the key must be a requirement of the type
+ if (null == findNodeTypeRequirementByName(type, re.getKey())) {
+ theContext.addError("Unknown node type requirement: " + re.getKey() + ", type " + type, null);
+ }
+
+ List target = re.getValue();
+ if (target.size() != 2) {
+ theContext.addError("Invalid requirement mapping: " + target + ", expecting 2 elements", null);
+ continue;
+ }
+
+ String targetNode = (String)target.get(0),
+ targetRequirement = (String)target.get(1);
+
+ Map<String,Object> targetNodeDef = (Map<String,Object>)
+ this.catalog.getTemplate(theContext.target(), Construct.Node, targetNode);
+ if (null == targetNodeDef) {
+ theContext.addError("Invalid requirement mapping node template: " + targetNode, null);
+ continue;
+ }
+
+ String targetNodeType = (String)targetNodeDef.get("type");
+ if (null == findNodeTypeRequirementByName(targetNodeType,targetRequirement)) {
+ theContext.addError("Invalid requirement mapping requirement: " + targetRequirement + ". No such requirement found for node template " + targetNode + ", of type " + targetNodeType, null);
+ }
+ }
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
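+
+ /* Illustrative substitution_mappings section matching the checks above
+ * (hypothetical names): 'node_type' is mandatory, and every capability or
+ * requirement mapping is a 2-element list of [ node_template, capability/requirement ]:
+ *
+ *   substitution_mappings:
+ *     node_type: org.example.nodes.AbstractWeb
+ *     capabilities:
+ *       api: [ web_server, api ]
+ *     requirements:
+ *       host: [ web_server, host ]
+ */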
+
+ /* */
+ @Catalogs(path="/topology_template/inputs")
+ protected void catalog_inputs(Map<String, Map> theInputs,
+ CheckContext theContext) {
+ theContext.enter("inputs");
+
+ try {
+ catalogTemplates(Construct.Data, theInputs, theContext);
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ @Checks(path="/topology_template/inputs")
+ protected void check_inputs(Map<String, Map> theInputs,
+ CheckContext theContext) {
+ theContext.enter("inputs");
+
+ try {
+ if(!checkDefinition("inputs", theInputs, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theInputs.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_input_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ protected void check_input_definition(String theName,
+ Map theDef,
+ CheckContext theContext) {
+ theContext.enter(theName);
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+ //
+ if (!checkDataType(theName, theDef, theContext)) {
+ return;
+ }
+ //check default value
+ Object defaultValue = theDef.get("default");
+ if (defaultValue != null) {
+ checkDataValuation(defaultValue, theDef, theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ @Checks(path="topology_template/outputs")
+ protected void check_outputs(Map<String, Map> theOutputs,
+ CheckContext theContext) {
+ theContext.enter("outputs");
+
+ try {
+ if(!checkDefinition("outputs", theOutputs, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theOutputs.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_output_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ protected void check_output_definition(String theName,
+ Map theDef,
+ CheckContext theContext) {
+ theContext.enter(theName);
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+ //check the expression
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ @Checks(path="/topology_template/groups")
+ protected void check_groups(Map<String, Map> theGroups,
+ CheckContext theContext) {
+ theContext.enter("groups");
+
+ try {
+ if(!checkDefinition("groups", theGroups, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theGroups.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_group_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ protected void check_group_definition(String theName,
+ Map theDef,
+ CheckContext theContext) {
+ theContext.enter(theName);
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+
+ if (!checkTypeReference(Construct.Group, theContext, (String)theDef.get("type")))
+ return;
+
+ if (!checkFacet(
+ Construct.Group, theDef, Facet.properties, theContext))
+ return;
+
+ if (theDef.containsKey("targets")) {
+ //checkTemplateReference(Construct.Node, theContext,
+ // ((List<String>)theDef.get("targets")).toArray(EMPTY_STRING_ARRAY));
+
+ List<String> targetsTypes = (List<String>)
+ this.catalog.getTypeDefinition(Construct.Group,
+ (String)theDef.get("type"))
+ .get("targets");
+
+ List<String> targets = (List<String>)theDef.get("targets");
+ for (String target: targets) {
+ if (!this.catalog.hasTemplate(theContext.target(),Construct.Node, target)) {
+ theContext.addError("The 'targets' entry must contain a reference to a node template, '" + target + "' is not one", null);
+ }
+ else {
+ if (targetsTypes != null) {
+ String targetType = (String)
+ this.catalog.getTemplate(theContext.target(), Construct.Node, target).get("type");
+
+ boolean found = false;
+ for (String type: targetsTypes) {
+ found = this.catalog
+ .isDerivedFrom(Construct.Node, targetType, type);
+ if (found)
+ break;
+ }
+
+ if (!found) {
+ theContext.addError("The 'targets' entry '" + target + "' is not type compatible with any of types specified in policy type targets", null);
+ }
+ }
+ }
+ }
+ }
+
+ if (theDef.containsKey("interfaces")) {
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ @Checks(path="/topology_template/policies")
+ protected void check_policies(List<Map<String,Map>> thePolicies,
+ CheckContext theContext) {
+ theContext.enter("policies");
+
+ try {
+ if(!checkDefinition("policies", thePolicies, theContext))
+ return;
+
+ for (Map<String,Map> policy: thePolicies) {
+ assert policy.size() == 1;
+ Map.Entry<String,Map> e = policy.entrySet().iterator().next();
+ check_policy_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ protected void check_policy_definition(String theName,
+ Map theDef,
+ CheckContext theContext) {
+ theContext.enter(theName);
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theDef, theContext)) {
+ return;
+ }
+
+ if (!checkTypeReference(Construct.Policy, theContext, (String)theDef.get("type")))
+ return;
+
+ if (!checkFacet(
+ Construct.Policy, theDef, Facet.properties, theContext))
+ return;
+
+ //targets: must point to node or group templates (that are of a type
+ //specified in the policy type definition, if targets were specified
+ //there).
+ if (theDef.containsKey("targets")) {
+ List<String> targetsTypes = (List<String>)
+ this.catalog.getTypeDefinition(Construct.Policy,
+ (String)theDef.get("type"))
+ .get("targets");
+
+ List<String> targets = (List<String>)theDef.get("targets");
+ for (String target: targets) {
+ Construct targetConstruct = null;
+
+ if (this.catalog.hasTemplate(theContext.target(),Construct.Group, target)) {
+ targetConstruct = Construct.Group;
+ }
+ else if (this.catalog.hasTemplate(theContext.target(),Construct.Node, target)) {
+ targetConstruct = Construct.Node;
+ }
+ else {
+ theContext.addError(Message.INVALID_TEMPLATE_REFERENCE, "targets", target, new Object[] {"node", "group"});
+ }
+
+ if (targetConstruct != null &&
+ targetsTypes != null) {
+ //get the target type and make sure is compatible with the types
+ //indicated in the type spec
+ String targetType = (String)
+ this.catalog.getTemplate(theContext.target(), targetConstruct, target).get("type");
+
+ boolean found = false;
+ for (String type: targetsTypes) {
+ found = this.catalog
+ .isDerivedFrom(targetConstruct, targetType, type);
+ if (found)
+ break;
+ }
+
+ if (!found) {
+ theContext.addError("The 'targets' " + targetConstruct + " entry '" + target + "' is not type compatible with any of types specified in policy type targets", null);
+ }
+ }
+ }
+ }
+
+ if (theDef.containsKey("triggers")) {
+ List<Map> triggers = (List<Map>)theDef.get("triggers");
+ //TODO
+ }
+
+ }
+ finally {
+ theContext.exit();
+ }
+ }
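+
+ /* Illustrative 'policies' entry of a topology_template matching the checks
+ * above (hypothetical names; 'targets' must reference node or group
+ * templates of the types allowed by the policy type, if it restricts them):
+ *
+ *   policies:
+ *     - scaling_policy:
+ *         type: org.example.policies.Scaling
+ *         targets: [ web_server ]
+ */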
+
+ /* */
+ @Catalogs(path="/topology_template/node_templates")
+ protected void catalog_node_templates(Map<String, Map> theTemplates,
+ CheckContext theContext) {
+ theContext.enter("node_templates");
+
+ try {
+ catalogTemplates(Construct.Node, theTemplates, theContext);
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ @Checks(path="/topology_template/node_templates")
+ protected void check_node_templates(Map<String, Map> theTemplates,
+ CheckContext theContext) {
+ theContext.enter("node_templates");
+ try {
+ if(!checkDefinition("node_templates", theTemplates, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theTemplates.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_node_template_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /* */
+ protected void check_node_template_definition(String theName,
+ Map theNode,
+ CheckContext theContext) {
+ theContext.enter(theName, Construct.Node);
+
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theNode, theContext)) {
+ return;
+ }
+
+ if (!checkTypeReference(Construct.Node, theContext, (String)theNode.get("type")))
+ return;
+
+ //copy
+ String copy = (String)theNode.get("copy");
+ if (copy != null) {
+ if (!checkTemplateReference(Construct.Node, theContext, copy)) {
+ theContext.addError(Message.INVALID_TEMPLATE_REFERENCE, "copy", copy, Construct.Node);
+ }
+ else {
+ //the 'copy' node specification should be used to provide 'defaults'
+ //for this specification, we should check them
+ }
+ }
+
+ /* check that we operate on properties and attributes within the scope of
+ the specified node type */
+ if (!checkFacet(
+ Construct.Node, /*theName,*/theNode, Facet.properties, theContext))
+ return;
+
+ if (!checkFacet(
+ Construct.Node, /*theName,*/theNode, Facet.attributes, theContext))
+ return;
+
+ //requirement assignment seq
+ if (theNode.containsKey("requirements")) {
+ check_requirements_assignment_definition(
+ (List<Map>)theNode.get("requirements"), theContext);
+ }
+
+ //capability assignment map: subject to augmentation
+ if (theNode.containsKey("capabilities")) {
+ check_capabilities_assignment_definition(
+ (Map<String,Map>)theNode.get("capabilities"), theContext);
+ }
+
+ //interfaces
+ if (theNode.containsKey("interfaces")) {
+ check_template_interfaces_definition(
+ (Map<String,Map>)theNode.get("interfaces"), theContext);
+ }
+
+ //artifacts: artifacts do not have different definition forms/syntax
+ //depending on the context (type or template) but they are still subject
+ //to 'augmentation'
+ if (theNode.containsKey("artifacts")) {
+ check_template_artifacts_definition(
+ (Map<String,Object>)theNode.get("artifacts"), theContext);
+ }
+
+ /* node_filter: the context to which the node filter is applied is very
+ * wide here as opposed to the node filter specification in a requirement
+ * assignment which has a more strict context (target node/capability are
+ * specified).
+ * We could check that there are nodes in this template having the
+ * properties/capabilities specified in this filter, i.e. the filter has
+ * a chance to succeed.
+ */
+ }
+ finally {
+ theContext.exit();
+ }
+ }
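+
+ /* Illustrative node template matching the checks above (hypothetical names):
+ *
+ *   node_templates:
+ *     web_server:
+ *       type: org.example.nodes.Web
+ *       properties:
+ *         port: 8080
+ *       capabilities:
+ *         api:
+ *           properties:
+ *             secure: true
+ *       requirements:
+ *         - host:
+ *             node: compute_1
+ */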
+
+ @Checks(path="/topology_template/relationship_templates")
+ protected void check_relationship_templates(Map theTemplates,
+ CheckContext theContext) {
+ theContext.enter("relationship_templates");
+
+ for (Iterator<Map.Entry<String,Map>> i = theTemplates.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_relationship_template_definition(e.getKey(), e.getValue(), theContext);
+ }
+ theContext.exit();
+ }
+
+ /* */
+ protected void check_relationship_template_definition(
+ String theName,
+ Map theRelationship,
+ CheckContext theContext) {
+ theContext.enter(theName, Construct.Relationship);
+ try {
+ if (!checkName(theName, theContext) ||
+ !checkDefinition(theName, theRelationship, theContext)) {
+ return;
+ }
+
+ if (!checkTypeReference(Construct.Relationship, theContext, (String)theRelationship.get("type")))
+ return;
+
+ String copy = (String)theRelationship.get("copy");
+ if (copy != null) {
+ if (!checkTemplateReference(Construct.Relationship, theContext, copy)) {
+ theContext.addError(Message.INVALID_TEMPLATE_REFERENCE, "copy", copy, Construct.Relationship);
+ }
+ }
+
+ /* check that we operate on properties and attributes within the scope of
+ the specified relationship type */
+ if (!checkFacet(Construct.Relationship, theRelationship,
+ Facet.properties, theContext))
+ return;
+
+ if (!checkFacet(Construct.Relationship, theRelationship,
+ Facet.attributes, theContext))
+ return;
+
+ /* interface definitions
+ note: augmentation is allowed here so not clear what to check ..
+ maybe report augmentations if so configured .. */
+
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ //requirements and capabilities assignment appear in a node templates
+ protected void check_requirements_assignment_definition(
+ List<Map> theRequirements, CheckContext theContext) {
+ theContext.enter("requirements");
+ try {
+ if(!checkDefinition("requirements", theRequirements, theContext))
+ return;
+
+ //the node type for the node template enclosing these requirements
+ String nodeType = (String)catalog.getTemplate(
+ theContext.target(),
+ Construct.Node,
+ theContext.enclosingConstruct(Construct.Node).name())
+ .get("type");
+
+ for(Iterator<Map> ri = theRequirements.iterator(); ri.hasNext(); ) {
+ Map<String,Map> requirement = (Map<String,Map>)ri.next();
+
+ Iterator<Map.Entry<String,Map>> rai =
+ (Iterator<Map.Entry<String,Map>>)requirement.entrySet().iterator();
+
+ Map.Entry<String,Map> requirementEntry = rai.next();
+ assert !rai.hasNext();
+
+ String requirementName = requirementEntry.getKey();
+ Map requirementDef = findNodeTypeRequirementByName(
+ nodeType, requirementName);
+
+ if (requirementDef == null /*&&
+ !config.allowAugmentation()*/) {
+ theContext.addError("No requirement " + requirementName + " was defined for the node type " + nodeType, null);
+ continue;
+ }
+
+ check_requirement_assignment_definition(
+ requirementName, requirementEntry.getValue(), requirementDef, theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ protected void check_requirement_assignment_definition(
+ String theRequirementName,
+ Map theAssignment,
+ Map theDefinition,
+ CheckContext theContext) {
+ theContext//.enter("requirement_assignment")
+ .enter(theRequirementName, Construct.Requirement);
+
+ //grab the node type definition to verify compatibility
+
+ try {
+ //node assignment
+ boolean targetNodeIsTemplate = false;
+ String targetNode = (String)theAssignment.get("node");
+ if (targetNode == null) {
+ targetNode = (String)theDefinition.get("node");
+ //targetNodeIsTemplate stays false, targetNode must be a type
+ }
+ else {
+ //the value must be a node template or a node type
+ targetNodeIsTemplate = isTemplateReference(
+ Construct.Node, theContext, targetNode);
+ if (!targetNodeIsTemplate) {
+ if (!isTypeReference(Construct.Node/*, theContext*/, targetNode)) {
+ theContext.addError(Message.INVALID_CONSTRUCT_REFERENCE, "node", targetNode, Construct.Node);
+ return;
+ }
+ //targetNode is a type reference
+ }
+
+ //additional checks
+ String targetNodeDef = (String)theDefinition.get("node");
+ if (targetNodeDef != null && targetNode != null) {
+ if (targetNodeIsTemplate) {
+ //if the target is a node template, it must be compatible with the
+ //node type specification in the requirement definition
+ String targetNodeType = (String)
+ catalog.getTemplate(theContext.target(),Construct.Node,targetNode).get("type");
+ if (!catalog.isDerivedFrom(
+ Construct.Node, targetNodeType,targetNodeDef)) {
+ theContext.addError(Message.INCOMPATIBLE_REQUIREMENT_TARGET, Construct.Node, targetNodeType + " of target node " + targetNode, targetNodeDef);
+ return;
+ }
+ }
+ else {
+ //if the target is a node type it must be compatible (= or derived
+ //from) with the node type specification in the requirement definition
+ if (!catalog.isDerivedFrom(
+ Construct.Node, targetNode, targetNodeDef)) {
+ theContext.addError(Message.INCOMPATIBLE_REQUIREMENT_TARGET, Construct.Node, targetNode, targetNodeDef);
+ return;
+ }
+ }
+ }
+ }
+
+ String targetNodeType = targetNodeIsTemplate ?
+ (String)catalog.getTemplate(theContext.target(),Construct.Node,targetNode).get("type"):
+ targetNode;
+
+ //capability assignment
+ boolean targetCapabilityIsType = false;
+ String targetCapability = (String)theAssignment.get("capability");
+ if (targetCapability == null) {
+ targetCapability = (String)theDefinition.get("capability");
+ //in a requirement definition the target capability can only be a
+ //capability type (and not a capability name within some target node
+ //type)
+ targetCapabilityIsType = true;
+ }
+ else {
+ targetCapabilityIsType = isTypeReference(Construct.Capability, targetCapability);
+
+ //check compatibility with the target capability type specified
+ //in the requirement definition, if any
+ String targetCapabilityDef = (String)theDefinition.get("capability");
+ if (targetCapabilityDef != null && targetCapability != null) {
+ if (targetCapabilityIsType) {
+ if (!catalog.isDerivedFrom(
+ Construct.Capability, targetCapability, targetCapabilityDef)) {
+ theContext.addError(Message.INCOMPATIBLE_REQUIREMENT_TARGET, Construct.Capability, targetCapability, targetCapabilityDef);
+ return;
+ }
+ }
+ else {
+ //the capability is from a target node. Find its definition and
+ //check that its type is compatible with the capability type
+ //from the requirement definition
+
+ //check target capability compatibility with target node
+ if (targetNode == null) {
+ theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', which was not specified", null);
+ return;
+ }
+ if (!targetNodeIsTemplate) {
+ theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', but there you specified a node type", null);
+ return;
+ }
+ //check that the targetNode (its type) indeed has the
+ //targetCapability
+
+ Map<String,Object> targetNodeCapabilityDef =
+ findTypeFacetByName(
+ Construct.Node, targetNodeType,
+ Facet.capabilities, targetCapability);
+ if (targetNodeCapabilityDef == null) {
+ theContext.addError("No capability '" + targetCapability + "' was specified in the node " + targetNode + " of type " + targetNodeType, null);
+ return;
+ }
+
+ String targetNodeCapabilityType = (String)targetNodeCapabilityDef.get("type");
+
+ if (!catalog.isDerivedFrom(Construct.Capability,
+ targetNodeCapabilityType,
+ targetCapabilityDef)) {
+ theContext.addError("The required target capability type '" + targetCapabilityDef + "' is not compatible with the target capability type found in the target node type capability definition : " + targetNodeCapabilityType + ", targetNode " + targetNode + ", capability name " + targetCapability, null);
+ return;
+ }
+ }
+ }
+ }
+
+ //relationship assignment
+ Map targetRelationship = (Map)theAssignment.get("relationship");
+ if (targetRelationship != null) {
+ //this has to be compatible with the relationship with the same name
+ //from the node type
+ //check the type
+ }
+
+ //node_filter; used jxpath to simplify the navigation somewhat
+ //this is too cryptic
+ JXPathContext jxPath = JXPathContext.newContext(theAssignment);
+ jxPath.setLenient(true);
+
+ List<Map> propertiesFilter =
+ (List<Map>)jxPath.getValue("/node_filter/properties");
+ if (propertiesFilter != null) {
+ for (Map propertyFilter: propertiesFilter) {
+//System.out.println("propertiesFilter " + propertyFilter);
+
+ if (targetNode != null) {
+ //if we have a target node or node template then it must
+ //have these properties
+ for (Object propertyName: propertyFilter.keySet()) {
+ if (null == findTypeFacetByName(Construct.Node,
+ targetNodeType,
+ Facet.properties,
+ propertyName.toString())) {
+ theContext.addError("The node_filter property " + propertyName + " is invalid: requirement target node " + targetNode + " does not have such a property", null);
+ }
+ }
+ }
+ else if (targetCapability != null) {
+ /*
+ //if we have a target capability type (but not have a target node)
+ //than it must have these properties
+
+ Not true, the filter always refers to node properties: it is the processor's/orchestrator's job to match
+ this requirement with a node that satisfies the filter. We cannot anticipate the values of all properties
+ (some might come from inputs) so we cannot scan for candidates at this point.
+
+
+ if (targetCapabilityIsType) {
+ for (Object propertyName: propertyFilter.keySet()) {
+ if (null == findTypeFacetByName(Construct.Capability,
+ targetCapability,
+ Facet.properties,
+ propertyName.toString())) {
+ theContext.addError("The node_filter property " + propertyName + " is invalid: requirement target capability " + targetCapability + " does not have such a property", null);
+ }
+ }
+ }
+ else {
+ //cannot be: if you point to an explicit capability then you must
+ //have specified a targetNode
+ }
+ */
+ }
+ else {
+ //what are the properties supposed to filter on ??
+ }
+ }
+ }
+
+ List<Map> capabilitiesFilter =
+ (List<Map>)jxPath.getValue("node_filter/capabilities");
+ if (capabilitiesFilter != null) {
+ for (Map capabilityFilterDef: capabilitiesFilter) {
+ assert capabilityFilterDef.size() == 1;
+ Map.Entry<String, Map> capabilityFilterEntry =
+ (Map.Entry<String, Map>)capabilityFilterDef.entrySet().iterator().next();
+ String targetFilterCapability = capabilityFilterEntry.getKey();
+ Map<String,Object> targetFilterCapabilityDef = null;
+
+          //if we have a targetNode, capabilityName must be a capability of
+          //that node (type); or it can simply be a capability type (but the node
+          //must have a capability of that type)
+
+ String targetFilterCapabilityType = null;
+ if (targetNode != null) {
+ targetFilterCapabilityDef =
+ findTypeFacetByName(Construct.Node, targetNodeType,
+ Facet.capabilities, targetFilterCapability);
+ if (targetFilterCapabilityDef != null) {
+ targetFilterCapabilityType =
+ (String)targetFilterCapabilityDef/*.values().iterator().next()*/.get("type");
+ }
+ else {
+ Map<String,Map> targetFilterCapabilities =
+ findTypeFacetByType(Construct.Node, targetNodeType,
+ Facet.capabilities, targetFilterCapability);
+
+ if (!targetFilterCapabilities.isEmpty()) {
+ if (targetFilterCapabilities.size() > 1) {
+ log.warning("check_requirement_assignment_definition: filter check, target node type '" + targetNodeType + "' has more than one capability of type '" + targetFilterCapability + "', not supported");
+ }
+ //pick the first entry, it represents a capability of the required type
+ Map.Entry<String,Map> capabilityEntry = targetFilterCapabilities.entrySet().iterator().next();
+ targetFilterCapabilityDef = Collections.singletonMap(capabilityEntry.getKey(),
+ capabilityEntry.getValue());
+ targetFilterCapabilityType = targetFilterCapability;
+ }
+ }
+ }
+ else {
+ //no node (type) specified, it can be a straight capability type
+ targetFilterCapabilityDef = catalog.getTypeDefinition(
+ Construct.Capability, targetFilterCapability);
+            //here comes the odd part: it can still be just a name, in which
+            //case we should look at the requirement definition to see which
+            //capability (type) it indicates
+ assert targetCapabilityIsType; //cannot be otherwise, we'd need a node
+ targetFilterCapabilityDef = catalog.getTypeDefinition(
+ Construct.Capability, targetCapability);
+ targetFilterCapabilityType = targetCapability;
+ }
+
+ if (targetFilterCapabilityDef == null) {
+            theContext.addError("Capability (name or type) " + targetFilterCapability + " is invalid: not a known capability (type)" +
+                                  ((targetNodeType != null) ? (" of node type " + targetNodeType) : ""), null);
+ continue;
+ }
+
+ for (Map propertyFilter:
+ (List<Map>)jxPath.getValue("/node_filter/capabilities/" + targetFilterCapability + "/properties")) {
+ //check that the properties are in the scope of the
+ //capability definition
+ for (Object propertyName: propertyFilter.keySet()) {
+ if (null == findTypeFacetByName(Construct.Capability,
+ targetCapability,
+ Facet.properties,
+ propertyName.toString())) {
+ theContext.addError("The capability filter " + targetFilterCapability + " property " + propertyName + " is invalid: target capability " + targetFilterCapabilityType + " does not have such a property", null);
+ }
+ }
+ }
+ }
+ }
+
+ }
+ finally {
+      theContext.exit();
+ }
+ }
+
+ protected void check_capabilities_assignment_definition(
+ Map<String,Map> theCapabilities, CheckContext theContext) {
+ theContext.enter("capabilities");
+ try {
+ if(!checkDefinition("capabilities", theCapabilities, theContext))
+ return;
+
+      //the node type for the node template enclosing these capabilities
+ String nodeType = (String)catalog.getTemplate(
+ theContext.target(),
+ Construct.Node,
+ theContext.enclosingConstruct(Construct.Node).name())
+ .get("type");
+
+ for (Iterator<Map.Entry<String,Map>> ci =
+ theCapabilities.entrySet().iterator();
+ ci.hasNext(); ) {
+
+ Map.Entry<String,Map> ce = ci.next();
+
+ String capabilityName = ce.getKey();
+ Map capabilityDef = findTypeFacetByName(Construct.Node, nodeType,
+ Facet.capabilities, capabilityName);
+ if (capabilityDef == null) {
+ theContext.addError("No capability " + capabilityName + " was defined for the node type " + nodeType, null);
+ continue;
+ }
+
+ check_capability_assignment_definition(
+ capabilityName, ce.getValue(), capabilityDef,theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ protected void check_capability_assignment_definition(
+ String theCapabilityName,
+ Map theAssignment,
+ Map theDefinition,
+ CheckContext theContext) {
+
+ theContext.enter(theCapabilityName, Construct.Capability);
+ try {
+ String capabilityType = (String)theDefinition.get("type");
+ //list of property and attributes assignments
+ checkFacet(Construct.Capability, theAssignment, capabilityType,
+ Facet.properties, theContext);
+ checkFacet(Construct.Capability, theAssignment, capabilityType,
+ Facet.attributes, theContext);
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ /** */
+ protected void check_template_interfaces_definition(
+ Map<String,Map> theInterfaces,
+ CheckContext theContext) {
+ theContext.enter("interfaces");
+ try {
+ if(!checkDefinition("interfaces", theInterfaces, theContext))
+ return;
+
+      //the node type for the node template enclosing these interfaces
+ String nodeType = (String)catalog.getTemplate(
+ theContext.target(),
+ Construct.Node,
+ theContext.enclosingConstruct(Construct.Node).name())
+ .get("type");
+
+ for (Iterator<Map.Entry<String,Map>> ii =
+ theInterfaces.entrySet().iterator();
+ ii.hasNext(); ) {
+
+ Map.Entry<String,Map> ie = ii.next();
+
+ String interfaceName = ie.getKey();
+ Map interfaceDef = findTypeFacetByName(Construct.Node, nodeType,
+ Facet.interfaces, interfaceName);
+
+ if (interfaceDef == null) {
+          /* this is subject to augmentation: this could be reported as a warning rather than an error */
+ theContext.addError(Message.INVALID_INTERFACE_REFERENCE, nodeType, interfaceName, Construct.Node);
+ continue;
+ }
+
+ check_template_interface_definition(
+ interfaceName, ie.getValue(), interfaceDef, theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ protected void check_template_interface_definition(
+ String theInterfaceName,
+ Map theAssignment,
+ Map theDefinition,
+ CheckContext theContext) {
+
+ theContext.enter(theInterfaceName, Construct.Interface);
+ try {
+ //check the assignment of the common inputs
+//System.out.println("Checking interface inputs for " + theInterfaceName);
+ checkFacet(Construct.Interface,
+ theAssignment,
+ (String)theDefinition.get("type"),
+ Facet.inputs,
+ theContext);
+
+      //check the assignment of inputs in each operation
+      //unfortunately operations are not defined as a facet (grouped under a
+      //facet name such as 'operations') ..
+
+/*
+      Map<String, Map> inputsDefs = (Map<String,Map>)theDefinition.get("inputs");
+      Map<String,?> inputs = (Map<String,?>)theAssignment.get("inputs");
+
+      if (inputs != null && !inputs.isEmpty()) {
+        for (Map.Entry inputEntry: inputs.entrySet()) {
+          //check that the input name is part of the definition
+          if (inputsDefs != null && inputsDefs.containsKey(inputEntry.getKey())) {
+ checkDataValuation(inputEntry.getValue(),
+ inputsDefs.get(inputEntry.getKey()),
+ theContext);
+ }
+ else {
+ theContext.addError("No input " + inputEntry.getKey() + " was defined for the interface " + theInterfaceName, null);
+ }
+ }
+ }
+*/
+/*
+ String interfaceType = (String)theDefinition.get("type");
+ //list of property and attributes assignments
+ checkFacet(Construct.Interface, theAssignment, interfaceType,
+ "inputs", theContext);
+*/
+ //the interface operations: can new operations be defined here??
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+
+ @Checks(path="/topology_template/artifacts")
+ protected void check_template_artifacts_definition(
+ Map<String,Object> theDefinition,
+ CheckContext theContext) {
+ theContext.enter("artifacts");
+ try {
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ protected void check_template_artifact_definition(
+ String theArtifactName,
+ Map theAssignment,
+ Map theDefinition,
+ CheckContext theContext) {
+
+ theContext.enter(theArtifactName, Construct.Artifact);
+ try {
+
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ //generic checking actions, not related to validation rules
+
+
+ /* the type can be:
+ * a known type: predefined or user-defined
+ * a collection (list or map) and then check that the entry_schema points to one of the first two cases (is that it?)
+ */
+ protected boolean checkDataType(
+ String theName, Map theSpec, CheckContext theContext) {
+
+ if (!checkTypeReference(Construct.Data, theContext, (String)theSpec.get("type")))
+ return false;
+
+ String type = (String)theSpec.get("type");
+ if (/*isCollectionType(type)*/
+ "list".equals(type) || "map".equals(type)) {
+ Map entry_schema = (Map)theSpec.get("entry_schema");
+ if (entry_schema == null) {
+ //maybe issue a warning ?? or is 'string' the default??
+ return true;
+ }
+
+ if (!catalog.hasType(Construct.Data,(String)entry_schema.get("type"))) {
+ theContext.addError("Unknown entry_schema type: " + entry_schema, null);
+ return false;
+ }
+ }
+ return true;
+ }
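+
+  /* Illustrative sketch (not part of the original source): the kind of spec
+   * checkDataType inspects; the property name and the ctx variable are hypothetical.
+   *
+   *   Map<String,Object> spec = new HashMap<String,Object>();
+   *   spec.put("type", "list");
+   *   spec.put("entry_schema", Collections.singletonMap("type", "string"));
+   *   boolean ok = checkDataType("my_property", spec, ctx);
+   */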
+
+ /*
+ * generic checks for a type specification
+ */
+ protected boolean checkTypeConstruct(Construct theConstruct,
+ String theTypeName,
+ Map theDef,
+ CheckContext theContext) {
+    /* There is a 'weakness' in the super-type check below: the search for the super-type is done globally and
+     * not strictly along the 'import' path, i.e. the super-type definition should only be looked for in the target
+     * sub-tree starting at the current target, and not across ALL the targets
+ */
+ String parentType = (String)theDef.get("derived_from");
+ if (parentType != null && !catalog.hasType(theConstruct, parentType)) {
+ theContext.addError(
+ Message.INVALID_TYPE_REFERENCE, "derived_from", parentType, theConstruct);
+ return false;
+ }
+ return true;
+ }
+
+ /* Check that a particular facet (properties, attributes) of a construct type
+   * (node type, capability type, etc) is correctly (consistently) defined
+ * across a type hierarchy
+ */
+ protected boolean checkTypeConstructFacet(Construct theConstruct,
+ String theTypeName,
+ Map theTypeSpec,
+ Facet theFacet,
+ CheckContext theContext) {
+ Map<String, Map> defs =
+ (Map<String,Map>)theTypeSpec.get(theFacet.name());
+ if (null == defs) {
+ return true;
+ }
+
+ boolean res = true;
+
+ //given that the type was cataloged there will be at least one entry
+ Iterator<Map.Entry<String,Map>> i =
+ catalog.hierarchy(theConstruct, theTypeName);
+ if (!i.hasNext()) {
+ theContext.addError(
+        "The type " + theTypeName + " needs to be cataloged before attempting 'checkTypeConstructFacet'", null);
+ return false;
+ }
+ i.next(); //skip self
+ while(i.hasNext()) {
+ Map.Entry<String,Map> e = i.next();
+ Map<String, Map> superDefs = (Map<String,Map>)e.getValue()
+ .get(theFacet.name());
+ if (null == superDefs) {
+ continue;
+ }
+      //this computes entries that appear in both collections but with different values, i.e. the re-defined properties
+ Map<String, MapDifference.ValueDifference<Map>> diff = Maps.difference(defs, superDefs).entriesDiffering();
+
+ for (Iterator<Map.Entry<String, MapDifference.ValueDifference<Map>>> di = diff.entrySet().iterator(); di.hasNext(); ) {
+ Map.Entry<String, MapDifference.ValueDifference<Map>> de = di.next();
+ MapDifference.ValueDifference<Map> dediff = de.getValue();
+ log.finest(
+ theConstruct + " type " + theFacet + ": " + de.getKey() + " has been re-defined between the " + theConstruct + " types " + e.getKey() + " and " + theTypeName);
+        //for now we just check that the type is consistently re-declared
+ //if (!dediff.leftValue().get("type").equals(dediff.rightValue().get("type"))) {
+ if (!this.catalog.isDerivedFrom(theFacet.construct(),
+ (String)dediff.leftValue().get("type"),
+ (String)dediff.rightValue().get("type"))) {
+ theContext.addError(
+            theConstruct + " type " + theFacet + ", redefinition changed its type: "+ de.getKey() + " has been re-defined between the " + theConstruct + " types " + e.getKey() + " and " + theTypeName + " in an incompatible manner", null);
+ res = false;
+ }
+ }
+ }
+
+ return res;
+ }
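+
+  /* Illustrative sketch (not part of the original source): the Guava primitive used
+   * above; entriesDiffering() keeps only keys present in both maps but with different
+   * values, i.e. the facet entries that a sub-type re-defines.
+   *
+   *   Map<String,Map> subDefs  = Collections.singletonMap("p", (Map)Collections.singletonMap("type", "integer"));
+   *   Map<String,Map> baseDefs = Collections.singletonMap("p", (Map)Collections.singletonMap("type", "string"));
+   *   Maps.difference(subDefs, baseDefs).entriesDiffering();  //contains the key "p"
+   */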
+
+ /*
+ * Checks the validity of a certain facet of a construct
+ * (properties of a node) across a type hierarchy.
+   * For now the check is limited to verifying that a facet was declared
+   * somewhere in the construct type hierarchy (a node template property has
+   * been declared in the node type hierarchy).
+   *
+   * 2 versions are provided, with the more generic one allowing the type
+   * to be specified explicitly.
+ */
+ protected boolean checkFacet(Construct theConstruct,
+ Map theSpec,
+ Facet theFacet,
+ CheckContext theContext) {
+ return checkFacet(theConstruct, theSpec, null, theFacet, theContext);
+ }
+
+ /**
+ * We walk the hierarchy and verify the assignment of a property with respect to its definition.
+ * We also collect the names of those properties defined as required but for which no assignment was provided.
+ */
+ protected boolean checkFacet(Construct theConstruct,
+ Map theSpec,
+ String theSpecType,
+ Facet theFacet,
+ CheckContext theContext) {
+
+ Map<String,Map> defs = (Map<String,Map>)theSpec.get(theFacet.name());
+ if (null == defs) {
+ return true;
+ }
+    defs = Maps.newHashMap(defs); //work on a modifiable copy
+
+ boolean res = true;
+ if (theSpecType == null) {
+ theSpecType = (String)theSpec.get("type");
+ }
+ if (theSpecType == null) {
+ theContext.addError("No specification type available", null);
+ return false;
+ }
+
+    Map<String,Byte> missed = new HashMap<String, Byte>(); //keeps track of the missing required properties; bit 0 marks
+                                                           //'required', bit 1 marks that a default was found along the hierarchy
+ Iterator<Map.Entry<String,Map>> i =
+ catalog.hierarchy(theConstruct, theSpecType);
+ while (i.hasNext() && !defs.isEmpty()) {
+ Map.Entry<String,Map> type = i.next();
+
+//System.out.println(" **** type : " + type.getKey() );
+
+ Map<String, Map> typeDefs = (Map<String,Map>)type.getValue()
+ .get(theFacet.name());
+ if (null == typeDefs) {
+ continue;
+ }
+
+ MapDifference<String, Map> diff = Maps.difference(defs, typeDefs);
+
+      //these are the ones this type and the spec have in common (same key,
+      //different values)
+ Map<String, MapDifference.ValueDifference<Map>> facetDefs =
+ diff.entriesDiffering();
+      //TODO: this assumes the definition of the facet is not cumulative, i.e.
+      //that subtypes do not 'add' to the definition provided by the super-types;
+      //it considers that the most specialized definition stands on its own
+ for (MapDifference.ValueDifference<Map> valdef: facetDefs.values()) {
+ checkDataValuation(valdef.leftValue(), valdef.rightValue(), theContext);
+ }
+
+      //the ones that appear in the type but not in the spec; ensure the type does not require them.
+/*
+ Map<String, Map> unassigned = diff.entriesOnlyOnRight();
+ for (Map.Entry<String, Map> unassignedEntry: unassigned.entrySet()) {
+
+System.out.println(" **** unassigned -> " + unassignedEntry.getKey() + " : " + unassignedEntry.getValue());
+
+ if (unassignedEntry.getValue().containsKey("required")) {
+ Boolean required = (Boolean)unassignedEntry.getValue().get("required");
+ System.out.println(" **** before " + unassignedEntry.getKey() + ", required " + required + " = " + missed.get(unassignedEntry.getKey()));
+ missed.compute(unassignedEntry.getKey(),
+ (k, v) -> v == null ? (required.booleanValue() ? (byte)1
+ : (byte)0)
+ : (required.booleanValue() ? (byte)(v.byteValue() | 0x01)
+ : (byte)(v.byteValue() & 0x02)));
+
+
+ System.out.println(" **** after " + unassignedEntry.getKey() + ", required " + required + " = " + missed.get(unassignedEntry.getKey()));
+ }
+ if (unassignedEntry.getValue().containsKey("default")) {
+ System.out.println(" **** before " + unassignedEntry.getKey() + ", default = " + missed.get(unassignedEntry.getKey()));
+ missed.compute(unassignedEntry.getKey(),
+ (k, v) -> v == null ? (byte)2
+ : (byte)(v.byteValue() | 0x02));
+ System.out.println(" **** after " + unassignedEntry.getKey() + ", default = " + missed.get(unassignedEntry.getKey()));
+ }
+ }
+*/
+ //remove from properties all those that appear in this type: unfortunately this returns an unmodifiable map ..
+ defs = Maps.newHashMap(diff.entriesOnlyOnLeft());
+ }
+
+ if (!defs.isEmpty()) {
+ theContext.addError(Message.INVALID_FACET_REFERENCE, theConstruct, theFacet, theSpecType, defs);
+ res = false;
+ }
+
+ if (!missed.isEmpty()) {
+ List missedNames =
+ missed.entrySet()
+ .stream()
+ .filter(e -> e.getValue().byteValue() == (byte)1)
+ .map(e -> e.getKey())
+ .collect(Collectors.toList());
+ if (!missedNames.isEmpty()) {
+ theContext.addError(theConstruct + " " + theFacet + " missing required values for: " + missedNames, null);
+ res = false;
+ }
+ }
+
+ return res;
+ }
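+
+  /* Illustrative sketch (not part of the original source): how this overload is driven
+   * for a node template's property assignments; nodeTemplate and ctx are hypothetical
+   * placeholders for the parsed template fragment and the current CheckContext.
+   *
+   *   String nodeType = (String)nodeTemplate.get("type");
+   *   boolean ok = checkFacet(Construct.Node, nodeTemplate, nodeType,
+   *                           Facet.properties, ctx);
+   */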
+
+ /* Augmentation occurs in cases such as the declaration of capabilities within a node type.
+   * In such cases the construct facets (the capability's properties) can redefine (augment) the
+ * specification found in the construct type.
+ */
+ protected boolean checkFacetAugmentation(Construct theConstruct,
+ Map theSpec,
+ Facet theFacet,
+ CheckContext theContext) {
+ return checkFacetAugmentation(theConstruct, theSpec, null, theFacet, theContext);
+ }
+
+ protected boolean checkFacetAugmentation(Construct theConstruct,
+ Map theSpec,
+ String theSpecType,
+ Facet theFacet,
+ CheckContext theContext) {
+
+ Map<String,Map> augs = (Map<String,Map>)theSpec.get(theFacet.name());
+ if (null == augs) {
+ return true;
+ }
+
+ boolean res = true;
+ if (theSpecType == null) {
+ theSpecType = (String)theSpec.get("type");
+ }
+ if (theSpecType == null) {
+ theContext.addError("No specification type available", null);
+ return false;
+ }
+
+ for (Iterator<Map.Entry<String,Map>> ai = augs.entrySet().iterator(); ai.hasNext(); ) {
+ Map.Entry<String,Map> ae = ai.next();
+
+ //make sure it was declared by the type
+ Map facetDef = catalog.getFacetDefinition(theConstruct, theSpecType, theFacet, ae.getKey());
+ if (facetDef == null) {
+        theContext.addError("Unknown " + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") was used: " + ae.getKey(), null);
+ res = false;
+ continue;
+ }
+
+ //check the compatibility of the augmentation: only the type cannot be changed
+ //can the type be changed in a compatible manner ??
+ if (!facetDef.get("type").equals(ae.getValue().get("type"))) {
+ theContext.addError(theConstruct + " " + theFacet + " " + ae.getKey() + " has a different type than its definition: " + ae.getValue().get("type") + " instead of " + facetDef.get("type"), null);
+ res = false;
+ continue;
+ }
+
+ //check any valuation (here just defaults)
+ Object defaultValue = ae.getValue().get("default");
+ if (defaultValue != null) {
+ checkDataValuation(defaultValue, ae.getValue(), theContext);
+ }
+ }
+
+ return res;
+ }
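+
+  /* Illustrative sketch (not part of the original source): augmentation checking as it
+   * would be applied to a capability declared within a node type; capabilitySpec and
+   * ctx are hypothetical placeholders.
+   *
+   *   String capabilityType = (String)capabilitySpec.get("type");
+   *   checkFacetAugmentation(Construct.Capability, capabilitySpec, capabilityType,
+   *                          Facet.properties, ctx);
+   */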
+
+ protected boolean catalogTypes(Construct theConstruct, Map<String,Map> theTypes, CheckContext theContext) {
+
+ boolean res = true;
+ for (Map.Entry<String,Map> typeEntry: theTypes.entrySet()) {
+ res &= catalogType(theConstruct, typeEntry.getKey(), typeEntry.getValue(), theContext);
+ }
+
+ return res;
+ }
+
+ /* */
+ protected boolean catalogType(Construct theConstruct,
+ String theName,
+ Map theDef,
+ CheckContext theContext) {
+
+ if (!catalog.addType(theConstruct, theName, theDef)) {
+ theContext.addError(theConstruct + " type " + theName + " re-declaration", null);
+ return false;
+ }
+ log.finer(theConstruct + " type " + theName + " has been cataloged");
+
+ return true;
+ }
+
+
+ /* */
+ protected boolean checkTypeReference(Construct theConstruct,
+ CheckContext theContext,
+ String... theTypeNames) {
+ boolean res = true;
+ for (String typeName: theTypeNames) {
+ if (!isTypeReference(theConstruct, typeName)) {
+ theContext.addError(Message.INVALID_TYPE_REFERENCE, "", typeName, theConstruct);
+ res = false;
+ }
+ }
+ return res;
+ }
+
+ protected boolean isTypeReference(Construct theConstruct,
+ String theTypeName) {
+ return this.catalog.hasType(theConstruct, theTypeName);
+ }
+
+ /* node or relationship templates */
+ protected boolean checkTemplateReference(Construct theConstruct,
+ CheckContext theContext,
+ String... theTemplateNames) {
+ boolean res = true;
+ for (String templateName: theTemplateNames) {
+ if (!isTemplateReference(theConstruct, theContext, templateName)) {
+ theContext.addError(Message.INVALID_TEMPLATE_REFERENCE, "", templateName, theConstruct);
+ res = false;
+ }
+ }
+ return res;
+ }
+
+ protected boolean catalogTemplates(Construct theConstruct,
+ Map<String,Map> theTemplates,
+ CheckContext theContext) {
+
+ boolean res = true;
+ for (Map.Entry<String,Map> typeEntry: theTemplates.entrySet()) {
+ res &= catalogTemplate(theConstruct, typeEntry.getKey(), typeEntry.getValue(), theContext);
+ }
+
+ return res;
+ }
+
+ protected boolean catalogTemplate(Construct theConstruct,
+ String theName,
+ Map theDef,
+ CheckContext theContext) {
+ try {
+ catalog.addTemplate(theContext.target(), theConstruct, theName, theDef);
+ log.finer(theConstruct + " " + theName + " has been cataloged");
+ }
+ catch(CatalogException cx) {
+ theContext.addError("Failed to catalog " + theConstruct + " " + theName, cx);
+ return false;
+ }
+ return true;
+ }
+
+ protected boolean isTemplateReference(Construct theConstruct,
+ CheckContext theContext,
+ String theTemplateName) {
+ return this.catalog.hasTemplate(theContext.target(),theConstruct, theTemplateName);
+ }
+
+ /*
+   * For inputs/properties/attributes/(parameters). It is the caller's
+   * responsibility to provide the value (from a 'default', inlined, ..)
+ *
+ * @param theDef the definition of the given construct/facet as it appears in
+ * its enclosing type definition.
+ * @param
+ */
+ protected boolean checkDataValuation(Object theExpr,
+ Map<String,?> theDef,
+ CheckContext theContext) {
+ //first check if the expression is a function, if not handle it as a value assignment
+ Data.Function f = Data.function(theExpr);
+ if (f != null) {
+ return f.evaluator()
+ .eval(theExpr, theDef, theContext);
+ }
+ else {
+ Data.Type type = Data.typeByName((String)theDef.get("type"));
+ if (type != null) {
+//System.out.println("Evaluating " + theExpr + " as " + theExpr.getClass().getName() + " against " + theDef);
+ Data.Evaluator evaluator = null;
+
+ evaluator = type.evaluator();
+ if (evaluator == null) {
+ log.info("No value evaluator available for type " + type);
+ }
+ else {
+ if (theExpr != null) {
+ if (!evaluator.eval(theExpr, theDef, theContext)) {
+ return false;
+ }
+ }
+ }
+
+ evaluator = type.constraintsEvaluator();
+ if (evaluator == null) {
+ log.info("No constraints evaluator available for type " + type);
+ }
+ else {
+ if (theExpr != null) {
+ if (!evaluator.eval(theExpr, theDef, theContext)) {
+ return false;
+ }
+ }
+ else {
+ //should have a null value validator
+ }
+ }
+
+ return true;
+ }
+ else {
+ theContext.addError("Expression " + theExpr + " of " + theDef + " could not be evaluated", null);
+ return false;
+ }
+ }
+ }
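+
+  /* Illustrative sketch (not part of the original source): checking an inline value and a
+   * function expression against a property definition; def and ctx are hypothetical, and
+   * get_input is assumed to be among the recognized functions.
+   *
+   *   Map<String,Object> def = new HashMap<String,Object>();
+   *   def.put("type", "integer");
+   *   checkDataValuation(Integer.valueOf(42), def, ctx);
+   *   checkDataValuation(Collections.singletonMap("get_input", "some_input"), def, ctx);
+   */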
+
+  /** Given the type of a certain construct (a node type, for example), look
+   * in one of its facets (properties, capabilities, ..) for entries of the given
+   * facet type (when looking in properties, of the given data type).
+   * @return a map of all facets of the given type; an empty map signals that
+   * none were found
+ *
+ * Should we look for a facet construct of a compatible type: any type derived
+ * from the given facet's construct type??
+ */
+ protected Map<String,Map>
+ findTypeFacetByType(Construct theTypeConstruct,
+ String theTypeName,
+ Facet theFacet,
+ String theFacetType) {
+
+ log.logp(Level.FINER, "", "findTypeFacetByType", theTypeName + " " + theTypeConstruct + ": " + theFacetType + " " + theFacet);
+ Map<String,Map> res= new HashMap<String,Map>();
+ Iterator<Map.Entry<String,Map>> i =
+ catalog.hierarchy(theTypeConstruct, theTypeName);
+ while (i.hasNext()) {
+ Map.Entry<String,Map> typeSpec = i.next();
+ log.logp(Level.FINER, "", "findTypeFacetByType", "Checking " + theTypeConstruct + " type " + typeSpec.getKey() );
+ Map<String,Map> typeFacet =
+ (Map<String,Map>)typeSpec.getValue().get(theFacet.name());
+ if (typeFacet == null) {
+ continue;
+ }
+ Iterator<Map.Entry<String,Map>> fi = typeFacet.entrySet().iterator();
+ while(fi.hasNext()) {
+ Map.Entry<String,Map> facet = fi.next();
+ String facetType = (String)facet.getValue().get("type");
+ log.logp(Level.FINER, "", "findTypeFacetByType", "Checking " + facet.getKey() + " type " + facetType);
+
+ //here is the question: do we look for an exact match or ..
+ //now we check that the type has a capability of a type compatible
+ //(equal or derived from) the given capability type.
+ if (catalog.isDerivedFrom(
+ theFacet.construct(), /*theFacetType, facetType*/facetType, theFacetType)) {
+ //res.merge(facet.getKey(), facet.getValue(), (currDef, newDef)->(merge the base class definition in the existing definition but provide the result in a new map as to avoid changing the stored defintitions));
+ res.putIfAbsent(facet.getKey(), facet.getValue());
+ }
+ }
+ }
+ log.logp(Level.FINER, "", "findTypeFacetByType", "found " + res);
+
+ return res;
+ }
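+
+  /* Illustrative sketch (not part of the original source): collecting, across a node type
+   * hierarchy, all capabilities whose type is compatible with a given capability type; the
+   * type names below are hypothetical.
+   *
+   *   Map<String,Map> caps = findTypeFacetByType(Construct.Node, "my.nodes.Compute",
+   *                                              Facet.capabilities, "my.capabilities.Endpoint");
+   *   //an empty map signals that no compatible capability was found
+   */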
+
+ /* */
+ protected Map<String,Object>
+ findTypeFacetByName(Construct theTypeConstruct,
+ String theTypeName,
+ Facet theFacet,
+ String theFacetName) {
+ log.logp(Level.FINER, "", "findTypeFacetByName", theTypeConstruct + " " + theTypeName);
+ Iterator<Map.Entry<String,Map>> i =
+ catalog.hierarchy(theTypeConstruct, theTypeName);
+ while (i.hasNext()) {
+ Map.Entry<String,Map> typeSpec = i.next();
+ log.logp(Level.FINER, "", "findTypeFacetByName", "Checking " + theTypeConstruct + " type " + typeSpec.getKey() );
+ Map<String,Map> typeFacet =
+ (Map<String,Map>)typeSpec.getValue().get(theFacet.name());
+ if (typeFacet == null) {
+ continue;
+ }
+ Map<String,Object> facet = typeFacet.get(theFacetName);
+ if (facet != null) {
+ return facet;
+ }
+ }
+ return null;
+ }
+
+  /* Requirements are the odd ones out as they are structured as a sequence of one-entry maps ..
+ */
+ protected Map<String,Map> findNodeTypeRequirementByName(
+ String theNodeType, String theRequirementName) {
+ log.logp(Level.FINER, "", "findNodeTypeRequirementByName", theNodeType + "/" + theRequirementName);
+ Iterator<Map.Entry<String,Map>> i =
+ catalog.hierarchy(Construct.Node, theNodeType);
+ while (i.hasNext()) {
+ Map.Entry<String,Map> nodeType = i.next();
+ log.logp(Level.FINER, "", "findNodeTypeRequirementByName", "Checking node type " + nodeType.getKey() );
+ List<Map<String,Map>> nodeTypeRequirements =
+ (List<Map<String,Map>>)nodeType.getValue().get("requirements");
+ if (nodeTypeRequirements == null) {
+ continue;
+ }
+
+ for (Map<String,Map> requirement: nodeTypeRequirements) {
+ Map requirementDef = requirement.get(theRequirementName);
+ if (requirementDef != null) {
+ return requirementDef;
+ }
+ }
+ }
+ return null;
+ }
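+
+  /* Illustrative sketch (not part of the original source): because requirements are kept as
+   * a sequence of one-entry maps, the lookup above scans each list entry by name; the type
+   * and requirement names below are hypothetical.
+   *
+   *   Map reqDef = findNodeTypeRequirementByName("my.nodes.Compute", "local_storage");
+   *   Object requiredCapability = (reqDef == null) ? null : reqDef.get("capability");
+   */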
+
+ /*
+ *
+ */
+ public Map findNodeTemplateInterfaceOperation(
+ Target theTarget, String theNodeName, String theInterfaceName, String theOperationName) {
+
+ Map nodeDefinition = (Map)catalog.getTemplate(theTarget, Construct.Node, theNodeName);
+ if (nodeDefinition == null)
+ return null;
+
+ Map interfaces = (Map)nodeDefinition.get("interfaces");
+ if (interfaces == null)
+ return null;
+
+ Map interfaceDef = (Map)interfaces.get(theInterfaceName);
+ if (interfaceDef == null)
+ return null;
+
+ return (Map)interfaceDef.get(theOperationName);
+ }
+
+ public Map findNodeTypeInterfaceOperation(
+ String theNodeType, String theInterfaceName, String theOperationName) {
+
+ return null;
+ }
+
+ /*
+   * Assumes that, at this time, the constraints (syntax) for all names (construct
+   * types, constructs, facets) are the same.
+ */
+ public boolean checkName(String theName,
+ CheckContext theContext) {
+ return true;
+ }
+
+ /*
+ * Additional generics checks to be performed on any definition: construct,
+ * construct types, etc ..
+ */
+ public boolean checkDefinition(String theName,
+ Map theDefinition,
+ CheckContext theContext) {
+ if (theDefinition == null) {
+ theContext.addError("Missing definition for " + theName, null);
+ return false;
+ }
+
+ if (theDefinition.isEmpty()) {
+ theContext.addError("Empty definition for " + theName, null);
+ return false;
+ }
+
+ return true;
+ }
+
+ public boolean checkDefinition(String theName,
+ List theDefinition,
+ CheckContext theContext) {
+ if (theDefinition == null) {
+ theContext.addError("Missing definition for " + theName, null);
+ return false;
+ }
+
+ if (theDefinition.isEmpty()) {
+ theContext.addError("Empty definition for " + theName, null);
+ return false;
+ }
+
+ return true;
+ }
+
+  /* I'd rather validate each import once at its own rule's time (see next method) but unfortunately the canonicals
+   * are not visible 'right away' (they are applied at the end of the pre-validation but not visible in the
+   * post-validation of the same rule because of the kwalify validator implementation).
+ */
+ @Validates(rule="service_template_definition",timing=Validates.Timing.post)
+ protected void validate_imports(
+ Object theValue, Rule theRule, Validator.ValidationContext theContext) {
+
+ Map template = (Map)theValue;
+ List<Map> imports = (List)template.get("imports");
+
+ if (imports != null) {
+ for (Map importEntry: imports) {
+ validate_import(mapEntry(importEntry).getValue(), theRule, theContext);
+ }
+ }
+ }
+
+ //@Validates(rule="import_definition",timing=Validates.Timing.post)
+ protected void validate_import(
+ Object theValue, Rule theRule, Validator.ValidationContext theContext) {
+
+ log.entering("", "import", theContext.getPath());
+
+ TOSCAValidator validator = (TOSCAValidator)theContext.getValidator();
+ Target tgt = validator.getTarget();
+
+ Map def = (Map)theValue; //importEntry.getValue();
+ log.fine("Processing import " + def);
+
+ String tfile = (String)def.get("file");
+ Target tgti = this.locator.resolve(tfile);
+ if (tgti == null) {
+ theContext.addError("Failure to resolve import '" + def + "', imported from " + tgt, theRule, null, null);
+ return;
+ }
+ log.finer("Import " + def + " located at " + tgti.getLocation());
+
+ if (this.catalog.addTarget(tgti, tgt)) {
+
+ //we've never seen this import (location) before
+ try {
+ List<Target> tgtis = parseTarget(tgti);
+ if (tgtis.isEmpty())
+ return; //continue;
+
+ if (tgtis.size() > 1) {
+ theContext.addError("Import '" + tgti + "', imported from " + tgt + ", contains multiple yaml documents" , theRule, null, null);
+ return; //continue;
+ }
+
+ tgti = tgtis.get(0);
+ if (tgt.getReport().hasErrors()) {
+          theContext.addError("Failure parsing import '" + tgti + "', imported from " + tgt, theRule, null, null);
+ return; //continue;
+ }
+
+ validateTarget(tgti);
+ if (tgt.getReport().hasErrors()) {
+          theContext.addError("Failure validating import '" + tgti + "', imported from " + tgt, theRule, null, null);
+ return; //continue;
+ }
+ }
+ catch (CheckerException cx) {
+        theContext.addError("Failure validating import '" + tgti + "', imported from " + tgt, theRule, cx, null);
+ }
+ }
+
+    //replace with the actual location (also because this is what targets get
+    //indexed by .. bad, this exposes the catalog's inner workings)
+ def.put("file", tgti.getLocation());
+ }
+
+ /* plenty of one entry maps around */
+ private Map.Entry mapEntry(Object theMap) {
+ return (Map.Entry)((Map)theMap).entrySet().iterator().next();
+ }
+
+
+ /* */
+ protected static Catalog commonsCatalog = null;
+
+ /*
+   * commons are built-in and supposed to be bulletproof, so any error in here
+   * is reported loudly (as a RuntimeException).
+ */
+ protected static Catalog commonsCatalog() {
+
+ synchronized (Catalog.class) {
+
+ if (commonsCatalog != null) {
+ return commonsCatalog;
+ }
+
+ //if other templates are going to be part of the common type system
+ //add them to this list. order is relevant.
+ final String[] commons = new String[] {
+ "tosca/tosca-common-types.yaml" };
+
+ Checker commonsChecker = null;
+ try {
+ commonsChecker = new Checker();
+
+ for (String common: commons) {
+ commonsChecker.check(common, buildCatalog(false));
+ Report commonsReport = commonsChecker.targets().iterator().next().getReport();
+
+ if (commonsReport.hasErrors()) {
+ throw new RuntimeException("Failed to process commons:\n" +
+ commonsReport);
+ }
+ }
+ }
+ catch(CheckerException cx) {
+ throw new RuntimeException("Failed to process commons", cx);
+ }
+
+ return commonsCatalog = commonsChecker.catalog;
+ }
+ }
+
+ public static Catalog buildCatalog() {
+ return buildCatalog(true);
+ }
+
+ /*
+ */
+ public static Catalog buildCatalog(boolean doCommons) {
+
+ Catalog catalog = new Catalog(doCommons ? commonsCatalog() : null);
+ if (!doCommons) {
+ //add core TOSCA types
+ for (Data.CoreType type: Data.CoreType.class.getEnumConstants()) {
+ catalog.addType(Construct.Data, type.toString(), Collections.emptyMap());
+ }
+ }
+ return catalog;
+ }
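+
+  /* Illustrative sketch (not part of the original source): how a caller might obtain a
+   * catalog before running a check.
+   *
+   *   Catalog withCommons = Checker.buildCatalog();      //seeded with the common TOSCA types
+   *   Catalog coreOnly    = Checker.buildCatalog(false); //only the core Data types
+   */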
+
+ protected void checks(String theName,
+ Object theTarget,
+ CheckContext theContext) {
+
+ handles("checks:" + theContext.getPath(theName), theTarget, theContext);
+ }
+
+ protected void catalogs(String theName,
+ Object theTarget,
+ CheckContext theContext) {
+
+ handles("catalogs:" + theContext.getPath(theName), theTarget, theContext);
+ }
+
+ protected boolean validates(Validates.Timing theTiming,
+ Object theTarget,
+ Rule theRule,
+ Validator.ValidationContext theContext) {
+    //might look odd but we need both 'handles' calls to be executed
+ boolean validated =
+ handles(theTiming + "-validates:" + theRule.getName(), theTarget, theRule, theContext);
+ return handles(theTiming + "-validates:", theTarget, theRule, theContext) || validated;
+ }
+
+ /*
+   * allow the handlers to return a boolean .. we only do this in order to accommodate the Canonicals' way of avoiding
+   * validation when a short form is encountered.
+ * @return true if any handler returned true (if they returned something at all), false otherwise (even when no
+ * handlers were found)
+ */
+ protected boolean handles(String theHandlerKey, Object... theArgs) {
+
+ boolean handled = false;
+ Map<Method, Object> entries = handlers.row(theHandlerKey);
+ if (entries != null) {
+ for (Map.Entry<Method, Object> entry: entries.entrySet()) {
+ Object res = null;
+ try {
+ res = entry.getKey().invoke(entry.getValue(), theArgs);
+ }
+ catch (Exception x) {
+ log.log(Level.WARNING, theHandlerKey + " by " + entry.getKey() + " failed", x);
+ }
+ handled |= res == null ? false : (res instanceof Boolean && ((Boolean)res).booleanValue());
+ }
+ }
+ return handled;
+ }
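+
+  /* Illustrative sketch (not part of the original source): the shape of a handler that the
+   * dispatch above would invoke; the path and the method are hypothetical, and returning a
+   * value only matters for the 'validates' handlers.
+   *
+   *   @Checks(path="/topology_template/node_templates")
+   *   protected void check_something_extra(Map<String,Map> theDef, CheckContext theContext) {
+   *     //additional checks keyed to this path go here
+   *   }
+   */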
+
+ /**
+ */
+ public class TOSCAValidator extends Validator {
+
+    //what we are validating
+ private Target target;
+
+ public TOSCAValidator(Target theTarget, Object theSchema)
+ throws SchemaException {
+ super(theSchema);
+ this.target = theTarget;
+ }
+
+ public Target getTarget() {
+ return this.target;
+ }
+
+ /* hook method called by Validator#validate()
+ */
+ protected boolean preValidationHook(Object value, Rule rule, ValidationContext context) {
+
+ return validates(Validates.Timing.pre, value, rule, context);
+ }
+
+ /*
+     * Only gets invoked once the value was successfully verified against the syntax indicated by the given rule.
+ */
+ protected void postValidationHook(Object value,
+ Rule rule,
+ ValidationContext context) {
+ validates(Validates.Timing.post, value, rule, context);
+ }
+
+ }
+
+ /**
+ * Maintains state across the checking process.
+ */
+ public class CheckContext {
+
+ public class Step {
+
+ private final Construct construct;
+ private final String name;
+ private final Object info;
+
+ public Step(String theName, Construct theConstruct, Object theInfo) {
+ this.construct = theConstruct;
+ this.name = theName;
+ this.info = theInfo;
+ }
+
+ public Construct construct() { return this.construct; }
+ public String name() { return this.name; }
+ public Object info() { return this.info; }
+ }
+
+
+ private Target target;
+    private ArrayList<Step> steps = new ArrayList<Step>(20); //sized for an expected max nesting depth ..
+
+
+ public CheckContext(Target theTarget) {
+ this.target = theTarget;
+ }
+
+ public CheckContext enter(String theName) {
+ return enter(theName, null, null);
+ }
+
+ public CheckContext enter(String theName, Construct theConstruct) {
+ return enter(theName, theConstruct, null);
+ }
+
+ public CheckContext enter(String theName, Construct theConstruct, Object theInfo) {
+ this.steps.add(new Step(theName, theConstruct, theInfo));
+ Checker.this.log.entering("check", theName, getPath());
+ return this;
+ }
+
+ public CheckContext exit() {
+ Step step = this.steps.get(this.steps.size()-1);
+ Checker.this.log.exiting("check", step.name(), getPath());
+ this.steps.remove(this.steps.size()-1);
+ return this;
+ }
+
+ public String getPath() {
+ return buildPath(null);
+ }
+
+ public String getPath(String theNextElem) {
+ return buildPath(theNextElem);
+ }
+
+ protected String buildPath(String theElem) {
+ StringBuffer sb = new StringBuffer("");
+ for (Step s: this.steps)
+ sb.append(s.name())
+ .append("/");
+ if (theElem != null)
+ sb.append(theElem)
+ .append("/");
+
+ return sb.substring(0,sb.length()-1);
+ }
+
+ public Step enclosingConstruct(Construct theConstruct) {
+ for (int i = this.steps.size()-1; i > 0; i--) {
+ Construct c = this.steps.get(i).construct();
+ if (c != null && c.equals(theConstruct)) {
+ return this.steps.get(i);
+ }
+ }
+ return null;
+ }
+
+ public Step enclosingElement(String theName) {
+ for (int i = this.steps.size()-1; i > 0; i--) {
+ String n = this.steps.get(i).name();
+ if (n != null && n.equals(theName)) {
+ return this.steps.get(i);
+ }
+ }
+ return null;
+ }
+
+ public Step enclosing() {
+ if (this.steps.size() > 0) {
+ return this.steps.get(this.steps.size()-1);
+ }
+ return null;
+ }
+
+ public CheckContext addError(String theMessage, Throwable theCause) {
+ this.target.report(new TargetError("", getPath(), theMessage, theCause));
+ return this;
+ }
+
+ public CheckContext addError(Message theMsg, Object... theArgs) {
+ this.target.report(new TargetError("", getPath(), messages.format(theMsg, theArgs), null));
+ return this;
+ }
+
+ public boolean hasErrors() {
+ return this.target.getReport().hasErrors();
+ }
+
+ public Checker checker() {
+ return Checker.this;
+ }
+
+ public Catalog catalog() {
+ return Checker.this.catalog;
+ }
+
+ public Target target() {
+ return this.target;
+ }
+
+ public String toString() {
+ return "CheckContext(" + this.target.getLocation() + "," + getPath() + ")";
+ }
+ }
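+
+  /* Illustrative sketch (not part of the original source): the enter/exit discipline the
+   * check methods above follow so that errors are reported against the current path.
+   *
+   *   theContext.enter("properties");
+   *   try {
+   *     //... perform checks, calling theContext.addError(..) on failures
+   *   }
+   *   finally {
+   *     theContext.exit();
+   *   }
+   */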
+
+ public static class CheckerConfiguration {
+
+ private boolean allowAugmentation = true;
+ private String defaultImportsPath = null;
+ private String defaultCheckerRoots = null;
+
+ protected CheckerConfiguration() {
+ }
+
+ public CheckerConfiguration allowAugmentation(boolean doAllow) {
+ this.allowAugmentation = doAllow;
+ return this;
+ }
+
+ public boolean allowAugmentation() {
+ return this.allowAugmentation;
+ }
+
+ public CheckerConfiguration defaultImportsPath(String thePath) {
+ this.defaultImportsPath = thePath;
+ return this;
+ }
+
+ public String defaultImportsPath() {
+ return this.defaultImportsPath;
+ }
+
+ }
+
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CheckerException.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CheckerException.java
new file mode 100644
index 0000000..3751bb5
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CheckerException.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+
+/**
+ * A checker exception represents an error that stops the checker from
+ * completing its task.
+ */
+public class CheckerException extends Exception {
+
+ public CheckerException(String theMsg, Throwable theCause) {
+ super(theMsg, theCause);
+ }
+
+ public CheckerException(String theMsg) {
+ super(theMsg);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CommonLocator.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CommonLocator.java
new file mode 100644
index 0000000..f650f9e
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CommonLocator.java
@@ -0,0 +1,156 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+import java.io.InputStream;
+import java.io.IOException;
+
+import java.net.URL;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import java.nio.file.Paths;
+
+import java.util.Set;
+import java.util.LinkedHashSet;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import com.google.common.collect.Iterables;
+
+
+public class CommonLocator implements TargetLocator {
+
+ private static Logger log =
+ Logger.getLogger("com.att.research.is.tosca.yaml.TargetLocator");
+
+  private Set<URI> searchPaths = new LinkedHashSet<URI>();
+
+  /* will create a locator with one default search path: the directory
+   * from which the app was started; the classpath this checker (actually this
+   * class) was loaded from is always tried as a fallback in resolve() */
+ public CommonLocator() {
+ addSearchPath(
+ Paths.get(".").toAbsolutePath().normalize().toUri());
+ }
+
+ public CommonLocator(String... theSearchPaths) {
+ for (String path: theSearchPaths) {
+ addSearchPath(path);
+ }
+ }
+
+ public boolean addSearchPath(URI theURI) {
+
+ if (!theURI.isAbsolute()) {
+ log.log(Level.WARNING, "Search paths must be absolute uris: " + theURI);
+ return false;
+ }
+
+ return searchPaths.add(theURI);
+ }
+
+ public boolean addSearchPath(String thePath) {
+ URI suri = null;
+ try {
+ suri = new URI(thePath);
+ }
+ catch(URISyntaxException urisx) {
+ log.log(Level.WARNING, "Invalid search path: " + thePath, urisx);
+ return false;
+ }
+
+ return addSearchPath(suri);
+ }
+
+ public Iterable<URI> searchPaths() {
+ return Iterables.unmodifiableIterable(this.searchPaths);
+ }
+
+ /**
+   * Takes the given path, resolves it as a URI against each search path, and
+   * attempts to open it (as a way of verifying its existence), stopping at
+   * the first successful attempt.
+ */
+ public Target resolve(String theName) {
+ URI puri = null;
+ InputStream pis = null;
+
+ //try absolute
+ try {
+ puri = new URI(theName);
+ if (puri.isAbsolute()) {
+ try {
+ pis = puri.toURL().openStream();
+ }
+ catch (IOException iox) {
+          log.log(Level.WARNING, "The path " + theName + " is an absolute uri but it cannot be opened", iox);
+ return null;
+ }
+ }
+ }
+ catch(URISyntaxException urisx) {
+ log.log(Level.FINER, "TargetResolver failed attempting " + puri, urisx);
+ //keep it silent but what are the chances ..
+ }
+
+ //try relative to the search paths
+ for (URI suri: searchPaths) {
+ try {
+ puri = suri.resolve(theName);
+ log.finer("TargetResolver trying " + puri);
+ pis = puri.toURL().openStream();
+ return new Target(theName, puri.normalize());
+ }
+ catch (Exception x) {
+ log.log(Level.FINER, "TargetResolver failed attempting " + puri, x);
+ continue;
+ }
+ finally {
+ if (pis!= null) {
+ try {
+ pis.close();
+ }
+ catch (IOException iox) {
+ }
+ }
+ }
+ }
+
+ //try classpath
+ URL purl = getClass().getClassLoader().getResource(theName);
+ if (purl != null) {
+ try {
+ return new Target(theName, purl.toURI());
+ }
+ catch (URISyntaxException urisx) {
+ }
+ }
+
+ return null;
+ }
+
+ public String toString() {
+ return "CommonLocator(" + this.searchPaths + ")";
+ }
+
+
+ public static void main(String[] theArgs) {
+ TargetLocator tl = new CommonLocator();
+ tl.addSearchPath(java.nio.file.Paths.get("").toUri());
+ tl.addSearchPath("file:///");
+ System.out.println(
+ tl.resolve(theArgs[0]));
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Construct.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Construct.java
new file mode 100644
index 0000000..45108cb
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Construct.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+/*
+ * What exactly is allowed to go in here is a subject of meditation :) I would have said 'elements with a type' but
+ * What exactly is allowed to go in here is a subject of meditation :) I would have said 'elements with a type' but
+ * that would not cover Requirement and Workflow; 'topology template top-level elements' would not cover others either ..
+ * Properties/Attributes/Inputs/Outputs are just Data constructs under a particular name.
+ */
+public enum Construct {
+ Data,
+ Requirement,
+ Capability,
+ Relationship,
+ Artifact,
+ Interface,
+ Node,
+ Group,
+ Policy,
+ Workflow
+}
+
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Data.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Data.java
new file mode 100644
index 0000000..fc29dcf
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Data.java
@@ -0,0 +1,923 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+import java.lang.reflect.InvocationTargetException;
+
+import java.util.Collection;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.EnumSet;
+
+import java.util.logging.Logger;
+import java.util.logging.Level;
+
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Table;
+import com.google.common.collect.HashBasedTable;
+
+/*
+ * tosca data type (primitive or user defined) evaluators and validators, used in checking valuations
+ */
+public class Data {
+
+ private static Logger log = Logger.getLogger(Data.class.getName());
+
+
+ private Data() {
+ }
+
+ /*
+ */
+ @FunctionalInterface
+ public static interface Evaluator {
+
+ public boolean eval(Object theExpr, Map theDef, Checker.CheckContext theCtx);
+ }
+
+
+ /* data type processing */
+
+ private static Map<String,Type> typesByName = new HashMap<String,Type>();
+ static {
+ //CoreType.String.toString();
+ //CoreFunction.concat.toString();
+ //Constraint.equal.toString();
+ }
+
+
+ public static Data.Type typeByName(String theName) {
+ return typesByName.getOrDefault(theName, userType);
+ }
+/*
+ public static Evaluator getTypeEvaluator(Type theType) {
+ }
+*/
+
+ /* Needs a better name ?? RValue??
+   * This is not an rvalue (in the C sense) per se but the construct whose instances
+   * yield rvalues. It is a construct that yields data, not the data (yield)
+ * itself.
+ */
+ public static interface Type {
+
+ public String name();
+
+ public Evaluator evaluator();
+
+ public Evaluator constraintsEvaluator();
+ }
+
+ /* generic placeholder
+ */
+ private static Type userType = new Type() {
+
+ public String name() {
+ return null;
+ }
+
+ public Evaluator evaluator() {
+ return Data::evalUser;
+ }
+
+ public Evaluator constraintsEvaluator() {
+ return Data::evalUserConstraints;
+ }
+ };
+
+
+ public static enum CoreType implements Type {
+
+ String("string",
+ (expr,def,ctx) -> expr != null && expr instanceof String,
+ Data::evalScalarConstraints),
+ Integer("integer",
+ (expr,def,ctx) -> Data.valueOf(ctx, expr, Integer.class),
+ Data::evalScalarConstraints),
+ Float("float",
+ (expr,def,ctx) -> Data.valueOf(ctx, expr, Double.class, Integer.class),
+ Data::evalScalarConstraints),
+ Boolean("boolean",
+ (expr,def,ctx) -> Data.valueOf(ctx, expr, Boolean.class),
+ Data::evalScalarConstraints),
+ Null("null",
+ (expr,def,ctx) -> expr.equals("null"),
+ null),
+ Timestamp("timestamp",
+ (expr,def,ctx) -> timestampRegex.matcher(expr.toString()).matches(),
+ null),
+ List("list", Data::evalList, Data::evalListConstraints),
+ Map("map", Data::evalMap, Data::evalMapConstraints),
+ Version("version",
+ (expr,def,ctx) -> versionRegex.matcher(expr.toString()).matches(),
+ null),
+ /* use a scanner and check that the upper bound is indeed greater than
+ * the lower bound */
+ Range("range",
+ (expr,def,ctx) -> { return rangeRegex.matcher(expr.toString()).matches();},
+ null ),
+ Size("scalar-unit.size",
+ (expr,def,ctx) -> sizeRegex.matcher(expr.toString()).matches(),
+ null),
+ Time("scalar-unit.time",
+ (expr,def,ctx) -> timeRegex.matcher(expr.toString()).matches(),
+ null),
+ Frequency("scalar-unit.frequency",
+ (expr,def,ctx) -> frequencyRegex.matcher(expr.toString()).matches(),
+ null);
+
+
+ private String toscaName;
+ private Evaluator valueEvaluator,
+ constraintsEvaluator;
+
+ private CoreType(String theName, Evaluator theValueEvaluator, Evaluator theConstraintsEvaluator) {
+ this.toscaName = theName;
+ this.valueEvaluator = theValueEvaluator;
+ this.constraintsEvaluator = theConstraintsEvaluator;
+
+ if (typesByName == null)
+ throw new RuntimeException("No type index available!");
+
+ typesByName.put(this.toscaName, this);
+ }
+
+ public String toString() {
+ return this.toscaName;
+ }
+
+ public Evaluator evaluator() {
+ return this.valueEvaluator;
+ }
+
+ public Evaluator constraintsEvaluator() {
+ return this.constraintsEvaluator;
+ }
+ }
+
+ private static Pattern timestampRegex = null,
+ versionRegex = null,
+ rangeRegex = null,
+ sizeRegex = null,
+ timeRegex = null,
+ frequencyRegex = null;
+
+ static {
+ try {
+ timestampRegex = Pattern.compile(
+ "\\p{Digit}+"); //?? where to find the definition
+
+ //<major_version>.<minor_version>[.<fix_version>[.<qualifier>[-<build_version]]]
+ versionRegex = Pattern.compile(
+ "\\p{Digit}+\\.\\p{Digit}+?(\\.\\p{Digit}+(\\.\\p{Alpha}+(\\-\\p{Digit}+))*)*");
+
+ rangeRegex = Pattern.compile(
+ "\\[[ ]*\\p{Digit}+(\\.\\p{Digit}+)?[ ]*\\,[ ]*(\\p{Digit}+(\\.\\p{Digit}+)?|UNBOUNDED)[ ]*\\]");
+
+ sizeRegex = Pattern.compile(
+ "\\p{Digit}+(\\.\\p{Digit}+)?[ ]*(B|kB|KiB|MB|MiB|GB|GiB|TB|TiB)");
+
+ timeRegex = Pattern.compile(
+ "\\p{Digit}+(\\.\\p{Digit}+)?[ ]*(d|h|m|s|ms|us|ns)");
+
+ frequencyRegex = Pattern.compile(
+ "\\p{Digit}+(\\.\\p{Digit}+)?[ ]*(Hz|kHz|MHz|GHz)");
+ }
+ catch (PatternSyntaxException psx) {
+ throw new RuntimeException("Bad patterns", psx);
+ }
+ }
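+
+  /* Illustrative sketch (not part of the original source): how a declared type name resolves
+   * to an evaluator; unknown names fall back to the user-type placeholder. def and ctx are
+   * hypothetical.
+   *
+   *   Data.Type t = Data.typeByName("scalar-unit.size");
+   *   boolean ok = t.evaluator().eval("4 GiB", def, ctx);   //matches the size pattern above
+   */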
+
+ /* */
+ public static boolean evalScalarConstraints(Object theVal,
+ Map theDef,
+ Checker.CheckContext theCtx) {
+ Data.Type type = typeByName((String)theDef.get("type"));
+ List<Map<String,Object>> constraints =
+ (List<Map<String,Object>>)theDef.get("constraints");
+ if (constraints == null) {
+ return true;
+ }
+
+ //check value against constraints
+ boolean res = true;
+ for (Map<String,Object> constraintDef: constraints) {
+ Map.Entry<String,Object> constraintEntry =
+ constraintDef.entrySet().iterator().next();
+ Data.Constraint constraint = constraintByName(constraintEntry.getKey());
+
+//    the def passed here includes all constraints, which is not necessary; we could pass
+//    simply constraintEntry.getValue()
+ Evaluator constraintEvaluator = getTypeConstraintEvaluator(type, constraint);
+ if (constraintEvaluator == null) {
+        log.info("No constraint evaluator available for " + type + "/" + constraint);
+ continue;
+ }
+
+ if (!constraintEvaluator.eval(theVal, theDef, theCtx)) {
+ theCtx.addError("Value " + theVal + " failed constraint " + constraintEntry, null);
+ res = false;
+ }
+ }
+ return res;
+ }
+
+ /*
+ * It assumes the specification is complete, i.e. it contains a valid
+ * entry_schema section.
+ * TODO: check constraints, i.e. entrySchema.get("constraints")
+ */
+ public static boolean evalList(Object theVal,
+ Map theDef,
+ Checker.CheckContext theCtx) {
+ try {
+ return evalCollection((List)theVal, theDef, theCtx);
+ }
+ catch (ClassCastException ccx) {
+ theCtx.addError("Value " + theVal + " not a list", null);
+ return false;
+ }
+ }
+
+ public static boolean evalMap(Object theVal,
+ Map theDef,
+ Checker.CheckContext theCtx) {
+ try {
+ return evalCollection(((Map)theVal).values(), theDef, theCtx);
+ }
+ catch (ClassCastException ccx) {
+ theCtx.addError("Value " + theVal + " not a map", null);
+ return false;
+ }
+ }
+
+
+ /**
+ * The elements of a collection can be of a core type or user defined type.
+ */
+ private static boolean evalCollection(Collection theVals,
+ Map theDef,
+ Checker.CheckContext theCtx) {
+
+//System.out.println("evalCollection: " + theDef + ", " + theVals);
+
+ Data.Type entryType = null;
+ Map entryTypeDef = (Map)theDef.get("entry_schema");
+ if (null != entryTypeDef)
+ entryType = typeByName((String)entryTypeDef.get("type"));
+
+//System.out.println("evalCollection, entry definition: " + entryTypeDef);
+ boolean res = true;
+ for (Object val: theVals) {
+ //check if the value is not a function call
+ Data.Function f = Data.function(val);
+      if (f != null) {
+        if (!f.evaluator().eval(val, entryTypeDef, theCtx)) {
+          res = false;
+        }
+      }
+ else if (entryType != null &&
+ !entryType.evaluator().eval(val, entryTypeDef, theCtx)) {
+ res= false;
+        //the error should have been reported by the particular evaluator
+ //theCtx.addError("Value " + val + " failed evaluation", null);
+ }
+ }
+ return res;
+ }
+
+ public static boolean evalListConstraints(Object theVal,
+ Map theDef,
+ Checker.CheckContext theCtx) {
+ return evalCollectionConstraints((List)theVal, theDef, theCtx);
+ }
+
+ public static boolean evalMapConstraints(Object theVal,
+ Map theDef,
+ Checker.CheckContext theCtx) {
+ return evalCollectionConstraints(((Map)theVal).values(), theDef, theCtx);
+ }
+
+ private static boolean evalCollectionConstraints(Collection theVals,
+ Map theDef,
+ Checker.CheckContext theCtx) {
+//System.out.println("evalCollectionConstraints: " + theDef + ", " + theVals);
+
+ //should check overall constraints
+
+ if (theVals == null)
+ return true;
+
+ Map entryTypeDef = (Map)theDef.get("entry_schema");
+ if (null == entryTypeDef)
+ return true;
+
+ String entryTypeName = (String)entryTypeDef.get("type");
+ Data.Type entryType = typeByName(entryTypeName);
+
+//System.out.println("evalCollectionConstraints, entry definition: " + entryTypeDef);
+
+ boolean res = true;
+ for (Object val: theVals) {
+ Evaluator entryEvaluator = entryType.constraintsEvaluator();
+ if (entryEvaluator != null &&
+ !entryEvaluator.eval(val, entryTypeDef, theCtx)) {
+ res= false;
+ //the constraints evaluator should have already added an error, but it also adds some context
+ //theCtx.addError("Value " + val + " failed evaluation", null);
+ }
+ }
+ return res;
+ }
+
+ /*
+   * All required properties across the hierarchical definition must be present
+ * TODO: The expr cannot contain any entry not specified in the type definition
+ */
+ public static boolean evalUser(Object theVal,
+ Map theDef,
+ Checker.CheckContext theCtx) {
+//System.out.println("evalUser: " + theDef + ", " + theVal);
+
+ boolean res = true;
+ Map val = (Map)theVal;
+ //must be done with respect to the super-type(s) definition
+ Iterator<Map.Entry> props = theCtx.catalog()
+ .facets(Construct.Data,
+ Facet.properties,
+ (String)theDef.get("type"));
+ while (props.hasNext()) {
+ Map.Entry propEntry = props.next();
+ Map propDef = (Map)propEntry.getValue();
+ Object propVal = val.get(propEntry.getKey());
+
+//System.out.println("evalUser: " + propVal);
+
+ if (propVal != null) {
+ Data.Type propType = typeByName((String)propDef.get("type"));
+
+ if (!propType.evaluator().eval(propVal, propDef, theCtx)) {
+ res = false;
+ //the constraints evaluator should have already added an error
+ //theCtx.addError("Property " + propEntry.getKey() + " failed evaluation for " + propVal, null);
+ }
+ }
+ }
+ return res;
+ }
+
+ public static boolean evalUserConstraints(Object theVal,
+ Map theDef,
+ Checker.CheckContext theCtx) {
+ boolean res = true;
+ Map val = (Map)theVal;
+ Iterator<Map.Entry> props = theCtx.catalog()
+ .facets(Construct.Data,
+ Facet.properties,
+ (String)theDef.get("type"));
+ while (props.hasNext()) {
+ Map.Entry propEntry = props.next();
+ Map propDef = (Map)propEntry.getValue();
+ Object propVal = val.get(propEntry.getKey());
+
+ if (propVal != null) {
+ Data.Type propType = typeByName((String)propDef.get("type"));
+
+ if (propType.constraintsEvaluator() != null &&
+ !propType.constraintsEvaluator().eval(propVal, propDef, theCtx)) {
+ res = false;
+ //the constraints evaluator should have already added an error
+ //theCtx.addError("Property " + propEntry.getKey() + " failed evaluation for " + propVal, null);
+ }
+ }
+ else {
+ if (Boolean.TRUE.equals(propDef.getOrDefault("required", Boolean.FALSE)) &&
+ !propDef.containsKey("default")) {
+ theCtx.addError("Property " + propEntry.getKey() + " failed 'required' constraint; definition is " + propDef, null);
+ res = false;
+ }
+ }
+ }
+ return res;
+ }
+
+ private static boolean valueOf(Checker.CheckContext theCtx,
+ Object theExpr,
+ Class ... theTypes) {
+ for (Class type: theTypes) {
+ if (type.isAssignableFrom(theExpr.getClass())) {
+ return true;
+ }
+ }
+
+ theCtx.addError("Expression " + theExpr + " as " + theExpr.getClass().getName() + " is not compatible with any of required types: " + Arrays.toString(theTypes), null);
+ return false;
+ }
+
+/*
+ private static boolean valueOf(Class theTarget,
+ String theExpr,
+ Checker.CheckContext theCtx) {
+ try {
+ theTarget.getMethod("valueOf", new Class[] {String.class})
+ .invoke(null, theExpr);
+ return true;
+ }
+ catch (InvocationTargetException itx) {
+ theCtx.addError("Failed to parse " + theExpr + " as a " + theTarget.getName(), itx.getCause());
+ return false;
+ }
+ catch (Exception x) {
+ theCtx.addError("Failed to valueOf " + theExpr + " as a " + theTarget.getName(), x);
+ return false;
+ }
+ }
+*/
+
+ /*
+ * Function evaluation.
+ * note to self: is there a more efficient way of retrieving a map's
+ * single entry? (without knowing the key)
+ *
+ * ! Function evaluators have to handle a null definition (i.e. perform argument checking) so that
+ * we can use them in the context of collections without an entry_schema
+ */
+
+ //just like Type, but is it worth expressing this 'commonality'?
+
+ public static interface Function {
+
+ public String name();
+
+ public Evaluator evaluator();
+ }
+
+ /*
+ * This is a heuristic induced from the TOSCA specification: it answers the
+ * question of whether the given expression is a function call
+ */
+ public static Function function(Object theExpr) {
+ if (theExpr instanceof Map &&
+ ((Map)theExpr).size() == 1) {
+ try {
+ return Enum.valueOf(CoreFunction.class, functionName(theExpr));
+ }
+ catch (IllegalArgumentException iax) {
+ //no such function but we cannot really record an error as we only guessed the expression as being a function ..
+ log.info("Failed attempt to interpret " + theExpr + " as a function call");
+ }
+ }
+
+ return null;
+ }
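+
+ /*
+ * For illustration (not part of the original source; names are hypothetical): the parsed YAML value
+ * {get_input=vnf_name} is a single-entry map whose key matches CoreFunction.get_input, so function()
+ * returns that enum constant; {foo=bar} is also a single-entry map but has no matching CoreFunction,
+ * so the attempt is only logged and null is returned; a plain scalar likewise returns null.
+ */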
+
+ /*
+ */
+ public static String functionName(Object theExpr) {
+ return (String)
+ ((Map.Entry)
+ ((Map)theExpr).entrySet().iterator().next())
+ .getKey();
+ }
+
+ /*
+ */
+ public static Data.Function functionByName(String theName) {
+ return Enum.valueOf(CoreFunction.class, theName);
+ }
+
+ /*
+ */
+ public static enum CoreFunction implements Function {
+
+ concat(Data::evalConcat),
+ token(Data::evalToken),
+ get_input(Data::evalGetInput),
+ get_property(Data::evalGetProperty),
+ get_attribute(Data::evalGetAttribute),
+ get_operation_output((expr,def,ctx) -> true),
+ get_nodes_of_type(Data::evalGetNodesOfType),
+ get_artifact((expr,def,ctx) -> true);
+
+ private Evaluator evaluator;
+
+ private CoreFunction(Evaluator theEval) {
+ this.evaluator = theEval;
+ }
+
+ public Evaluator evaluator() {
+ return this.evaluator;
+ }
+ }
+
+ private static boolean evalConcat(
+ Object theVal, Map theDef, Checker.CheckContext theCtx) {
+ return true;
+ }
+
+ private static boolean evalToken(
+ Object theVal, Map theDef, Checker.CheckContext theCtx) {
+ return true;
+ }
+
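+ /*
+ * Illustration (not part of the original source; the input name is hypothetical): for a value such as
+ * {get_input=flavor_name}, the argument must be a String naming an input declared by the template
+ * (looked up through the catalog); when a property definition is available, the input's declared type
+ * must also be derived from the property's declared type (per catalog().isDerivedFrom).
+ */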
+ private static boolean evalGetInput(
+ Object theVal, Map theDef, Checker.CheckContext theCtx) {
+ Map val = (Map)theVal;
+ Map.Entry entry = (Map.Entry)val.entrySet().iterator().next();
+
+ if (!(entry.getValue() instanceof String)) {
+ theCtx.addError("get_input: argument must be a String" ,null);
+ return false;
+ }
+
+ //check that an input with the given name exists and has a compatible type
+ Map inputDef = theCtx.catalog()
+ .getTemplate(theCtx.target(), Construct.Data, (String)entry.getValue());
+ if (inputDef == null) {
+ theCtx.addError("get_input: no such input " + entry.getValue(), null);
+ return false;
+ }
+
+ if (theDef == null)
+ return true;
+
+ //the output must be type compatible with the input
+ String targetType = (String)theDef.get("type");
+ if (targetType != null) {
+ String inputType = (String)inputDef.get("type");
+
+ if (!theCtx.catalog()
+ .isDerivedFrom(Construct.Data, inputType, targetType)) {
+ theCtx.addError("get_input: input type " + inputType + " is incompatible with the target type " + targetType, null);
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ /*
+ * Who's the smarty that decided to define optional arguments in between
+ * required ones ?!
+ * (factors the evaluation of get_attribute and get_property)
+ */
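+ /*
+ * Illustration (not part of the original source; template/capability/property names are hypothetical):
+ * get_property: [ SELF, host, num_cpus ] -- 'host' is resolved first as a capability, then as a
+ * requirement of the enclosing node template's type, and 'num_cpus' must then be a property of the
+ * referenced capability type;
+ * get_property: [ my_server, flavor_name ] -- 'my_server' is resolved as a node template first, then
+ * as a relationship template, and 'flavor_name' must be a property of its type.
+ */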
+ private static boolean evalGetData(
+ Object theVal, Map theDef,
+ EnumSet<Facet> theFacets, Checker.CheckContext theCtx) {
+
+ Map val = (Map)theVal;
+ Map.Entry entry = (Map.Entry)val.entrySet().iterator().next();
+
+ if (!(entry.getValue() instanceof List)) {
+ theCtx.addError("get_property: argument must be a List" ,null);
+ return false;
+ }
+
+ List args = (List)entry.getValue();
+ if (args.size() < 2) {
+ theCtx.addError("'get_property' has at least 2 arguments", null);
+ return false;
+ }
+
+ //the first argument is a node or relationship template
+ String tmpl = (String)args.get(0);
+ Construct tmplConstruct = null;
+ Map tmplSpec = null;
+
+ if ("SELF".equals(tmpl)) {
+ tmpl = theCtx.enclosingConstruct(Construct.Node).name();
+ if (tmpl == null) {
+ tmpl = theCtx.enclosingConstruct(Construct.Relationship).name();
+ if (tmpl == null) {
+ theCtx.addError("'get_property' invalid SELF reference: no node or relationship template in scope at " + theCtx.getPath(), null);
+ return false;
+ }
+ else {
+ tmplConstruct = Construct.Relationship;
+ }
+ }
+ else {
+ tmplConstruct = Construct.Node;
+ }
+ tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), tmplConstruct, tmpl);
+ }
+ else if ("SOURCE".equals("tmpl")) {
+ //we are in the scope of a relationship template and this is the source node template.
+ tmpl = theCtx.enclosingConstruct(Construct.Relationship).name();
+ if (tmpl == null) {
+ theCtx.addError("'get_property' invalid SOURCE reference: no relationship template in scope at " + theCtx.getPath(), null);
+ return false;
+ }
+
+ return true;
+ }
+ else if ("TARGET".equals("tmpl")) {
+ //we are in the scope of a relationship template and this is the target node template.
+ tmpl = theCtx.enclosingConstruct(Construct.Relationship).name();
+ if (tmpl == null) {
+ theCtx.addError("'get_property' invalid TARGET reference: no relationship template in scope at " + theCtx.getPath(), null);
+ return false;
+ }
+
+ return true;
+ }
+ else if ("HOST".equals("tmpl")) {
+ tmpl = theCtx.enclosingConstruct(Construct.Node).name();
+ if (tmpl == null) {
+ theCtx.addError("'get_property' invalid HOST reference: no node template in scope at " + theCtx.getPath(), null);
+ return false;
+ }
+
+ return true;
+ }
+ else {
+ //try node template first
+ tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), Construct.Node, tmpl);
+ if (tmplSpec == null) {
+ //try relationship
+ tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), Construct.Relationship, tmpl);
+ if (tmplSpec == null) {
+ theCtx.addError("'get_data' invalid template reference '" + tmpl + "': no node or relationship template with this name", null);
+ return false;
+ }
+ else {
+ tmplConstruct = Construct.Relationship;
+ }
+ }
+ else {
+ tmplConstruct = Construct.Node;
+ }
+ }
+
+ int facetNameIndex = 1;
+ Construct facetConstruct = tmplConstruct; //whose construct the facet is supposed to belong to
+ Map facetConstructSpec = null;
+ String facetConstructType = null;
+
+ if (tmplConstruct.equals(Construct.Node) &&
+ args.size() > 2) {
+ //the second arg might be a capability or requirement name. If it is a
+ //capability then the third argument becomes a property of the
+ //corresponding capability type. If it is a requirement then the
+ //requirement definition indicates a capability whose type has a
+ //property with the name indicated in the third argument ..
+ //
+ //while the spec does not make it explicit, this can only take place
+ //if the first argument turned out to be a node template (as relationship
+ //templates/types do not have capabilities/requirements)
+ String secondArg = (String)args.get(1);
+ if ((facetConstructSpec = theCtx.catalog().getFacetDefinition(
+ tmplConstruct,
+ (String)tmplSpec.get("type"),
+ Facet.capabilities,
+ secondArg)) != null) {
+ facetNameIndex = 2;
+ facetConstruct = Construct.Capability;
+ facetConstructType = (String)facetConstructSpec.get("type");
+ }
+ else if ((facetConstructSpec = theCtx.catalog().getRequirementDefinition(
+ tmplConstruct,
+ (String)tmplSpec.get("type"),
+ secondArg)) != null) {
+ facetNameIndex = 2;
+ facetConstruct = Construct.Capability;
+
+ //find the spec of the capability this requirement points to
+ //TODO: check, can the capability reference be anything else but a capability type?
+ facetConstructType = (String)facetConstructSpec.get("capability");
+ }
+ }
+ else {
+ //we'll attempt to handle it as a property of the node template
+ facetConstruct = Construct.Node;
+ facetConstructSpec = tmplSpec;
+ facetConstructType = (String)facetConstructSpec.get("type");
+ }
+
+ //validate the facet name
+ Map facetSpec = null;
+ {
+ String facetName = (String)args.get(facetNameIndex);
+ for (Facet facet: theFacets) {
+//System.out.println("get_data: looking at " + facetConstruct + " " + facetConstructType + ", " + facet + " " + facetName);
+ facetSpec = theCtx.catalog()
+ .getFacetDefinition(
+ facetConstruct,
+ facetConstructType,
+ facet,
+ facetName);
+ if (facetSpec != null)
+ break;
+ }
+
+ if (facetSpec == null) {
+//TODO: not the greatest message if the call started with a requirement ..
+ theCtx.addError("'get_data' invalid reference, '" + facetConstruct + "' " + facetConstructType + " has no " + theFacets + " with name " + facetName, null);
+ return false;
+ }
+ }
+
+ //the rest of the arguments have to resolve to a field of the property's
+ //data type; the propertySpec contains the type specification
+ for (int i = facetNameIndex + 1; i < args.size(); i++) {
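+ //TODO: not implemented yet - each remaining argument should be resolved against the
+ //(possibly nested) data type definition of the facet selected above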
+ }
+
+ return true;
+ }
+
+ /**/
+ private static boolean evalGetProperty(
+ Object theVal, Map theDef, Checker.CheckContext theCtx) {
+ return evalGetData(theVal, theDef, EnumSet.of(Facet.properties), theCtx);
+ }
+
+ /*
+ * get_property and get_attribute are identical, just operating on different
+ * facets, with one exception: there is an intrinsic attribute for every
+ * declared property.
+ */
+ private static boolean evalGetAttribute(
+ Object theVal, Map theDef, Checker.CheckContext theCtx) {
+ return evalGetData(theVal, theDef, EnumSet.of(Facet.attributes, Facet.properties), theCtx);
+ }
+
+ private static boolean evalGetNodesOfType(
+ Object theVal, Map theDef, Checker.CheckContext theCtx) {
+
+ Map val = (Map)theVal;
+ Map.Entry entry = (Map.Entry)val.entrySet().iterator().next();
+
+ if (!(entry.getValue() instanceof String)) {
+ theCtx.addError("get_nodes_of_type: argument must be a String", null);
+ return false;
+ }
+
+ String arg = (String)entry.getValue();
+
+ if (null == theCtx.catalog().getTypeDefinition(Construct.Node, arg)) {
+ theCtx.addError("get_nodes_of_type: no such node type " + arg, null);
+ return false;
+ }
+ else {
+ return true;
+ }
+ }
+
+ /* */
+ public static Constraint constraintByName(String theName) {
+ return Enum.valueOf(Constraint.class, theName);
+ }
+
+ /* */
+ public static Constraint constraint(Object theExpr) {
+ if (theExpr instanceof Map &&
+ ((Map)theExpr).size() == 1) {
+ return constraintByName(constraintName(theExpr));
+ }
+
+ return null;
+ }
+
+ /* */
+ public static String constraintName(Object theExpr) {
+ return (String)
+ ((Map.Entry)
+ ((Map)theExpr).entrySet().iterator().next())
+ .getKey();
+ }
+
+ private static Object getConstraintValue(Map theDef,
+ Constraint theConstraint) {
+ List<Map> constraints = (List<Map>)theDef.get("constraints");
+ if (null == constraints)
+ return null;
+
+ for(Map constraint: constraints) {
+ Object val = constraint.get(theConstraint.toString());
+ if (val != null)
+ return val;
+ }
+ return null;
+ }
+
+ public static enum Constraint {
+ equal,
+ greater_than,
+ greater_or_equal,
+ less_than,
+ less_or_equal,
+ in_range,
+ valid_values,
+ length,
+ min_length,
+ max_length,
+ pattern;
+ }
+
+
+ /* Holds the constraint evaluators for pairs of type/constraint.
+ * If a pair is not present then the given constraint does not apply
+ * to the type.
+ */
+ private static Table<Type,Constraint,Evaluator> typeConstraintEvaluator = null;
+
+ public static Evaluator
+ getTypeConstraintEvaluator(Type theType, Constraint theConstraint) {
+ if (typeConstraintEvaluator == null) {
+ typeConstraintEvaluator = HashBasedTable.create();
+
+ typeConstraintEvaluator.put(CoreType.String, Constraint.equal,
+ (val,def,ctx) -> val.equals(getConstraintValue(def,Constraint.equal)));
+ typeConstraintEvaluator.put(CoreType.String, Constraint.valid_values,
+ (val,def,ctx) -> {
+ return ((List)getConstraintValue(def,Constraint.valid_values)).contains(val);
+ });
+ typeConstraintEvaluator.put(CoreType.String, Constraint.length,
+ (val,def,ctx) -> ((String)val).length() == ((Number)getConstraintValue(def,Constraint.length)).intValue());
+ typeConstraintEvaluator.put(CoreType.String, Constraint.min_length,
+ (val,def,ctx) -> ((String)val).length() >= ((Number)getConstraintValue(def,Constraint.min_length)).intValue());
+ typeConstraintEvaluator.put(CoreType.String, Constraint.max_length,
+ (val,def,ctx) -> ((String)val).length() <= ((Number)getConstraintValue(def,Constraint.max_length)).intValue());
+ typeConstraintEvaluator.put(CoreType.String, Constraint.pattern,
+ (val,def,ctx) -> Pattern.compile((String)getConstraintValue(def,Constraint.pattern))
+ .matcher((String)val)
+ .matches());
+
+ typeConstraintEvaluator.put(CoreType.Integer, Constraint.equal,
+ (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.equal)) == 0);
+ typeConstraintEvaluator.put(CoreType.Integer, Constraint.greater_than,
+ (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.greater_than)) > 0);
+ typeConstraintEvaluator.put(CoreType.Integer, Constraint.greater_or_equal,
+ (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.greater_or_equal)) >= 0);
+ typeConstraintEvaluator.put(CoreType.Integer, Constraint.less_than,
+ (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.less_than)) < 0);
+ typeConstraintEvaluator.put(CoreType.Integer, Constraint.less_or_equal,
+ (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.less_or_equal)) <= 0);
+ typeConstraintEvaluator.put(CoreType.Integer, Constraint.in_range,
+ (val,def,ctx) -> { List<Integer> range = (List<Integer>)getConstraintValue(def, Constraint.in_range);
+ return ((Integer)val).compareTo(range.get(0)) >= 0 &&
+ ((Integer)val).compareTo(range.get(1)) <= 0;
+ });
+ typeConstraintEvaluator.put(CoreType.Integer, Constraint.valid_values,
+ (val,def,ctx) -> ((List<Integer>)getConstraintValue(def, Constraint.valid_values)).contains((Integer)val));
+
+//the yaml parser represents yaml floats as java Doubles; we are even more tolerant here, as many
+//float values get written as ints, in which case the parser produces an Integer
+ typeConstraintEvaluator.put(CoreType.Float, Constraint.equal,
+ (val,def,ctx) -> ((Number)val).doubleValue() == ((Number)getConstraintValue(def,Constraint.equal)).doubleValue());
+ typeConstraintEvaluator.put(CoreType.Float, Constraint.greater_than,
+ (val,def,ctx) -> ((Number)val).doubleValue() > ((Number)getConstraintValue(def,Constraint.greater_than)).doubleValue());
+ typeConstraintEvaluator.put(CoreType.Float, Constraint.greater_or_equal,
+ (val,def,ctx) -> ((Number)val).doubleValue() >= ((Number)getConstraintValue(def,Constraint.greater_or_equal)).doubleValue());
+ typeConstraintEvaluator.put(CoreType.Float, Constraint.less_than,
+ (val,def,ctx) -> ((Number)val).doubleValue() < ((Number)getConstraintValue(def,Constraint.less_than)).doubleValue());
+ typeConstraintEvaluator.put(CoreType.Float, Constraint.less_or_equal,
+ (val,def,ctx) -> ((Number)val).doubleValue() <= ((Number)getConstraintValue(def,Constraint.less_or_equal)).doubleValue());
+ typeConstraintEvaluator.put(CoreType.Float, Constraint.in_range,
+ (val,def,ctx) -> { List<Number> range = (List<Number>)getConstraintValue(def, Constraint.in_range);
+ return ((Number)val).doubleValue() >= range.get(0).doubleValue() &&
+ ((Number)val).doubleValue() <= range.get(1).doubleValue();
+ });
+ typeConstraintEvaluator.put(CoreType.Float, Constraint.valid_values,
+ (val,def,ctx) -> ((List<Number>)getConstraintValue(def, Constraint.valid_values)).contains((Number)val));
+ }
+
+ Evaluator eval = typeConstraintEvaluator.get(theType, theConstraint);
+
+ return eval == null ? (expr,def,ctx) -> true
+ : eval;
+ }
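+
+ /*
+ * Illustrative use (a sketch, not part of the original source; variable names are hypothetical):
+ * for a string property whose parsed definition contains
+ *
+ * constraints:
+ * - max_length: 8
+ *
+ * the lookup getTypeConstraintEvaluator(CoreType.String, Constraint.max_length) returns the lambda
+ * registered above, and eval("tosca", thePropertyDefinition, theContext) yields true because the
+ * value's length does not exceed 8; a type/constraint pair with no registered evaluator falls back
+ * to the permissive (expr,def,ctx) -> true.
+ */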
+
+
+ private static boolean stringValidValues(String theVal,
+ List<String> theValidValues,
+ Checker.CheckContext theCtx) {
+ if (!theValidValues.contains(theVal)) {
+ theCtx.addError("not a valid value: " + theVal + " not part of " + theValidValues, null);
+ return false;
+ }
+
+ return true;
+ }
+
+ public static final void main(String[] theArgs) {
+ Data.CoreType dt = Enum.valueOf(Data.CoreType.class, theArgs[0]);
+ System.out.println(theArgs[1] + " > " + dt.evaluator().eval(theArgs[1], null, null));
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Facet.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Facet.java
new file mode 100644
index 0000000..ea9fd48
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Facet.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+/*
+ * Oddballs:
+ * - requirements: a requirement does not have a type (i.e. it is not based
+ * on a Construct) and can target a node, a capability or both. When present
+ * as a facet of another Construct it is also the only one represented as a
+ * sequence, so it will need special handling anyway.
+ */
+public enum Facet {
+
+ inputs(Construct.Data),
+ outputs(Construct.Data),
+ properties(Construct.Data),
+ attributes(Construct.Data),
+ capabilities(Construct.Capability),
+ //requirements(Construct.Capability),//??
+ artifacts(Construct.Artifact),
+ interfaces(Construct.Interface);
+ /*
+ Node
+ Relationship
+ they can be considered as facets of the topology template ...
+ */
+
+ private Construct construct;
+
+ private Facet(Construct theConstruct) {
+ this.construct = theConstruct;
+ }
+
+ public Construct construct() {
+ return this.construct;
+ }
+}
+
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Messages.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Messages.java
new file mode 100644
index 0000000..98158da
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Messages.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+import java.text.MessageFormat;
+import java.util.ResourceBundle;
+import java.util.MissingResourceException;
+
+/*
+ * This class should be generated programmatically based on the keys available in messages.properties
+ */
+public class Messages {
+
+ private ResourceBundle messages;
+
+ public Messages() {
+ try {
+ this.messages = ResourceBundle.getBundle("org/onap/tosca/checker/messages");
+ }
+ catch (MissingResourceException mrx) {
+ throw new RuntimeException("", mrx);
+ }
+
+ //check that the Message enum is in sync with the resource bundle
+ }
+
+ public String format(Message theMessage, Object[] theArgs) {
+ String message = this.messages.getString(theMessage.name());
+ if (message == null)
+ throw new RuntimeException("Un-available message: " + theMessage);
+
+ return MessageFormat.format(message, theArgs);
+ }
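+
+ /*
+ * Illustrative use (a sketch, not part of the original source; the argument is hypothetical and the
+ * actual placeholders are defined in messages.properties):
+ *
+ * String msg = new Messages().format(Message.INVALID_TYPE_REFERENCE,
+ * new Object[] {"tosca.nodes.Compute"});
+ */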
+
+ public enum Message {
+ EMPTY_TEMPLATE,
+ INVALID_CONSTRUCT_REFERENCE,
+ INVALID_TYPE_REFERENCE,
+ INVALID_TEMPLATE_REFERENCE,
+ INVALID_INTERFACE_REFERENCE,
+ INVALID_FACET_REFERENCE,
+ INCOMPATIBLE_REQUIREMENT_TARGET
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Paths.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Paths.java
new file mode 100644
index 0000000..2cedaca
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Paths.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Iterator;
+
+import org.apache.commons.jxpath.JXPathContext;
+import org.apache.commons.jxpath.JXPathException;
+
+import org.onap.tosca.checker.annotations.Catalogs;
+
+/*
+ * Facilitates processing of catalog information through xpath expressions.
+ * In development.
+ */
+public class Paths {
+
+ private Map<Target, JXPathContext> paths = new HashMap<Target, JXPathContext>();
+ private JXPathContext types = JXPathContext.newContext(new HashMap());
+
+ @Catalogs(path="/")
+ public void catalog_root(Map theDefinition, Checker.CheckContext theContext) {
+ paths.put(theContext.target(), JXPathContext.newContext(theContext.target().getTarget()));
+ theDefinition
+ .forEach((k,v) -> { if (k.toString().endsWith("_types"))
+ ((Map)types.getContextBean()).merge(k, v, (v1, v2) -> { ((Map)v1).putAll((Map)v2);
+ return v1;
+ });
+ });
+ }
+
+ public Object resolve(Target theTarget, String thePath) {
+ return paths.get(theTarget).getValue(thePath);
+ }
+
+ public Object resolve(String thePath) {
+ return types.getValue(thePath);
+ }
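+
+ /*
+ * Illustrative use (a sketch, not part of the original source; the node type is a standard TOSCA one):
+ * after catalog_root has merged all the *_types sections into the shared 'types' context,
+ * resolve("/node_types/tosca.nodes.Compute/properties") navigates the merged map through jxpath,
+ * while resolve(theTarget, "/topology_template/node_templates") does the same against one document.
+ */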
+
+ /* */
+ public String nodeType(String theNodeType) {
+ return (String)resolve("/node_types/" + theNodeType);
+ }
+
+ public Map nodeTypeInterface(String theNodeType, String theInterfaceName) {
+ return (Map)resolve("/node_types/" + theNodeType + "/interfaces/" + theInterfaceName);
+ }
+
+ public String nodeTypeInterfaceType(String theNodeType, String theInterfaceName) {
+ return (String)resolve("/node_types/" + theNodeType + "/interfaces/" + theInterfaceName + "/type");
+ }
+
+ /* */
+ public Map interfaceType(String theInterfaceType) {
+ return (Map)resolve("/interface_types/" + theInterfaceType);
+ }
+
+ //this will also resolve 'metadata', 'derived_from', version and other common entries as operations
+ public Map interfaceTypeOperation(String theInterfaceType, String theOperationName) {
+ return (Map)resolve("/interface_types/" + theInterfaceType + "/" + theOperationName);
+ }
+
+ /* node templates */
+ public String nodeTemplateType(Target theTarget, String theNodeName) {
+ return (String)resolve(theTarget, "/topology_template/node_templates/" + theNodeName + "/type");
+ }
+
+ public Map nodeTemplateInterface(Target theTarget, String theNodeName, String theInterfaceName) {
+ return (Map)resolve(theTarget, "/topology_template/node_templates/" + theNodeName + "/interfaces/" + theInterfaceName);
+ }
+
+ public Map nodeTemplateInterfaceOperation(
+ Target theTarget, String theNodeName, String theInterfaceName, String theOperationName) {
+ return (Map)resolve(theTarget, "/topology_template/node_templates/" + theNodeName + "/interfaces/" + theInterfaceName + "/" + theOperationName);
+ }
+
+
+ /* */
+ public Map nodeTypeInterfaceOperation(String theNodeType, String theInterfaceName, String theOperationName) {
+ return (Map)resolve("/node_types/" + theNodeType + "/interfaces/" + theInterfaceName + "/" + theOperationName);
+ }
+
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Report.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Report.java
new file mode 100644
index 0000000..bce0729
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Report.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+import java.io.IOException;
+
+import java.util.LinkedList;
+import java.util.Collections;
+
+import org.yaml.snakeyaml.error.MarkedYAMLException;
+import kwalify.ValidationException;
+
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+
+/**
+ * Represents a collection of errors that occurred during one of the stages
+ * of the checker: yaml parsing, yaml validation (tosca syntax), tosca checking
+ */
+/*
+ * This needs some re-thinking: while it is useful to keep all the original errors, introducing
+ * the custom json conversion (just to help the service) is not great either.
+ * I was torn between this approach and creating a custom deserializer and object mapper (which
+ * would have kept all the customized serialization in the service, but then the error analysis
+ * would have been duplicated there too ..).
+ */
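+/*
+ * For illustration (not part of the original source; the values are made up): with the serializer
+ * below, each report entry is rendered as a small json object along the lines of
+ * {"location": "line 12, column 3", "message": "..."} with an optional "cause" field.
+ */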
+@JsonSerialize(contentUsing=org.onap.tosca.checker.Report.ReportEntrySerializer.class)
+public class Report<T extends Throwable> extends LinkedList<T> {
+
+ public Report() {
+ }
+
+ public Report(T[] theErrors) {
+ Collections.addAll(this, theErrors);
+ }
+
+ public boolean hasErrors() {
+ return !this.isEmpty();
+ }
+
+ public boolean addOnce(T theError) {
+ for (T e: this) {
+ if (e.getMessage().equals(theError.getMessage()))
+ return false;
+ }
+ return add(theError);
+ }
+
+ public String toString() {
+ StringBuilder sb = new StringBuilder(this.size() + " errors");
+ for (Throwable x: this) {
+ sb.append("\n")
+ .append("[")
+ .append(location(x))
+ .append("] ")
+ .append(x.getMessage());
+ if (x.getCause() != null) {
+ sb.append("\n\tCaused by:\n")
+ .append(x.getCause());
+ }
+ }
+ sb.append("\n");
+ return sb.toString();
+ }
+
+ private static String location(Throwable theError) {
+ if (theError instanceof MarkedYAMLException) {
+ MarkedYAMLException mx = (MarkedYAMLException)theError;
+ return "line " + mx.getProblemMark().getLine() + ", column " + mx.getProblemMark().getColumn();
+ }
+ if (theError instanceof ValidationException) {
+ ValidationException vx = (ValidationException)theError;
+ return vx.getPath();
+ }
+ if (theError instanceof TargetError) {
+ TargetError tx = (TargetError)theError;
+ return tx.getLocation();
+ }
+ return "unknown";
+ }
+
+
+ public static class ReportEntrySerializer extends StdSerializer<Throwable> {
+
+ public ReportEntrySerializer() {
+ super(Throwable.class);
+ }
+
+ @Override
+ public void serialize(Throwable theError, JsonGenerator theGenerator, SerializerProvider theProvider)
+ throws IOException, JsonProcessingException {
+ theGenerator.writeStartObject();
+ theGenerator.writeStringField("location", location(theError));
+ theGenerator.writeStringField("message", theError.getMessage());
+ if (theError.getCause() != null)
+ theGenerator.writeStringField("cause", theError.getCause().toString());
+ theGenerator.writeEndObject();
+ }
+ }
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Repository.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Repository.java
new file mode 100644
index 0000000..66c37af
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Repository.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+
+import java.net.URI;
+import java.net.URL;
+import java.net.MalformedURLException;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import java.util.Map;
+
+/**
+ * Represents a 'container' of (yaml) TOSCA documents
+ */
+public abstract class Repository {
+
+ protected static Logger log =
+ Logger.getLogger("com.att.research.is.tosca.yaml.Repository");
+
+ private String name,
+ description;
+ protected URI rootURI;
+ protected Map credential; //TOSCA type tosca.datatype.Credential
+
+ public Repository(String theName, URI theRoot) {
+ this.name = theName;
+ this.rootURI = theRoot;
+ }
+
+ public String getName() {
+ return this.name;
+ }
+
+ public URI getRoot() {
+ return this.rootURI;
+ }
+
+ /** optional */
+ public abstract Iterable<Target> targets();
+
+ /** */
+ public abstract Target resolve(URI theURI);
+
+ @Override
+ public String toString() {
+ return "Repository " + this.name + " at " + this.rootURI;
+ }
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Stage.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Stage.java
new file mode 100644
index 0000000..416a533
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Stage.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+/*
+ * The Target processing stages
+ */
+public enum Stage {
+
+ located, /* not really used as we do not track a Target that we cannot locate */
+ parsed, /* yaml parsing successfully completed */
+ validated, /* syntax check successfully completed: document is compliant to the yaml tosca grammar */
+ cataloged, /* all the constructs have been cataloged */
+ checked; /* 'semantic' checking completed */
+
+ private static final Stage[] stages = values();
+
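+ /*
+ * Note added for clarity (not in the original source): next() simply steps through values(), so
+ * calling it on the final stage ('checked') would run past the end of the array and throw an
+ * ArrayIndexOutOfBoundsException; callers are expected to stop advancing once 'checked' is reached.
+ */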
+ public Stage next() {
+ return stages[ordinal() + 1];
+ }
+}
+
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Target.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Target.java
new file mode 100644
index 0000000..c21593b
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Target.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.io.BufferedReader;
+import java.io.IOException;
+
+import java.net.URI;
+import java.net.URL;
+import java.net.MalformedURLException;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/**
+ * Represents a yaml document to be parsed/validated/checked.
+ * A Target's scope is the checker that is processing it.
+ */
+public class Target {
+
+ private static Logger log =
+ Logger.getLogger("com.att.research.is.tosca.yaml.Target");
+
+ private String name;
+ private URI location;
+ //while it is convenient to have the information below in here, I am conflicted about it
+ //as it represents processing state
+ private Object target; //this is the parsed form of the target
+ private Stage stage = Stage.located; //the processing stage this target has reached
+ private Report report = new Report(); //collects the errors related to this target
+
+ public Target(String theName, URI theLocation) {
+ this.name = theName;
+ this.location = theLocation;
+ }
+
+ public String getName() {
+ return this.name;
+ }
+
+ public URI getLocation() {
+ return this.location;
+ }
+
+ public Report getReport() {
+ return this.report;
+ }
+
+ public void report(Throwable theError) {
+ this.report.add(theError);
+ }
+
+ public void report(String theErrMsg) {
+ this.report.add(new Exception(theErrMsg));
+ }
+
+ public void setTarget(Object theTarget) {
+ this.target = theTarget;
+ }
+
+ public Object getTarget() {
+ return this.target;
+ }
+
+ public Stage getStage() {
+ return this.stage;
+ }
+
+ public void setStage(Stage theStage) {
+ if (theStage.ordinal() <= this.stage.ordinal())
+ throw new IllegalArgumentException("Stage can only be set forward.");
+
+ this.stage = theStage;
+ }
+
+ public void nextStage() {
+ setStage(this.stage.next());
+ }
+
+ /*
+ * @return a reader for the source; an IOException is thrown if the source cannot be opened
+ */
+ public Reader open() throws IOException {
+
+ return new BufferedReader(
+ new InputStreamReader(
+ this.location.toURL().openStream()));
+ }
+
+ public String toString() {
+ //return String.format("Target %s (%.20s ...)", this.location, this.target == null ? "" : this.target.toString());
+ return String.format("Target %s at %s", this.name, this.location);
+
+ }
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetError.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetError.java
new file mode 100644
index 0000000..917ec3f
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetError.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+
+/**
+ * A target error represents an error in the target resource being checked.
+ * We only represent it as a Throwable because the libraries that perform parsing and syntax validation
+ * represent their errors as such ..
+ */
+public class TargetError extends Throwable {
+
+ /*
+ public static enum Level {
+ error,
+ warning
+ }
+ */
+
+ private String location; //we might need a more detailed representation
+ //here: it could be a YAML document jpath or
+ //document location (line).
+ private String target;
+
+ public TargetError(String theTarget, String theLocation, String theMessage, Throwable theCause) {
+ super(theMessage, theCause);
+ this.target = theTarget;
+ this.location = theLocation;
+ }
+
+ public TargetError(String theTarget, String theLocation, String theMessage) {
+ this(theTarget, theLocation, theMessage, null);
+ }
+
+ public String getTarget() {
+ return this.target;
+ }
+
+ public String getLocation() {
+ return this.location;
+ }
+
+
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetLocator.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetLocator.java
new file mode 100644
index 0000000..920e1a6
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetLocator.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+import java.net.URI;
+
+/**
+ * Used to locate a document that needs to be processed by the checker
+ */
+public interface TargetLocator {
+
+ /** */
+ public boolean addSearchPath(URI theURI);
+
+ /** */
+ public boolean addSearchPath(String thePath);
+
+ /** */
+ public Iterable<URI> searchPaths();
+
+ /** */
+ public Target resolve(String theName);
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Workflows.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Workflows.java
new file mode 100644
index 0000000..3f6a445
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Workflows.java
@@ -0,0 +1,287 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+import java.util.Map;
+import java.util.List;
+import java.util.Iterator;
+
+import org.onap.tosca.checker.annotations.Checks;
+
+/**
+ * Workflow checking is isolated here as it is of significant size. It showcases the way checking can be added to the core
+ * checker through annotations.
+ */
+@Checks
+public class Workflows {
+
+ @Checks(path="/topology_template/workflows")
+ public void check_workflows(Map theDefinition, Checker.CheckContext theContext) {
+
+ theContext.enter("workflows", null, theDefinition);
+
+ try {
+ if(!theContext.checker().checkDefinition("workflows", theDefinition, theContext))
+ return;
+
+ for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_workflow_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+
+ public void check_workflow_definition(String theName, Map theDef, Checker.CheckContext theContext) {
+
+ theContext.enter("workflow", Construct.Workflow);
+
+ if (theDef.containsKey("inputs")) {
+ theContext
+ .checker()
+ .check_properties((Map<String,Map>)theDef.get("inputs"), theContext);
+ }
+
+ if (theDef.containsKey("preconditions")) {
+ check_workflow_preconditions_definition((List<Map>)theDef.get("preconditions"), theContext);
+ }
+
+ if (theDef.containsKey("steps")) {
+ check_workflow_steps_definition((Map<String, Map>)theDef.get("steps"), theContext);
+ }
+
+ theContext.exit();
+ }
+
+
+ public void check_workflow_steps_definition(Map theSteps, Checker.CheckContext theContext) {
+
+ theContext.enter("steps");
+
+ try {
+ for (Iterator<Map.Entry<String,Map>> i = theSteps.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,Map> e = i.next();
+ check_workflow_step_definition(e.getKey(), e.getValue(), theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+
+ }
+
+ public void check_workflow_step_definition(String theName, Map theDef, Checker.CheckContext theContext) {
+
+ theContext.enter(theName, null, theDef);
+ try {
+ //target
+ check_target_definition(theDef, theContext);
+
+ //operation_host
+
+ //filter: sequence of constraints with workflow assertion definitions (see section 3.5.18)
+ //where the keys are attribute names: representing the name of an attribute defined on the assertion context
+ //entity (node instance, relationship instance, group instance)
+ //we could verify that all the attributes are defined .. how accurate can that be
+
+ //activities
+ List<Map> activities = (List)theDef.get("activities");
+ if (activities != null) {
+ for (Map activity: activities) {
+ check_workflow_step_activity_definition(activity, theContext);
+ }
+ }
+
+ List successSteps = (List)theDef.get("on_success");
+ List failureSteps = (List)theDef.get("on_failure");
+ if (successSteps != null || failureSteps != null) {
+ Map steps = (Map)theContext.catalog().getTemplate(theContext.target(),
+ Construct.Workflow,
+ theContext.enclosingConstruct(Construct.Workflow).name())
+ .get("steps");
+
+ if (successSteps != null) {
+ for (Object successStep: successSteps) {
+ if (!steps.containsKey(successStep)) {
+ theContext.addError("The 'on_success' step " + successStep + " was not declared", null);
+ }
+ }
+ }
+
+ if (failureSteps != null) {
+ for (Object failureStep: failureSteps) {
+ if (!steps.containsKey(failureStep)) {
+ theContext.addError("The 'on_failure' step " + failureStep + " was not declared", null);
+ }
+ }
+ }
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ public void check_workflow_preconditions_definition(List<Map> thePreconditions, Checker.CheckContext theContext) {
+
+ theContext.enter("preconditions");
+
+ try {
+ for (Map precondition: thePreconditions) {
+ check_workflow_precondition_definition(precondition, theContext);
+ }
+ }
+ finally {
+ theContext.exit();
+ }
+ }
+
+ public void check_workflow_precondition_definition(Map theDef, Checker.CheckContext theContext) {
+
+ Map targetDef = check_target_definition(theDef, theContext);
+ if (targetDef != null) {
+ List<Map> condDef = (List<Map>)theDef.get("condition");
+ if (condDef != null) {
+ check_condition_clause_definition(targetDef, condDef, theContext);
+ }
+ }
+ }
+
+ public void check_workflow_step_activity_definition(Map theDef, Checker.CheckContext theContext) {
+ if (theDef.size() != 1)
+ theContext.addError("An activity has only one entry", null);
+
+ //only one entry expected
+ String delegate = (String)theDef.get("delegate");
+ if (delegate != null) {
+ //not clear to me what this is: a reference to a declarative workflow?
+ }
+
+ String state = (String)theDef.get("set_state");
+ if (state != null) {
+ }
+
+ String operation = (String)theDef.get("call_operation");
+ if (operation != null) {
+ int pos = operation.lastIndexOf('.');
+ if (pos < 0) {
+ theContext.addError("Invalid 'operation_call' statement", null);
+ }
+ else {
+ String interfaceName = operation.substring(0, pos),
+ interfaceOp = operation.substring(pos+1);
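+ //e.g. (illustrative, not from the original source) call_operation: Standard.create
+ //splits into interfaceName "Standard" and interfaceOp "create"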
+
+ //we expect the enclosing to be the workflow step, where the target is specified
+ String stepTarget = (String)((Map)theContext.enclosing().info()).get("target");
+ Paths paths = theContext.checker().getHandler(Paths.class);
+ Map opDef = null;
+
+ //look in the node template
+ if (null == (opDef = paths.nodeTemplateInterfaceOperation(
+ theContext.target(),
+ stepTarget,
+ interfaceName,
+ interfaceOp))) {
+ //look in the node type
+ if (null == (opDef = paths.nodeTypeInterfaceOperation(
+ paths.nodeTemplateType(theContext.target(), stepTarget),
+ interfaceName,
+ interfaceOp))) {
+
+ //look into the interface type definition, if the node has an interface with the given name
+ if (null ==
+ (opDef = paths.interfaceTypeOperation(
+ paths.nodeTypeInterfaceType(paths.nodeTemplateType(theContext.target(), stepTarget),
+ interfaceName),
+ interfaceOp))) {
+ theContext.addError("Step target node " + stepTarget + " does not have a " + interfaceName + " with operation " + interfaceOp, null);
+ }
+ }
+ }
+
+ System.out.println("*** opDef: " + opDef);
+ }
+ }
+
+ String workflow = (String)theDef.get("inline");
+ if (workflow != null && !((Map)theContext.enclosingElement("workflows").info()).containsKey(workflow)) {
+ theContext.addError("Activity 'inline' " + workflow + " was not declared", null);
+ }
+ }
+
+ /* it would have been great to be able to convey in the return value the type of construct
+ */
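+ /*
+ * Illustration (not part of the original source; template and requirement names are hypothetical):
+ * a step declared as
+ * target: my_server
+ * target_relationship: host
+ * resolves 'my_server' first among group templates, then among node templates; since it resolves to
+ * a node, the node's type must declare a 'host' requirement or an error is reported.
+ */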
+ protected Map check_target_definition(Map theDef, Checker.CheckContext theContext) {
+
+ String target = (String)theDef.get("target");
+ Construct targetConstruct = null;
+ Map targetDef = null;
+
+ if ((targetDef = theContext.catalog().getTemplate(theContext.target(), Construct.Group, target)) != null) {
+ targetConstruct = Construct.Group;
+ }
+ else if ((targetDef = theContext.catalog().getTemplate(theContext.target(), Construct.Node, target)) != null) {
+ targetConstruct = Construct.Node;
+ }
+ else {
+ theContext.addError("The 'target' entry must contain a reference to a node template or group template, '" + target + "' is none of those", null);
+ }
+
+ String targetRelationship = (String)theDef.get("target_relationship");
+ if (targetRelationship != null) {
+ if (Construct.Node.equals(targetConstruct)) { //avoids an NPE when the target could not be resolved above
+ //'The optional name of a requirement of the target in case the step refers to a relationship rather than a node or group'
+ if (null == theContext.catalog().getRequirementDefinition(Construct.Node,
+ (String)targetDef.get("type"),
+ targetRelationship)) {
+ theContext.addError("The 'target' (" + target + ") has no requirement '" + targetRelationship + "', as indicated by 'targetRelationship'", null);
+ }
+ }
+ else {
+ theContext.addError("The 'target_relationship' is only applicable if the 'target' (" + target + ") is a node template (which it is not)", null);
+ }
+ }
+
+ return targetDef;
+ }
+
+ protected boolean check_condition_clause_definition(Map theTargetDefinition,
+ List<Map> theConditionDef,
+ Checker.CheckContext theContext) {
+ boolean res = true;
+
+ for (Map clause: theConditionDef) {
+ Map.Entry clauseEntry = (Map.Entry)clause.entrySet().iterator().next();
+ String clauseType = (String)clauseEntry.getKey();
+
+ if ("and".equals(clauseType) || "or".equals(clauseType)) {
+ res &= check_condition_clause_definition(theTargetDefinition, (List<Map>)clauseEntry.getValue(), theContext);
+ }
+ else if ("assert".equals(clauseType)) {
+ List<Map> assertions = (List<Map>)clauseEntry.getValue();
+ for (Map assertion: assertions) {
+ //expected to be a one entry map
+ for (Object attributeName: assertion.keySet()) {
+ //the attribute must be defined in the given Target .. but there are also exposed built-in attributes (such as 'state') that we are not aware of
+ }
+ }
+ }
+ }
+
+ return res;
+ }
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Catalogs.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Catalogs.java
new file mode 100644
index 0000000..829d756
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Catalogs.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+
+/**
+ * Marks a method as a processing step during the cataloging phase. Allows for custom cataloging of (any) tosca
+ * construct information.
+ * The expected signature of a cataloging processing function is:
+ * param theDef of type matching the representation of the expected/processed construct
+ * param theContext exposes the processing state of the checker
+ *
+ * <div>
+ * {@code
+ * package org.onap.tosca.myaddons;
+ *
+ * import org.onap.tosca.checker.annotations.Catalogs;
+ *
+ * public class MyToscaCatalog {
+ * ..
+ * @Catalogs(path="/node_types")
+ * public void catalog_node_types(
+ * Map<String, Map> theDefinitions, CheckContext theContext) {
+ * //doing my thing
+ * }
+ * }
+ * </div>
+ *
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.METHOD})
+public @interface Catalogs {
+ String path() default "/";
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Checks.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Checks.java
new file mode 100644
index 0000000..856ac1b
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Checks.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+
+/**
+ * Marks a method as a processing step during the checking phase. Allows for additional checking to be 'plugged' in.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.METHOD})
+/**
+ * Marks a method as participating in the consistency checking stage. The method is expected to have the following
+ * signature:
+ * param theDef type of expected representation of the construct's information, for example Map<String,Map> for
+ * node_types or List<Map> for requirements
+ * param theContext CheckContext instance exposing the checker state
+ *
+ * Note: currently not used at type level
+ */
+/* The iffy part: as a type annotation we do not need a path or a version specification,
+ as a method annotation it is mandatory (cannot be the default)
+ We could foresee that a version indication at type level would cover all check handlers within the type
+ */
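+/*
+ * Illustrative check handler (a sketch, not part of the original source; the path and names are
+ * hypothetical):
+ *
+ * @Checks(path="/topology_template/policies")
+ * public void check_policies(Map theDefinition, Checker.CheckContext theContext) {
+ * //additional consistency checks, errors reported through theContext.addError(..)
+ * }
+ */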
+public @interface Checks {
+ String path() default "/";
+ String[] version() default { "1.0", "1.0.0", "1.1", "1.1.0" };
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Validates.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Validates.java
new file mode 100644
index 0000000..f34a9fc
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Validates.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+
+/**
+ * Marks a method as a processing step during the validation phase (grammar rules checking).
+ * The expected signature of a method so annotated is:
+ * param theObject the POJO currently being subject to validation
+ * param theRule kwalify yaml syntax rule representation
+ * param theContext kwalify validation context exposing the state of the validator
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.METHOD})
+public @interface Validates {
+
+ public static enum Timing {
+ pre,
+ post
+ }
+
+ /* empty string means 'all rules' */
+ String rule();
+
+ Validates.Timing timing();
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/package-info.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/package-info.java
new file mode 100644
index 0000000..e7b268a
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/package-info.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+
+/**
+ * Contains the definition of the annotations used to mark methods used in processing a TOSCA template, one annotation
+ * for each stage (not for locate and parsed: the location process is done through org.onap.tosca.checker.Locator and
+ * parsing is currently built in).
+ *
+ * The checker {@link org.onap.tosca.checker.Checker Checker} scans the classpath for classes with methods annotated
+ * accordingly to annotations in this package (currently we scan packages prefixed with org.onap.tosca).
+ *
+ * At different stages the checker will delegate processing to this annotated methods.
+ *
+ * {@link org.onap.tosca.checker.annotations.Validates Validates} marks a method as part of the validation stage, i.e.
+ * syntax checking. It requires the indication of the grammar rule it intends to add validation to and wether it
+ * performs pre or post processing.
+ *
+ * {@link org.onap.tosca.checker.annotations.Checks Checks} marks a method as part of the consistency checks stage.
+ * A method such annotated will be invoked when the processing reaches the document path indicated in the annotation.
+ * A document path follows the strcture of the TOSCA yaml document, for example:
+ * /node_types/some_node_type
+ * /topology_template/node_templates
+ * Note that current implementation works by delegation down the hierachical structure of the document. If no processor
+ * is in place for /topology_templates/ or the one(s) in place do not delegate any further then a processor for
+ * /topology_template/node_templates will not get involved. The framework only attempts the invocation of first level
+ * (hierachically speaking) checks.
+ *
+ * {@link org.onap.tosca.checker.annotations.Catalogs Catalogs} marks a method as part of the cataloging stage, i.e.
+ * the storage/indexing (of some sort, it does not really need to be storing) of a construct that has been validated.
+ * Same as with {@link org.onap.tosca.checker.annotations.Checks Checks} the scope of a Cataloger is determined through
+ * a document path specification.
+ *
+ * A third party can provide its own processing (by using these annotations) and perform custom verifications, indexing
+ * for special post-processing, etc.
+ */
+
+package org.onap.tosca.checker.annotations;
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifact.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifact.java
new file mode 100644
index 0000000..8d67f0f
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifact.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+/**
+ * Artifact definition, spec section 3.5.6
+ */
+public interface Artifact extends TOSCAObject<Artifact> {
+
+ public String name();
+
+ public String type();
+
+ public String description();
+
+ public String file();
+
+ public String repository();
+
+ public String deploy_path();
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactType.java
new file mode 100644
index 0000000..798a550
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactType.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+
+/**
+ * Artifact type definition, spec section 3.6.4
+ */
+public interface ArtifactType extends TOSCAObject<ArtifactType> {
+
+ public String name();
+
+ public String derived_from();
+
+ public String description();
+
+ public String version();
+
+ public default Metadata metadata() {
+ return (Metadata)proxy("metadata", Metadata.class);
+ }
+
+ public String mime_type();
+
+ public List<String> file_ext();
+
+ public default Properties properties() {
+ return (Properties)proxy("properties", Properties.class);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactTypes.java
new file mode 100644
index 0000000..d714cae
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactTypes.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface ArtifactTypes extends TOSCAMap<ArtifactType> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifacts.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifacts.java
new file mode 100644
index 0000000..7c22eac
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifacts.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface Artifacts extends TOSCAMap<Artifact> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attribute.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attribute.java
new file mode 100644
index 0000000..0515f78
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attribute.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+/**
+ */
+public interface Attribute extends TOSCAObject<Attribute> {
+
+ public String name();
+
+ public String type();
+
+ public String description();
+
+ public default Object _default() {
+ return info().get("default");
+ }
+
+ public Boolean required();
+
+ public String status();
+
+ public String entry_schema();
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attributes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attributes.java
new file mode 100644
index 0000000..d5ee2ca
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attributes.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface Attributes extends TOSCAMap<Attribute> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/AttributesAssignments.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/AttributesAssignments.java
new file mode 100644
index 0000000..70ac163
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/AttributesAssignments.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+/*
+ * A simple representation of the attribute value assignments, spec section 3.5.11
+ */
+public interface AttributesAssignments extends TOSCAObject<AttributesAssignments> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capabilities.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capabilities.java
new file mode 100644
index 0000000..31d0e45
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capabilities.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+/**
+ * Collection of type specific capability definitions (spec section 3.6.2)
+ */
+public interface Capabilities extends TOSCAMap<Capability> {
+
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilitiesAssignments.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilitiesAssignments.java
new file mode 100644
index 0000000..d0f4200
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilitiesAssignments.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+/**
+ * Collection of template specific capability assignment (spec section 3.7.2)
+ */
+public interface CapabilitiesAssignments extends TOSCAMap<CapabilityAssignment> {
+
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capability.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capability.java
new file mode 100644
index 0000000..72df0b7
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capability.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+
+/*
+ * Type specific capability definition, spec section 3.6.2
+ */
+public interface Capability extends TOSCAObject<Capability> {
+
+ /**
+ * The required name of the Capability Type the capability definition is based upon.
+ */
+ public String type();
+
+ /**
+ * The optional description of the Capability definition.
+ */
+ public String description();
+
+ /**
+ * An optional list of one or more valid names of Node Types that are supported as valid sources of any
+ * relationship established to the declared Capability Type.
+ */
+ public List<String> valid_source_types();
+
+ /**
+ * The optional minimum and maximum occurrences for the capability.
+ * By default, an exported Capability should allow at least one relationship to be formed with it with a
+ * maximum of UNBOUNDED relationships.
+ */
+ public default Range occurences() {
+ return (Range)proxyList("occurences", Range.class);
+ }
+
+ /**
+ * An optional list of property definitions for the Capability definition.
+ */
+ public default Properties properties() {
+ return (Properties)proxy("properties", Properties.class);
+ }
+
+ /**
+ * An optional list of attribute definitions for the Capability definition.
+ */
+ public default Attributes attributes() {
+ return (Attributes)proxy("attributes", Attributes.class);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityAssignment.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityAssignment.java
new file mode 100644
index 0000000..1c49dbe
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityAssignment.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+
+/*
+ * Type specific capability definition, spec section 3.7.1
+ */
+public interface CapabilityAssignment extends TOSCAObject<CapabilityAssignment> {
+
+ /** */
+ public String name();
+
+ /**
+ * An optional list of property definitions for the Capability definition.
+ */
+ public default PropertiesAssignments properties() {
+ return (PropertiesAssignments)proxy("properties", PropertiesAssignments.class);
+ }
+
+ /**
+ * An optional list of attribute definitions for the Capability definition.
+ */
+ public default AttributesAssignments attributes() {
+ return (AttributesAssignments)proxy("attributes", AttributesAssignments.class);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityType.java
new file mode 100644
index 0000000..64808d1
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityType.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+
+public interface CapabilityType extends TOSCAObject<CapabilityType> {
+
+ public String name();
+
+ public String derived_from();
+
+ public String description();
+
+ public String version();
+
+ public List<String> valid_source_types();
+
+ public default Properties properties() {
+ return (Properties)proxy("properties", Properties.class);
+ }
+
+ public default Attributes attributes() {
+ return (Attributes)proxy("attributes", Attributes.class);
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityTypes.java
new file mode 100644
index 0000000..3567afe
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityTypes.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface CapabilityTypes extends TOSCAMap<CapabilityType> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraint.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraint.java
new file mode 100644
index 0000000..03c8d20
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraint.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+/**
+ * constraint definition, spec section 3.6.6
+ */
+public interface Constraint extends TOSCAObject<Constraint> {
+
+ public Constraint.Type name();
+
+ /* this is a one entry map so here we pick the single
+ */
+ public default Object expression() {
+ return info().values().iterator().next();
+ }
+
+ public enum Type {
+ equal,
+ greater_than,
+ greater_or_equal,
+ less_than,
+ less_or_equal,
+ in_range,
+ valid_values,
+ length,
+ min_length,
+ max_length,
+ pattern
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraints.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraints.java
new file mode 100644
index 0000000..8413cd7
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraints.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface Constraints extends TOSCASeq<Constraint> {
+
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataType.java
new file mode 100644
index 0000000..941695c
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataType.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+/**
+ * Data type definition, spec section 3.6.6
+ */
+public interface DataType extends TOSCAObject<DataType> {
+
+ public String name();
+
+ public String derived_from();
+
+ public String description();
+
+ public String version();
+
+ public default Metadata metadata() {
+ return (Metadata)proxy("metadata", Metadata.class);
+ }
+
+ public default Properties properties() {
+ return (Properties)proxy("properties", Properties.class);
+ }
+
+ public default Constraints constraints() {
+ return (Constraints)proxy("constraints", Constraints.class);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataTypes.java
new file mode 100644
index 0000000..10b343a
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataTypes.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface DataTypes extends TOSCAMap<DataType> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/EntrySchema.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/EntrySchema.java
new file mode 100644
index 0000000..3aebaab
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/EntrySchema.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+/**
+ * Used in data type, property, input and so on definitions, see spec section
+ */
+public interface EntrySchema extends TOSCAObject<EntrySchema> {
+
+ public String type();
+
+ public String description();
+
+ public default Constraints constraints() {
+ return (Constraints)proxy("constraints", Constraints.class);
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Group.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Group.java
new file mode 100644
index 0000000..9e530c1
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Group.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+
+/**
+ * Spec section 3.7.5
+ */
+public interface Group extends TOSCAObject<Group> {
+
+ public String name();
+
+ public String type();
+
+ public String description();
+
+ public default Metadata metadata() {
+ return (Metadata)proxy("metadata", Metadata.class);
+ }
+
+ public default PropertiesAssignments properties() {
+ return (PropertiesAssignments)proxy("properties", PropertiesAssignments.class);
+ }
+
+ public default TemplateInterfaces interfaces() {
+ return (TemplateInterfaces)proxy("interfaces", TemplateInterfaces.class);
+ }
+
+ public List<String> members();
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupType.java
new file mode 100644
index 0000000..acaea03
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupType.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+
+/**
+ * Group type definition, spec section 3.6.11
+ */
+public interface GroupType extends TOSCAObject<GroupType> {
+
+ public String name();
+
+ public String derived_from();
+
+ public String description();
+
+ public String version();
+
+ public default Metadata metadata() {
+ return (Metadata)proxy("metadata", Metadata.class);
+ }
+
+ public List<String> members();
+
+ public default Properties properties() {
+ return (Properties)proxy("properties", Properties.class);
+ }
+
+ public default Attributes attributes() {
+ return (Attributes)proxy("attributes", Attributes.class);
+ }
+
+ public default TypeInterfaces interfaces() {
+ return (TypeInterfaces)proxy("interfaces", TypeInterfaces.class);
+ }
+
+ public default Requirements requirements() {
+ return (Requirements)proxy("requirements", Requirements.class);
+ }
+
+ public default Capabilities capabilities() {
+ return (Capabilities)proxy("capabilities", Capabilities.class);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupTypes.java
new file mode 100644
index 0000000..8254af1
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupTypes.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface GroupTypes extends TOSCAMap<GroupType> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Groups.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Groups.java
new file mode 100644
index 0000000..d6f770a
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Groups.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface Groups extends TOSCAMap<Group> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Import.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Import.java
new file mode 100644
index 0000000..2e7edf8
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Import.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+/**
+ * Import defintion, see section
+ */
+public interface Import extends TOSCAObject<Import> {
+
+ public String name();
+
+ public String file();
+
+ public String repository();
+
+ public String namespace_uri();
+
+ public String namespace_prefix();
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Imports.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Imports.java
new file mode 100644
index 0000000..3aed669
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Imports.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface Imports extends TOSCAMap<Import> {
+
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Input.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Input.java
new file mode 100644
index 0000000..e636d90
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Input.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+/**
+ * Same as property definition
+ */
+public interface Input extends TOSCAObject<Input> {
+
+ public String name();
+
+ public String type();
+
+ public String description();
+
+ public default Object _default() {
+ return info().get("default");
+ }
+
+ public boolean required();
+
+ public Status status();
+
+ public default Constraints constraints() {
+ return (Constraints)proxy("constraints", Constraints.class);
+ }
+
+ public default EntrySchema entry_schema() {
+ return (EntrySchema)proxy("entry_schema", EntrySchema.class);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Inputs.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Inputs.java
new file mode 100644
index 0000000..1f19079
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Inputs.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface Inputs extends TOSCAMap<Input> {
+
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceType.java
new file mode 100644
index 0000000..559f24b
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceType.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.Map;
+
+import com.google.common.collect.Maps;
+
+/**
+ * Interface type definition, spec section 3.6.5
+ */
+public interface InterfaceType extends TOSCAObject<InterfaceType> {
+
+ public String name();
+
+ public String derived_from();
+
+ public String description();
+
+ public String version();
+
+ public default Metadata metadata() {
+ return (Metadata)proxy("metadata", Metadata.class);
+ }
+
+ public default Properties inputs() {
+ return (Properties)proxy("inputs", Properties.class);
+ }
+
+ /**
+ * The set of operations, made up by all keys but the ones above ..
+ */
+ public default Operations operations() {
+ return (Operations)
+ TOSCAProxy.record(info(),
+ info -> TOSCAProxy.buildMap(null,
+ Maps.filterKeys((Map)info,
+ key -> !("derived_from".equals(key) ||
+ "description".equals(key) ||
+ "version".equals(key) ||
+ "metadata".equals(key) ||
+ "inputs".equals(key))),
+ Operations.class));
+
+ }
+
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceTypes.java
new file mode 100644
index 0000000..4b42c8e
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceTypes.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface InterfaceTypes extends TOSCAMap<InterfaceType> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Metadata.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Metadata.java
new file mode 100644
index 0000000..8e395dc
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Metadata.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+/*
+ * No predefined entries here, so just use the java.util.Map interface get, i.e. get("some_entry_name")
+ */
+public interface Metadata extends TOSCAObject<Metadata> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Models.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Models.java
new file mode 100644
index 0000000..868d059
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Models.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.io.File;
+import java.util.Map;
+
+import org.onap.tosca.checker.Target;
+import org.onap.tosca.checker.Catalog;
+import org.onap.tosca.checker.Checker;
+
+
+public class Models {
+
+ public static ServiceTemplate service_template(Target theTarget) {
+ return TOSCAProxy.buildObject("", (Map)theTarget.getTarget(), ServiceTemplate.class);
+ }
+
+
+
+ public static void main(String[] theArgs) {
+
+ try {
+ Catalog cat = new Checker().check(new File(theArgs[0]));
+
+ for (Target t: cat.targets()) {
+ System.err.println(t.getLocation() + "\n" + cat.importString(t) + "\n" + t.getReport());
+ }
+
+ //for (Target t: cat.sortedTargets()) {
+ // System.out.println(t);
+ //}
+
+ ServiceTemplate tmpl = service_template(cat.topTargets().iterator().next());
+
+ NodeTypes node_types = tmpl.node_types();
+ if (node_types != null) {
+ for (NodeType nt: node_types.values()) {
+ System.out.println(" ** node type : " + nt.name());
+ Requirements reqs = nt.requirements();
+ if (reqs != null) {
+ for (Requirement req: reqs)
+ System.out.println("\t ** requirement " + req.name() + "/" + req.capability());
+ }
+ }
+ }
+
+ Substitution subs = tmpl.topology_template().substitution_mappings();
+ System.out.format("Substitution Mapping %s", subs.node_type());
+ Substitution.Mappings maps = subs.capabilities();
+ if (maps != null) {
+ for (Substitution.Mapping m: maps.values())
+ System.out.format("\n\tCapability mapping %s : %s)", m.target(), m.mapping());
+ }
+ maps = subs.requirements();
+ if (maps != null) {
+ for (Substitution.Mapping m: maps.values())
+ System.out.format("\n\tRequirement mapping %s : %s", m.target(), m.mapping());
+ }
+/*
+for(NodeTemplate nt: tmpl.topology_template().node_templates().values()) {
+ System.out.println(" ** node : " + nt.name() + " : " + nt.type());
+ PropertiesAssignments props = nt.properties();
+ if (props != null) {
+ props.entrySet().stream().forEach(e -> {
+ System.out.println(" ** property : " + e.getKey() + " : " + e.getValue());
+ });
+ }
+*/
+ }
+ catch (Exception x) {
+ x.printStackTrace();
+ }
+ }
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeFilter.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeFilter.java
new file mode 100644
index 0000000..27e3de2
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeFilter.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+/**
+ * Node filter definition, spec section
+ */
+public interface NodeFilter extends TOSCAObject<NodeFilter> {
+
+ public default Properties properties() {
+ return (Properties)proxy("properties", Properties.class);
+ }
+
+ public default Capabilities capabilities() {
+ return (Capabilities)proxy("capabilities", Capabilities.class);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplate.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplate.java
new file mode 100644
index 0000000..6f20e2b
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplate.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+
+/**
+ * See tosca spec section
+ */
+public interface NodeTemplate extends TOSCAObject<NodeTemplate> {
+
+ public String name();
+
+ public String type();
+
+ public String description();
+
+ public List<String> directives();
+
+ public String copy();
+
+ public default Metadata metadata() {
+ return (Metadata)proxy("metadata", Metadata.class);
+ }
+
+ public default PropertiesAssignments properties() {
+ return (PropertiesAssignments)proxy("properties", PropertiesAssignments.class);
+ }
+
+ public default AttributesAssignments attributes() {
+ return (AttributesAssignments)proxy("attributes", AttributesAssignments.class);
+ }
+
+ public default CapabilitiesAssignments capabilities() {
+ return (CapabilitiesAssignments)proxy("capabilities", CapabilitiesAssignments.class);
+ }
+
+ public default RequirementsAssignments requirements() {
+ return (RequirementsAssignments)proxy("requirements", RequirementsAssignments.class);
+ }
+
+ public default TemplateInterfaces interfaces() {
+ return (TemplateInterfaces)proxy("interfaces", TemplateInterfaces.class);
+ }
+
+ public default Artifacts artifacts() {
+ return (Artifacts)proxy("artifacts", Artifacts.class);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplates.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplates.java
new file mode 100644
index 0000000..4e4bb09
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplates.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface NodeTemplates extends TOSCAMap<NodeTemplate> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeType.java
new file mode 100644
index 0000000..06d79bb
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeType.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+/**
+ * Node type definition, spec section 3.6.9
+ */
+public interface NodeType extends TOSCAObject<NodeType> {
+
+ public String name();
+
+ public String derived_from();
+
+ public String description();
+
+ public String version();
+
+ public default Metadata metadata() {
+ return (Metadata)proxy("metadata", Metadata.class);
+ }
+
+ public default Properties properties() {
+ return (Properties)proxy("properties", Properties.class);
+ }
+
+ public default Attributes attributes() {
+ return (Attributes)proxy("attributes", Attributes.class);
+ }
+
+ public default Requirements requirements() {
+ return (Requirements)proxy("requirements", Requirements.class);
+ }
+
+ public default Capabilities capabilities() {
+ return (Capabilities)proxy("capabilities", Capabilities.class);
+ }
+
+ public default TypeInterfaces interfaces() {
+ return (TypeInterfaces)proxy("interfaces", TypeInterfaces.class);
+ }
+
+ public default Artifacts artifacts() {
+ return (Artifacts)proxy("artifacts", Artifacts.class);
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTypes.java
new file mode 100644
index 0000000..e61deb4
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTypes.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface NodeTypes extends TOSCAMap<NodeType> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operation.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operation.java
new file mode 100644
index 0000000..fbc66b3
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operation.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+/**
+ * This is the type-specific operation definition, as per spec section 3.5.13.2.2
+ */
+public interface Operation extends TOSCAObject<Operation> {
+
+
+ public String name();
+
+ public String description();
+
+ public String implementation();
+
+ public default Properties inputs() {
+ return (Properties)proxy("inputs", Properties.class);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operations.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operations.java
new file mode 100644
index 0000000..726ad90
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operations.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface Operations extends TOSCAMap<Operation> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Outputs.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Outputs.java
new file mode 100644
index 0000000..2daed56
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Outputs.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface Outputs extends TOSCAMap<Parameter> {
+
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Parameter.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Parameter.java
new file mode 100644
index 0000000..e1d771b
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Parameter.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+/**
+ * Spec section 3.5.12
+ */
+public interface Parameter extends TOSCAObject<Parameter> {
+
+ public String name();
+
+ public String type();
+
+ public String description();
+
+ public Object value();
+
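+	//'default' is a Java reserved word, hence the underscore-prefixed accessor that reads the backing map directly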
+ public default Object _default() {
+ return info().get("default");
+ }
+
+ public boolean required();
+
+ public Status status();
+
+ public default Constraints constraints() {
+ return (Constraints)proxy("constraints", Constraints.class);
+ }
+
+ public default EntrySchema entry_schema() {
+ return (EntrySchema)proxy("entry_schema", EntrySchema.class);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policies.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policies.java
new file mode 100644
index 0000000..21a6be5
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policies.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface Policies extends TOSCAMap<Policy> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policy.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policy.java
new file mode 100644
index 0000000..4d291a9
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policy.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+
+/**
+ * Policy definition (as it appears within a topology template), spec section 3.7.6
+ */
+public interface Policy extends TOSCAObject<Policy> {
+
+ public String type();
+
+ public String description();
+
+ public String version();
+
+ public default Metadata metadata() {
+ return (Metadata)proxy("metadata", Metadata.class);
+ }
+
+ public List<String> targets();
+
+ public default PropertiesAssignments properties() {
+ return (PropertiesAssignments)proxy("properties", PropertiesAssignments.class);
+ }
+
+ public default PolicyType.Triggers triggers() {
+ return (PolicyType.Triggers)proxy("triggers", PolicyType.Triggers.class);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyType.java
new file mode 100644
index 0000000..08a4bfa
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyType.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+
+/**
+ * Policy type definition, spec section 3.6.12
+ */
+public interface PolicyType extends TOSCAObject<PolicyType> {
+
+ public String name();
+
+ public String derived_from();
+
+ public String description();
+
+ public String version();
+
+ public default Metadata metadata() {
+ return (Metadata)proxy("metadata", Metadata.class);
+ }
+
+ public List<String> targets();
+
+ public default Properties properties() {
+ return (Properties)proxy("properties", Properties.class);
+ }
+
+ public default Triggers triggers() {
+ return (Triggers)proxy("triggers", Triggers.class);
+ }
+
+ public interface Triggers extends TOSCAMap<Trigger> {
+
+ }
+
+ public interface Trigger extends TOSCAObject<Trigger> {
+
+ public String description();
+
+ public String event_type();
+
+ public String schedule();
+
+ public default Constraints constraint() {
+ return (Constraints)proxy("constraint", Constraints.class);
+ }
+
+ public default Constraints condition() {
+ return (Constraints)proxy("condition", Constraints.class);
+ }
+
+ public int period();
+
+ public int evaluations();
+
+ public String method();
+
+ public String action();
+
+ //target_filter
+ public default EventFilter target_filter() {
+ return (EventFilter)proxy("target_filter", EventFilter.class);
+ }
+
+ }
+
+ public interface EventFilter extends TOSCAObject<EventFilter> {
+
+ public String node();
+
+ public String requirement();
+
+ public String capability();
+
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyTypes.java
new file mode 100644
index 0000000..8ef6278
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyTypes.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface PolicyTypes extends TOSCAMap<PolicyType> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Properties.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Properties.java
new file mode 100644
index 0000000..d2a8e85
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Properties.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface Properties extends TOSCAMap<Property> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PropertiesAssignments.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PropertiesAssignments.java
new file mode 100644
index 0000000..dd949ff
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PropertiesAssignments.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+/*
+ * A simple representation of the property value assignments, to be used through the Map interface.
+ * Working with this more basic representation keeps all (jx)path expressions valid.
+ */
+public interface PropertiesAssignments extends TOSCAObject<PropertiesAssignments> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Property.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Property.java
new file mode 100644
index 0000000..126a346
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Property.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+/**
+ * See spec section 3.5.8
+ */
+public interface Property extends TOSCAObject<Property> {
+
+ public String name();
+
+ public String type();
+
+ public String description();
+
+ public default Object _default() {
+ return info().get("default");
+ }
+
+ public boolean required();
+
+ public Status status();
+
+ public default Constraints constraints() {
+ return (Constraints)proxy("constraints", Constraints.class);
+ }
+
+ public default EntrySchema entry_schema() {
+ return (EntrySchema)proxy("entry_schema", EntrySchema.class);
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Range.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Range.java
new file mode 100644
index 0000000..ed90ed7
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Range.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+
+/*
+ * A TOSCA range value: a two-entry list holding the lower and upper bounds; the upper bound may be the
+ * string "UNBOUNDED".
+ */
+public interface Range extends List<Object> {
+
+ public default Object lower() {
+ return get(0);
+ }
+
+ public default Object upper() {
+ return get(1);
+ }
+
+ public default boolean isUnbounded() {
+ return "UNBOUNDED".equals(upper());
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplate.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplate.java
new file mode 100644
index 0000000..89e1fa0
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplate.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+
+/**
+ * Relationship template definition
+ */
+public interface RelationshipTemplate extends TOSCAObject<RelationshipTemplate> {
+
+ public String name();
+
+ public String type();
+
+ public String description();
+
+ public String copy();
+
+ public default Metadata metadata() {
+ return (Metadata)proxy("metadata", Metadata.class);
+ }
+
+ public default PropertiesAssignments properties() {
+ return (PropertiesAssignments)proxy("properties", PropertiesAssignments.class);
+ }
+
+ public default AttributesAssignments attributes() {
+ return (AttributesAssignments)proxy("attributes", AttributesAssignments.class);
+ }
+
+ public default TemplateInterfaces interfaces() {
+ return (TemplateInterfaces)proxy("interfaces", TemplateInterfaces.class);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplates.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplates.java
new file mode 100644
index 0000000..d4f73d6
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplates.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface RelationshipTemplates extends TOSCAMap<RelationshipTemplate> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipType.java
new file mode 100644
index 0000000..844f176
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipType.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+
+/**
+ * Relationship type definition, spec section 3.6.10
+ */
+public interface RelationshipType extends TOSCAObject<RelationshipType> {
+
+ public String name();
+
+ public String derived_from();
+
+ public String description();
+
+ public String version();
+
+ public default Metadata metadata() {
+ return (Metadata)proxy("metadata", Metadata.class);
+ }
+
+ public default Properties properties() {
+ return (Properties)proxy("properties", Properties.class);
+ }
+
+ public default Attributes attributes() {
+ return (Attributes)proxy("attributes", Attributes.class);
+ }
+
+ public default TypeInterfaces interfaces() {
+ return (TypeInterfaces)proxy("interfaces", TypeInterfaces.class);
+ }
+
+ public List<String> valid_target_types();
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTypes.java
new file mode 100644
index 0000000..c5144f5
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTypes.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface RelationshipTypes extends TOSCAMap<RelationshipType> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repositories.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repositories.java
new file mode 100644
index 0000000..b30bfe7
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repositories.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface Repositories extends TOSCAMap<Repository> {
+
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repository.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repository.java
new file mode 100644
index 0000000..b685378
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repository.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.Map;
+
+
+/*
+ * Repository definition
+ */
+public interface Repository extends TOSCAObject<Repository> {
+
+ /** */
+ public String name();
+
+ /** */
+ public String description();
+
+ /** */
+ public String url();
+
+ /** */
+ public default Credential credential() {
+ return (Credential)proxy("credential", Credential.class);
+ }
+
+ /** */
+ public interface Credential extends TOSCAObject<Credential> {
+
+ /** */
+ public String protocol();
+
+ /** */
+ public String token_type();
+
+ /** */
+ public String token();
+
+ /** */
+ public String user();
+
+ /** */
+ public Map<String,String> keys();
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirement.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirement.java
new file mode 100644
index 0000000..5e3c4f8
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirement.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+/*
+ * Requirement definition, as it appears in node type definitions (spec section 3.6.3)
+ */
+public interface Requirement extends TOSCAObject<Requirement> {
+
+ /** */
+ public String name();
+
+ /** */
+ public String capability();
+
+ /** */
+ public String node();
+
+	/** the allowed occurrences of this requirement, as a [lower, upper] range */
+	public default Range occurences() {
+		return (Range)proxyList("occurrences", Range.class);
+	}
+
+ public default Relationship relationship() {
+ return (Relationship)proxy("relationship", Relationship.class);
+ }
+
+ /**
+ * Spec section 3.6.3.2.3
+ */
+ public interface Relationship extends TOSCAObject<Relationship> {
+
+ public String type();
+
+ public default TypeInterfaces interfaces() {
+ return (TypeInterfaces)proxy("interfaces", TypeInterfaces.class);
+ }
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementAssignment.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementAssignment.java
new file mode 100644
index 0000000..7d03453
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementAssignment.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+/*
+ * Requirement assignment as it appears in node templates. See spec section 3.7.2
+ */
+public interface RequirementAssignment extends TOSCAObject<RequirementAssignment> {
+
+ public String name();
+
+ /**
+	 * Provides the name of either:
+	 * a Capability definition within a target node template that can fulfill the requirement, or
+	 * a Capability Type that the provider will use to select a type-compatible target node template to fulfill the requirement at runtime.
+ */
+ public String capability();
+
+ /**
+ */
+ public String node();
+
+ /** */
+ public default NodeFilter node_filter() {
+ return (NodeFilter)proxy("node_filter", NodeFilter.class);
+ }
+
+ /** */
+ public default RelationshipAssignment relationship() {
+ return (RelationshipAssignment)proxy("relationship", RelationshipAssignment.class);
+ }
+
+ public interface RelationshipAssignment extends TOSCAObject<RelationshipAssignment> {
+
+ public String type();
+
+ public default PropertiesAssignments properties() {
+ return (PropertiesAssignments)proxy("properties", PropertiesAssignments.class);
+ }
+
+ public default TemplateInterfaces interfaces() {
+ return (TemplateInterfaces)proxy("interfaces", TemplateInterfaces.class);
+ }
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirements.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirements.java
new file mode 100644
index 0000000..b29a256
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirements.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface Requirements extends TOSCASeq<Requirement> {
+
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementsAssignments.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementsAssignments.java
new file mode 100644
index 0000000..a621c44
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementsAssignments.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface RequirementsAssignments extends TOSCASeq<RequirementAssignment> {
+
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ServiceTemplate.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ServiceTemplate.java
new file mode 100644
index 0000000..7eb686c
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ServiceTemplate.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+/**
+ * The root of a TOSCA service template document
+ */
+public interface ServiceTemplate extends TOSCAObject<ServiceTemplate> {
+
+ public String tosca_definitions_version();
+
+ public String description();
+
+ public default Metadata metadata() {
+ return (Metadata)proxy("metadata", Metadata.class);
+ }
+
+ public default Imports imports() {
+ return (Imports)proxy("imports", Imports.class);
+ }
+
+ public default Repositories repositories() {
+ return (Repositories)proxy("repositories", Repositories.class);
+ }
+
+ public default ArtifactTypes artifact_types() {
+ return (ArtifactTypes)proxy("artifact_types", ArtifactTypes.class);
+ }
+
+ public default DataTypes data_types() {
+ return (DataTypes)proxy("data_types", DataTypes.class);
+ }
+
+ public default NodeTypes node_types() {
+ return (NodeTypes)proxy("node_types", NodeTypes.class);
+ }
+
+ public default GroupTypes group_types() {
+ return (GroupTypes)proxy("group_types", GroupTypes.class);
+ }
+
+ public default PolicyTypes policy_types() {
+ return (PolicyTypes)proxy("policy_types", PolicyTypes.class);
+ }
+
+ public default RelationshipTypes relationship_types() {
+ return (RelationshipTypes)proxy("relationship_types", RelationshipTypes.class);
+ }
+
+ public default CapabilityTypes capability_types() {
+ return (CapabilityTypes)proxy("capability_types", CapabilityTypes.class);
+ }
+
+ public default InterfaceTypes interface_types() {
+ return (InterfaceTypes)proxy("interface_types", InterfaceTypes.class);
+ }
+
+ public default TopologyTemplate topology_template() {
+ return (TopologyTemplate)proxy("topology_template", TopologyTemplate.class);
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Status.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Status.java
new file mode 100644
index 0000000..e76bff5
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Status.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public enum Status {
+
+ supported,
+ unsupported,
+ experimental,
+ deprecated
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Substitution.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Substitution.java
new file mode 100644
index 0000000..c41a429
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Substitution.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+
+
+/**
+ * Spec section 3.8
+ */
+public interface Substitution extends TOSCAObject<Substitution> {
+
+ public String node_type();
+
+ public default Mappings capabilities() {
+ return (Mappings)proxy("capabilities", Mappings.class);
+ }
+
+ public default Mappings requirements() {
+ return (Mappings)proxy("requirements", Mappings.class);
+ }
+
+ /** */
+ public interface Mappings extends TOSCAMap<Mapping> {
+ }
+
+ /** */
+ public interface Mapping extends TOSCAObject<Mapping> {
+
+		/** the map key, promoted to a name */
+ public String name();
+
+		/**
+		 * the capability or requirement name being mapped (same as the key)
+		 */
+ public default String target() {
+ return name();
+ }
+
+		/** the mapping value: a [node template name, capability or requirement name] pair */
+ public default List<String> mapping() {
+ return (List<String>)info().values().iterator().next();
+ }
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAMap.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAMap.java
new file mode 100644
index 0000000..50feb44
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAMap.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.Map;
+
+
+/*
+ * used to render TOSCA constructs that are maps of names to actual construct data:
+ * - node types, etc
+ * - topology template inputs, etc
+ */
+public interface TOSCAMap<T extends TOSCAObject<T>> extends Map<String, T> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAObject.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAObject.java
new file mode 100644
index 0000000..a0332fd
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAObject.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.Map;
+import java.util.List;
+import java.util.Collections;
+
+import java.lang.reflect.Proxy;
+
+/*
+ * There is a choice to make here: whether to explicitly extend Map or not.
+ * Note that this makes assumptions about the POJO representation of the yaml document.
+ */
+public interface TOSCAObject<T extends TOSCAObject<T>> extends Map<String, Object> {
+
+ /** */
+ public abstract Map info();
+
+	/* if the key is absent this produces a null value, which is the expected behaviour
+ */
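+	//For example (illustrative only): given a node type whose backing map has a "properties" entry,
+	//proxy("properties", Properties.class) wraps that entry in a Properties view; a missing key simply yields null.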
+ public default <A> A proxy(String theKey, final Class<A> theType) {
+ return (A)/*info().*/computeIfPresent(theKey,
+ (key, val)-> {
+ /*
+ if (val instanceof TOSCAObject ||
+ val instanceof TOSCAMap ||
+ val instanceof TOSCASeq)
+ return val;
+ */
+ //this makes the assumption that no other proxies are at play
+ if (Proxy.isProxyClass(val.getClass()))
+ return val;
+
+ if (val instanceof Map && TOSCAMap.class.isAssignableFrom(theType))
+ return TOSCAProxy.buildMap((String)key, (Map<String,Map>)val, (Class<TOSCAMap>)theType);
+ if (val instanceof List && TOSCASeq.class.isAssignableFrom(theType))
+ return TOSCAProxy.buildSeq((List)val, (Class<TOSCASeq>)theType);
+
+ return TOSCAProxy.buildObject((String)key, (Map<String,Map>)val, theType);
+ });
+ }
+
+ public default <A extends List> A proxyList(String theKey, final Class<A> theType) {
+ return (A)computeIfPresent(theKey,
+ (key, val)-> {
+ if (Proxy.isProxyClass(val.getClass()))
+ return val;
+ return TOSCAProxy.buildList((List)val, theType);
+ });
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAProxy.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAProxy.java
new file mode 100644
index 0000000..bce8675
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAProxy.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Collections;
+
+import java.util.stream.Collectors;
+
+import java.util.function.Function;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import java.lang.reflect.Type;
+import java.lang.reflect.Method;
+import java.lang.reflect.Array;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+
+import java.lang.invoke.MethodHandles;
+
+import com.google.common.reflect.Invokable;
+import com.google.common.reflect.AbstractInvocationHandler;
+
+
+/**
+ * Dynamic proxy based implementation of the TOSCA model interfaces, backed by the POJO (maps and lists)
+ * representation produced by the checker.
+ */
+public class TOSCAProxy
+ extends AbstractInvocationHandler {
+
+
+ public static Constructor<MethodHandles.Lookup> lookupHandleConstructor;
+
+ static {
+ try {
+ lookupHandleConstructor =
+ MethodHandles.Lookup.class.getDeclaredConstructor(Class.class,
+ int.class);
+
+ if (!lookupHandleConstructor.isAccessible()) {
+ lookupHandleConstructor.setAccessible(true);
+ }
+ }
+ catch (Exception x) {
+ throw new RuntimeException(x);
+ }
+ }
+
+ protected static <T> Class<T> typeArgument(Class theType) {
+ return (Class<T>)
+ ((ParameterizedType)theType.getGenericInterfaces()[0]).
+ getActualTypeArguments()[0];
+ }
+
+ private static Map proxyRecords = new HashMap();
+
+ /** */
+ public static Object record(Object theRef, Function theProxyBuilder) {
+ return proxyRecords.computeIfAbsent(theRef, theProxyBuilder);
+ }
+
+ /* a version allowing for the 'decoration/wrapping' of a basic list
+ */
+ public static <T extends List> T buildList(final List theInfo, Class<T> theType) {
+ return (T)java.lang.reflect.Proxy.newProxyInstance(
+ TOSCAProxy.class.getClassLoader(),
+ new Class[] { theType },
+ new AbstractInvocationHandler() {
+ protected Object handleInvocation(Object theProxy,Method theMethod,Object[] theArgs)
+ throws Throwable {
+									//default methods (e.g. Range.lower()) cannot be reflectively invoked on the plain
+									//backing list, so dispatch them through their declaring interface, bound to the proxy
+									if (theMethod.isDefault()) {
+										final Class<?> declaringClass = theMethod.getDeclaringClass();
+										return lookupHandleConstructor
+														.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
+														.unreflectSpecial(theMethod, declaringClass)
+														.bindTo(theProxy)
+														.invokeWithArguments(theArgs);
+									}
+									return theMethod.invoke(theInfo, theArgs);
+ }
+ });
+ }
+
+ /*
+	 * This is targeted at the lists of one-entry maps seen in the TOSCA spec
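+	 * (for example, a requirements assignment list such as
+	 *     requirements:
+	 *       - host: { node: db_server }
+	 * is a list whose elements are single-entry maps keyed by the construct name)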
+ */
+ public static <T extends TOSCASeq> T buildSeq(final List<Map> theInfo, Class<T> theType) {
+ theInfo.replaceAll((value) -> { Map.Entry<String,Map> entry = (Map.Entry<String,Map>)
+ value.entrySet().iterator().next();
+ return buildObject(entry.getKey(), entry.getValue(), typeArgument(theType));
+ });
+ return (T)java.lang.reflect.Proxy.newProxyInstance(
+ TOSCAProxy.class.getClassLoader(),
+ new Class[] { theType },
+ new AbstractInvocationHandler() {
+ protected Object handleInvocation(Object theProxy,Method theMethod,Object[] theArgs)
+ throws Throwable {
+ //A Seq is nothing but a List so just propagate the call ..
+ return theMethod.invoke(theInfo, theArgs);
+ }
+ });
+ }
+
+ /*
+ * All entries in the map will become TOSCAObject instances (used for collections of constructs)
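+	 * (for example, each value of a node_types map becomes a NodeType proxy keyed by its name)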
+ */
+ public static <T extends TOSCAMap> T buildMap(String theName, Map<String, ?> theInfo, Class<T> theType) {
+ theInfo.replaceAll((name, value) -> {
+ return
+ value instanceof Map ?
+ buildObject(name, (Map)value, typeArgument(theType)) :
+ buildObject(name, Collections.singletonMap("value", value), typeArgument(theType));
+ });
+ return buildObject(theName, theInfo, theType);
+ }
+
+ public static <T> T buildObject(String theName, Map theInfo, Class<T> theType) {
+ return (T)java.lang.reflect.Proxy.newProxyInstance(
+ TOSCAProxy.class.getClassLoader(),
+ new Class[] { theType /*, Map.class*/ },
+ new TOSCAProxy(theName, theInfo));
+ }
+/*
+ public static <T> T build(String theName, Map theInfo, Class<T> theType) {
+ if (TOSCAMap.class.isAssignableFrom(theType))
+ return buildMap(theName, theInfo, (Class<TOSCAMap>)theType);
+ else
+ return buildObject(theName, theInfo, theType);
+ }
+*/
+ private String name;
+ private Map info;
+
+ protected TOSCAProxy(String theName, Map theInfo) {
+ this.name = theName;
+ this.info = theInfo;
+ }
+
+ protected Object handleInvocation(
+ Object theProxy,Method theMethod,Object[] theArgs)
+ throws Throwable {
+
+//System.out.println(" ** proxy looking for " + theMethod + " in " + name + "&" + info);
+
+ //if the method was declared in Map.class, just default to 'info' (we should make this read only)
+ if (Map.class.equals(theMethod.getDeclaringClass())) {
+ return theMethod.invoke(this.info, theArgs);
+ }
+
+ if (theMethod.isDefault()) {
+ final Class<?> declaringClass = theMethod.getDeclaringClass();
+ return lookupHandleConstructor
+ .newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
+ .unreflectSpecial(theMethod, declaringClass)
+ .bindTo(theProxy)
+ .invokeWithArguments(theArgs);
+ }
+
+ if ("info".equals(theMethod.getName())) {
+ return this.info;
+ }
+
+ if ("name".equals(theMethod.getName())) {
+ return this.name;
+ }
+
+ return this.info.get(theMethod.getName());
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCASeq.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCASeq.java
new file mode 100644
index 0000000..e335e5a
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCASeq.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+
+
+/*
+ * used to render TOSCA constructs that are list of actual constructs:
+ * - requirements
+ */
+public interface TOSCASeq<T extends TOSCAObject<T>> extends List<T> {
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterface.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterface.java
new file mode 100644
index 0000000..628e2f7
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterface.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.List;
+import java.util.Map;
+
+import com.google.common.collect.Maps;
+
+
+/*
+ * Interface definition used in templates (node, relationship)
+ */
+public interface TemplateInterface extends TOSCAObject<TemplateInterface> {
+
+ public String name();
+
+ public default Inputs inputs() {
+ return (Inputs)proxy("inputs", Inputs.class);
+ }
+
+ /**
+ * See InterfaceType for the reason for the implementation below.
+ * Use the template specific operation definition, as per spec section 3.5.13.2.3
+ */
+ public default TemplateInterface.Operations operations() {
+ return (Operations)
+ TOSCAProxy.record(info(),
+ info -> TOSCAProxy.buildMap(null,
+ Maps.filterKeys((Map)info,
+ key -> !("inputs".equals(key))),
+ Operations.class));
+ }
+
+ /**
+	 * Should this be viewed as an 'operation assignment'?
+ */
+ public interface Operations extends TOSCAMap<Operation> {
+ }
+
+ /*
+ * Template specific operation definition, section 3.5.13.2.3
+ */
+ public interface Operation extends TOSCAObject<Operation> {
+
+ public String name();
+
+ public String description();
+
+ public default PropertiesAssignments inputs() {
+ return (PropertiesAssignments)proxy("inputs", PropertiesAssignments.class);
+ }
+
+ public default Implementation implementation() {
+ return (Implementation)proxy("implementation", Implementation.class);
+ }
+
+ /**
+ */
+ public interface Implementation extends TOSCAObject<Implementation> {
+
+ public String primary();
+
+ public List<String> dependencies();
+
+ }
+
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterfaces.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterfaces.java
new file mode 100644
index 0000000..b4b40a2
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterfaces.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface TemplateInterfaces extends TOSCAMap<TemplateInterface> {
+
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TopologyTemplate.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TopologyTemplate.java
new file mode 100644
index 0000000..88328ba
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TopologyTemplate.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface TopologyTemplate extends TOSCAObject<TopologyTemplate> {
+
+ public String description();
+
+ public default Inputs inputs() {
+ return (Inputs)proxy("inputs", Inputs.class);
+ }
+
+ public default Outputs outputs() {
+		return (Outputs)proxy("outputs", Outputs.class);
+ }
+
+ public default NodeTemplates node_templates() {
+ return (NodeTemplates)proxy("node_templates", NodeTemplates.class);
+ }
+
+ public default RelationshipTemplates relationship_templates() {
+ return (RelationshipTemplates)proxy("relationship_templates", RelationshipTemplates.class);
+ }
+
+ public default Groups groups() {
+ return (Groups)proxy("groups", Groups.class);
+ }
+
+ public default Substitution substitution_mappings() {
+ return (Substitution)proxy("substitution_mappings", Substitution.class);
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterface.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterface.java
new file mode 100644
index 0000000..77836f6
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterface.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+import java.util.Map;
+
+import com.google.common.collect.Maps;
+
+/*
+ * Interface definition used in types (node, relationship)
+ */
+public interface TypeInterface extends TOSCAObject<TypeInterface> {
+
+ public String name();
+
+ public String type();
+
+ public default Inputs inputs() {
+ return (Inputs)proxy("inputs", Inputs.class);
+ }
+
+ public default Operations operations() {
+		//less obvious: we need to pick up all the keys EXCEPT "inputs" and "type",
+		//so there is no single key to attach the proxy to, which would otherwise mean building a new proxy on every call.
+		//To avoid that, we register a long-lived association between the backing info map and its proxy.
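+		//For example (illustrative): in an interface definition such as
+		//  Standard: { type: tosca.interfaces.node.lifecycle.Standard, inputs: {..}, create: {..}, configure: {..} }
+		//the operation definitions ("create", "configure") sit alongside "type" and "inputs".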
+
+ return (Operations)
+ TOSCAProxy.record(info(),
+ info -> TOSCAProxy.buildMap(null,
+ Maps.filterKeys((Map)info,
+ key -> !("type".equals(key) ||
+ "inputs".equals(key))),
+ Operations.class));
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterfaces.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterfaces.java
new file mode 100644
index 0000000..fb271d7
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterfaces.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.model;
+
+
+public interface TypeInterfaces extends TOSCAMap<TypeInterface> {
+
+}
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/package-info.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/package-info.java
new file mode 100644
index 0000000..783389e
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/package-info.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+
+/**
+ * This package contains Java representations of TOSCA constructs.
+ * During processing (parsing, checking, cataloging) a POJO representation of the TOSCA yaml document content
+ * is used. For post-processing the user has the option of wrapping the POJO representations in these
+ * domain specific representations.
+ * Domain specific elements are created 'as needed', as the client navigates through the template content. The
+ * goal is not to change the POJO-based structure but to wrap it in compatible elements, so that all other
+ * methods of post-processing (jxpath, etc.) remain usable.
+ * The TOSCA specific representations are defined as interfaces whose implementations are built at run-time and
+ * delegate to the existing POJO representations produced by the checker.
+ *
+ * The entry points are defined in {@link org.onap.tosca.checker.model.Models Models}.
+ *
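+ * As a sketch of the intended navigation style (how the wrapped topology template is obtained from the
+ * entry points is assumed here, not shown by this package):
+ * <div>
+ * {@code
+ * TopologyTemplate topology = ...;              //wrapped POJO obtained through the Models entry points
+ * Inputs inputs = topology.inputs();
+ * NodeTemplates nodes = topology.node_templates();
+ * }
+ * </div>
+ *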
+ * ToDo: more entry points needed, at least for catalog based navigation of available types.
+ */
+
+package org.onap.tosca.checker.model;
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/package-info.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/package-info.java
new file mode 100644
index 0000000..818eeab
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/package-info.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+
+/**
+ * The checker provides an api/tool for the verification of TOSCA yaml files.
+ * <p>
+ * It provides a three stage processing of a tosca yaml file:
+ *   - yaml verification: is the document a valid yaml document as per yaml.org/spec? In particular we use
+ * the snakeyaml library for parsing the yaml document into a nested structure of java objects. Multiple yaml
+ * documents per storage element/yaml character stream (file, stream) are supported, see section 9 of the yaml spec.
+ *   - tosca yaml grammar validation: is the document a valid tosca yaml document, as per the TOSCA simple
+ * profile for yaml? We use a modified version of the kwalify library for this task. The grammar for TOSCA yaml
+ * is itself a yaml document (found in the package in resources/tosca-schema.yaml). There are certain
+ * limitations on how far this grammar can go.
+ *   - consistency verification: we check the type hierarchies for all TOSCA constructs (data types,
+ * capability types, node types, etc), the definition of all facets of a construct (properties, attributes,
+ * etc) across the type hierarchies, the conformity of construct templates (node templates, ..) with their
+ * types, and data valuations (input assignments, constants, function calls).
+ *
+ * Each stage is blocking, i.e. a stage will be performed only if the previous
+ * one completed successfully. Additional processing can be added to each stage by
+ * providing code annotated as per the {@link org.onap.tosca.checker.annotations Annotations} package.
+ * <p>
+ * The verification is done across all the imported documents. The common TOSCA
+ * types are by default made available to all documents being processed (the
+ * specification is in resources/tosca-common-types.yaml). Networking related
+ * types can be made available by importing resources/tosca-network-types.yaml
+ * while the tosca nfv profile definitions are available at
+ * resources/tosca-nfv-types.yaml.
+ * <p>
+ * The main java classes exposed by the package are the Checker, Target
+ * and Report. A Target represents a document processed by the Checker. While
+ * the Checker starts with a top Target, through import statements it can end up
+ * processing a number of Targets. The results of processing a Target are made
+ * available through a Report which currently is nothing more than a list of
+ * recorded errors.
+ *
+ * <div>
+ * {@code
+ * Checker checker = new Checker();
+ * checker.check("tests/example.yaml");
+ *
+ * for (Target t: checker.targets())
+ * System.out.println(t.getLocation() + "\n" + t.getReport());
+ * }
+ * </div>
+ * <p>
+ * A {@link org.onap.tosca.checker.Report report} consolidates the problems encountered
+ * in the processed documents per Target.
+ * The errors are recorded as instances of Exception, mostly due to the fact that
+ * snakeyaml and kwalify report errors as exceptions. As such there are 3
+ * basic types of errors to be expected in a report: YAMLException (from
+ * snakeyaml, related to parsing), ValidationException (from kwalify, tosca
+ * grammar validation), TargetException (from the checker itself). This might
+ * change as we're looking to unify the way errors are reported. A Report
+ * object has a user-friendly toString function.
+ *
+ * A CheckerException thrown during the checking process is an indication of a
+ * malfunction in the checker itself.
+ * <p>
+ * The checker handles targets as URIs. The resolution of a target consists of
+ * going from a string representing some path/uri to the absolute URI.
+ * Targets are found through a TargetLocator which interprets a location string
+ * as a URI. A custom TargetLocator (capable of interpreting particular URI schemes)
+ * can be plugged into the Checker. Locating a target is actually the first stage of
+ * target processing and an intrinsic part of the process (resolving imported targets).
+ * The package provides a default {@link org.onap.tosca.checker.CommonLocator locator}
+ * which handles the common URI schemes (those the JDK recognizes).
+ * The example above highlights the fact that even when processing starts with one target
+ * it could end up going over a larger set, through imports.
+ *
+ * The results of processing a TOSCA yaml document (and all related documents through
+ * the import relationship) are stored in a {@link org.onap.tosca.checker.Catalog catalog}.
+ * Catalogs are re-usable and the checker provides methods for processing targets with
+ * respect to an existing catalog. 'With respect to a catalog' means that all the types
+ * available in that catalog are visible to the targets being processed; this facilitates
+ * the processing of TOSCA templates organized as a schema (types specifications) and
+ * topology templates.
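+ * A sketch of checking a template against the catalog resulting from a previously processed schema
+ * (how that catalog is retrieved is an assumption here; the {@code catalog()} accessor is hypothetical):
+ * <div>
+ * {@code
+ * Checker checker = new Checker();
+ * checker.check("tests/my_types.yaml");                  //schema: type definitions only
+ * Catalog types = checker.catalog();                     //hypothetical accessor for the resulting catalog
+ * checker.check(new Target("my_service", new File("tests/my_service.yaml").toURI()), types);
+ * }
+ * </div>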
+ * <p>
+ * The most common entry point is {@link org.onap.tosca.checker.Checker#check(String) check},
+ * where the argument is passed to the locator and, once a {@link org.onap.tosca.checker.Target target}
+ * has been obtained, processing continues with the other stages.
+ * The checker also has processing entry points past the first two processing stages (see the example
+ * below): for an already located target <br/>
+ * <ul>
+ * <li>{@link org.onap.tosca.checker.Checker#check(Target) check}</li>
+ * <li>{@link org.onap.tosca.checker.Checker#check(Target, Catalog) check}</li>
+ * </ul>
+ * and for a target already located and parsed <br/>
+ * <ul>
+ * <li>{@link org.onap.tosca.checker.Checker#validate(Target) validate}</li>
+ * <li>{@link org.onap.tosca.checker.Checker#validate(Target, Catalog) validate}</li>
+ * </ul>
+ * </p>
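+ * <p>
+ * For example, an already located target can be checked directly (a sketch; the file location is
+ * hypothetical):
+ * <div>
+ * {@code
+ * Target service = new Target("service", new File("tests/service.yaml").toURI());
+ * checker.check(service);
+ * System.out.println(service.getReport());
+ * }
+ * </div>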
+ * <p>
+ * The build process provides an all-in-one jar to facilitate usage of the checker as a standalone tool.
+ * <p>
+ * Other:
+ * <ul>
+ * <li>during tosca grammar validation the checker performs a 'normalization'
+ * process, as the tosca yaml profile allows short forms in the
+ * specification of a number of its constructs (see spec). The checker changes
+ * the actual structure of the parsed document so that only normalized
+ * (complete) forms of specification are present before the checking phase
+ * (the kwalify library was extended in order to be able to specify these
+ * short forms in the grammar itself and to process/tolerate them at validation
+ * time).
+ * </li>
+ * <li>the checker contains an internal catalog where the types and templates
+ * of different constructs are aggregated and indexed across all targets in
+ * order to facilitate the checking phase. Catalogs can be 'linked' and the
+ * resolution process delegated (the checker maintains a basic catalog with
+ * the core and common types and there is always a second catalog maintaining
+ * the information related to the current targets).
+ * The catalog is currently not exposed by the library.
+ * </li>
+ * <li>imports processing: the import statements present in a target are first
+ * 'detected' during the tosca yaml grammar validation phase. At that stage all
+ * imports are (recursively) parsed and validated (first 2 phases). Checking
+ * of all imports (recursively) is done during stage 3.
+ * </li>
+ * </ul>
+ * <p>
+ * ToDo:
+ * - nobody says the parsing cannot be pluggable: snakeyaml could be replaced by
+ * anything that can take a Reader and produce a POJO hierarchy for the next stages.
+ */
+
+package org.onap.tosca.checker;
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/JSP.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/JSP.java
new file mode 100644
index 0000000..4183c75
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/JSP.java
@@ -0,0 +1,659 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.processing;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.File;
+
+import java.net.URI;
+
+import java.util.Set;
+import java.util.Map;
+import java.util.List;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.stream.Collectors;
+import java.util.function.Consumer;
+import java.util.function.BiFunction;
+import java.util.function.Predicate;
+
+import javax.script.Invocable;
+import javax.script.Compilable;
+import javax.script.CompiledScript;
+import javax.script.Bindings;
+import javax.script.SimpleBindings;
+import javax.script.ScriptContext;
+import javax.script.SimpleScriptContext;
+import javax.script.ScriptEngine;
+import javax.script.ScriptEngineManager;
+import javax.script.ScriptException;
+
+import jdk.nashorn.api.scripting.JSObject;
+import jdk.nashorn.api.scripting.AbstractJSObject;
+
+import org.apache.commons.jxpath.JXPathContext;
+import org.apache.commons.jxpath.JXPathException;
+
+import org.onap.tosca.checker.Target;
+import org.onap.tosca.checker.Catalog;
+import org.onap.tosca.checker.Report;
+import org.onap.tosca.checker.Construct;
+import org.onap.tosca.checker.Facet;
+
+
+/**
+ * JavaScript Processor (JSP).
+ * Each script is represented by a Target and the JSP processor maintains a collection of Targets, i.e. scripts.
+ * A collection of targets can be used with only one JSP processor at a time, as the processor stores
+ * processor-specific compiled versions within each target.
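+ *
+ * A usage sketch (the catalog is assumed to come from a prior Checker run and the script path is
+ * hypothetical):
+ * <div>
+ * {@code
+ * JSP jsp = new JSP(new String[] {"scripts/extra_checks.js"});
+ * Report report = jsp.process(catalog)
+ *                    .with("verbose", Boolean.TRUE)      //extra binding made visible to the scripts
+ *                    .process()
+ *                    .run();
+ * }
+ * </div>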
+ */
+public class JSP implements Processor<JSP> {
+
+ private ScriptEngine engine;
+ private Collection<? extends Target> targets;
+
+ public JSP(String[] theScripts) {
+ this(Arrays.stream(theScripts)
+ .map(s -> new Target(s, new File(s).toURI()))
+ .collect(Collectors.toList()));
+ }
+
+ public JSP(File[] theScripts) {
+ this(Arrays.stream(theScripts)
+ .map(s -> new Target(s.getName(), s.toURI()))
+ .collect(Collectors.toList()));
+ }
+
+ public JSP(URI[] theScripts) {
+ this(Arrays.stream(theScripts)
+ .map(s -> new Target(s.toString(), s))
+ .collect(Collectors.toList()));
+ }
+
+ /**
+ * The given collection is allowed to change while used by the JSP engine but access to it needs to be synchronized.
+ * The engine uses the target field of each Target to store a compiled version of each script. An external reset of
+   * this field (maybe in order to indicate some change in the Target) will cause a re-compilation of the Target.
+ */
+ public JSP(Collection<? extends Target> theTargets) {
+ this.targets = theTargets;
+ ScriptEngineManager engineManager = new ScriptEngineManager();
+ this.engine = engineManager.getEngineByName("nashorn");
+ }
+
+ public Collection<? extends Target> targets() {
+ return this.targets;
+ }
+
+ /* pre-compiles all known targets
+ */
+ protected void compile() throws ProcessorException {
+ synchronized (this.targets) {
+ for (Target t: this.targets)
+ compile(t);
+ }
+ }
+
+ protected CompiledScript compile(Target theTarget) throws ProcessorException {
+
+ CompiledScript cs = null;
+
+ synchronized(theTarget) {
+ try {
+ cs = (CompiledScript)theTarget.getTarget();
+ }
+ catch(ClassCastException ccx) {
+ throw new ProcessorException(theTarget, "Unexpected target content");
+ }
+
+ if (cs == null) {
+ try {
+ cs = ((Compilable)this.engine).compile(theTarget.open());
+ theTarget.setTarget(cs);
+ }
+ catch (IOException iox) {
+ throw new ProcessorException(theTarget, "Failed to read script", iox);
+ }
+ catch (ScriptException sx) {
+ throw new ProcessorException(theTarget, "Failed to compile script", sx);
+ }
+ }
+ }
+
+ return cs;
+ }
+
+ public ContextBuilder process(Catalog theCatalog) {
+ return new ContextBuilder(
+ this.engine.createBindings())
+ //new DelegateBindings(this.engine.getBindings(ScriptContext.ENGINE_SCOPE)))
+ .with("catalog", new JSCatalog(theCatalog));
+ }
+
+ /**
+ */
+ public class ContextBuilder implements ProcessBuilder<JSP> {
+
+ private ScriptContext context;
+
+ protected ContextBuilder(Bindings theBindings) {
+ this.context = new SimpleScriptContext();
+ this.context.setBindings(theBindings, Process.PROCESS_SCOPE /*ScriptContext.ENGINE_SCOPE*/);
+ }
+
+ public ContextBuilder withPreprocessing(BiFunction<Target, ScriptContext, Boolean> thePreprocessing) {
+ this.context.setAttribute("preprocessor", thePreprocessing, Process.PROCESS_SCOPE);
+ return this;
+ }
+
+ public ContextBuilder withPostprocessing(BiFunction<Target, ScriptContext, Boolean> thePostprocessing) {
+ this.context.setAttribute("postprocessor", thePostprocessing, Process.PROCESS_SCOPE);
+ return this;
+ }
+
+ public ContextBuilder with(String theName, Object theValue) {
+ this.context.getBindings(Process.PROCESS_SCOPE).put(theName, theValue);
+ return this;
+ }
+
+ public ContextBuilder withOpt(String theName, Object theValue) {
+ if (theValue != null)
+ this.context.getBindings(Process.PROCESS_SCOPE).put(theName, theValue);
+ return this;
+ }
+
+ public JSProcess process() {
+ return new JSProcess(this.context);
+ }
+
+ }
+
+ /**
+ */
+ public class JSProcess implements Process<JSP> {
+
+ private Report report = new Report();
+ private Iterator<? extends Target> scripts;
+ private JScriptInfo scriptInfo = new JScriptInfo();
+ private Target script; //script currently being evaluated
+ private boolean stopped = false;
+ private ScriptContext context;
+
+ private JSProcess(ScriptContext theContext) {
+
+ this.context = theContext;
+ this.context.getBindings(Process.PROCESS_SCOPE)
+ .put("stop", new Consumer<String>() {
+ public void accept(String theMsg) {
+ JSProcess.this.stopped = true;
+ //log the message??
+ }
+ });
+ this.context.getBindings(Process.PROCESS_SCOPE)
+ .put("report", new Consumer<String>() {
+ public void accept(String theMsg) {
+ JSProcess.this.report.add(new ProcessorException(script, theMsg));
+ }
+ });
+ this.context.getBindings(Process.PROCESS_SCOPE)
+ .put("reportOnce", new Consumer<String>() {
+ public void accept(String theMsg) {
+ JSProcess.this.report.addOnce(new ProcessorException(script, theMsg));
+ }
+ });
+ this.scripts = JSP.this.targets.iterator();
+ }
+
+ protected String infoName(Target theTarget) {
+ String name = theTarget.getName();
+ return name.substring(0, name.indexOf(".")) + "_info";
+ }
+
+ public JSP processor() {
+ return JSP.this;
+ }
+
+ public boolean hasNext() {
+ return !this.stopped && this.scripts.hasNext();
+ }
+
+ protected Target next() {
+ if (hasNext())
+ return this.script = this.scripts.next();
+ else
+ throw new RuntimeException("Process is completed");
+ }
+
+ protected boolean runProcessor(String theName) throws ProcessorException {
+ BiFunction<Target, ScriptContext, Boolean> proc = (BiFunction<Target, ScriptContext, Boolean>)
+ this.context.getAttribute(theName, Process.PROCESS_SCOPE);
+ if (proc != null) {
+ try {
+ return proc.apply(this.script, this.context).booleanValue();
+ }
+ catch (Exception x) {
+					throw new ProcessorException(this.script, theName + " failed", x);
+ }
+ }
+
+ return true;
+ }
+
+ public Process runNext() throws ProcessorException {
+ Target target = next();
+ synchronized(target) {
+ String name = infoName(target);
+ try {
+ if (runProcessor("preprocessor")) {
+ compile(target).eval(this.context);
+ runProcessor("postprocessor");
+ }
+ }
+ catch (ScriptException sx) {
+ throw new ProcessorException(target, "Failed to execute validation script", sx);
+ }
+ }
+
+ return this;
+ }
+
+ public Process runNextSilently() {
+ try {
+ return runNext();
+ }
+ catch (ProcessorException px) {
+ this.report.add(px);
+ }
+ return this;
+ }
+
+ public Report run() {
+ while (hasNext())
+ runNextSilently();
+ return this.report;
+ }
+
+ public void stop() {
+ this.stopped = true;
+ }
+
+ public Report report() {
+ return this.report;
+ }
+ }
+
+ private static class JScriptInfo implements TargetInfo {
+
+ private JSObject info;
+
+ protected JScriptInfo() {
+ }
+
+ protected JScriptInfo setInfo(JSObject theInfo) {
+ this.info = theInfo;
+ return this;
+ }
+
+ public Set<String> entryNames() {
+ return this.info == null ? Collections.EMPTY_SET : this.info.keySet();
+ }
+
+ public boolean hasEntry(String theName) {
+ return this.info == null ? false : this.info.hasMember(theName);
+ }
+
+ public Object getEntry(String theName) {
+ return this.info == null ? null :
+ this.info.hasMember(theName) ? this.info.getMember(theName) : null;
+ }
+ }
+
+
+	/* Exposes the catalog information in a more JavaScript-friendly manner.
+ */
+ public static class JSCatalog {
+
+ private Catalog catalog;
+
+ private JSCatalog(Catalog theCatalog) {
+ this.catalog = theCatalog;
+ }
+
+ /** */
+ public JSTarget[] targets() {
+ return
+ this.catalog.targets()
+ .stream()
+ .map(t -> { return new JSTarget(t); })
+ .toArray(size -> new JSTarget[size]); //or toArray(JSNode[]::new)
+ }
+
+ public JSTarget[] topTargets() {
+ return
+ this.catalog.topTargets()
+ .stream()
+ .map(t -> { return new JSTarget(t); })
+ .toArray(size -> new JSTarget[size]); //or toArray(JSNode[]::new)
+ }
+
+ /** */
+ public String[] types(String theConstruct) {
+ Set<String> names =
+ this.catalog.getConstructTypes(Enum.valueOf(Construct.class,theConstruct)).keySet();
+ return names.toArray(new String[names.size()]);
+ }
+
+ /** */
+ public boolean isDerivedFrom(String theConstruct, String theType, String theSuperType) {
+ return this.catalog.isDerivedFrom(Enum.valueOf(Construct.class,theConstruct), theType, theSuperType);
+ }
+
+ /** */
+ public JSObject facetDefinition(String theConstruct, String theType, String theFacet, String theName) {
+ return new JSElement(theName,
+ this.catalog.getFacetDefinition(
+ Enum.valueOf(Construct.class, theConstruct), theType,
+ Enum.valueOf(Facet.class, theFacet), theName));
+ }
+
+
+ /** */
+/*
+ public JSElement[] targetNodes(Target theTarget) {
+ return
+ this.catalog.getTargetTemplates(theTarget, Construct.Node)
+ .entrySet()
+ .stream()
+ .map(e -> { return new JSElement(e.getKey(),e.getValue()); })
+ .toArray(size -> new JSElement[size]); //or toArray(JSNode[]::new)
+ }
+*/
+
+ public class JSTarget {
+
+ private Target tgt;
+ private JXPathContext jxPath;
+
+ private JSTarget(Target theTarget) {
+ this.tgt = theTarget;
+ this.jxPath = JXPathContext.newContext(this.tgt.getTarget());
+ this.jxPath.setLenient(true);
+ }
+
+ public String getName() { return this.tgt.getName(); }
+
+ public JSElement resolve(String thePath) {
+ Object res = jxPath.getValue(thePath);
+ if (res instanceof Map) {
+ return new JSElement(thePath, (Map)res);
+ }
+ //??
+ return null;
+ }
+
+ public JSElement[] getInputs() {
+
+ Map<String,Map> inputs = (Map<String,Map>)jxPath.getValue("/topology_template/inputs");
+ return (inputs == null) ?
+ new JSElement[0]
+ : inputs.entrySet()
+ .stream()
+ .map(e -> { return new JSElement(e.getKey(),e.getValue()); })
+ .toArray(size -> new JSElement[size]);
+ }
+
+// public JSElement[] getOutputs() {
+// }
+
+ public JSElement getMetadata() {
+ return new JSElement("metadata", (Map)jxPath.getValue("/metadata"));
+ }
+
+ public JSElement[] getNodes() {
+ return
+ JSCatalog.this.catalog.getTargetTemplates(this.tgt, Construct.Node)
+ .entrySet()
+ .stream()
+ .map(e -> { return new JSElement(e.getKey(),e.getValue()); })
+ .toArray(size -> new JSElement[size]); //or toArray(JSElement[]::new)
+ }
+
+// public JSElement[] getPolicies() {
+// }
+
+ }
+
+
+ /*
+ */
+ public class JSElement extends AbstractJSObject {
+
+
+ private String name;
+ private Map def;
+
+ private JSElement(String theName, Object theDef) {
+//System.out.println("JSObject " + theName + ":" + theDef);
+ this.name = theName;
+ this.def = theDef == null ? Collections.emptyMap()
+ : (theDef instanceof Map) ? (Map)theDef
+ : Collections.singletonMap("value",theDef);
+ }
+
+ public String getName() { return this.name; }
+
+ public boolean hasMember(String theMember) {
+// System.out.println("hasMember " + theMember);
+ return this.def.containsKey(theMember);
+ }
+
+ public Object getMember(final String theMember) {
+ Object val = this.def.get(theMember);
+// System.out.println("getMember " + theMember + ": " + val);
+ if (val != null) {
+ if (val instanceof Map) {
+ return new JSElement(theMember, val);
+ /*
+ return ((Map<String,?>)obj).entrySet()
+ .stream()
+ .map((Map.Entry<String,?> e) -> { return new JSElement(e.getKey(),e.getValue()); })
+ .toArray(size -> new JSElement[size]);
+ */
+ }
+
+ if (val instanceof List) {
+ //a property value can be a list of: primitive types or maps (for a user defined type)
+ //requirements are exposed as a list of maps
+ List lval = (List)val;
+ if (lval.get(0) instanceof Map) {
+ return lval
+ .stream()
+ .map((e) -> new JSElement(theMember, e))
+ .toArray(size -> new JSElement[size]);
+
+ /*
+ return val
+ .stream()
+ .map((e) -> {
+ System.out.println("Found in " + theMember + ": " + e);
+ Map.Entry<String,?> re = ((Map<String,?>)e).entrySet().iterator().next();
+ return new JSElement(re.getKey(), re.getValue());
+ })
+ .toArray(size -> new JSElement[size]);
+ */
+ }
+ }
+
+ return val;
+ }
+ else {
+ if ("name".equals(theMember))
+ return this.name;
+ if ("toString".equals(theMember))
+ return _toString;
+ if ("hasOwnProperty".equals(theMember))
+ return _hasOwnProperty;
+// System.out.println("Could not find " + theMember + " in " + this.def);
+ return super.getMember(theMember);
+ }
+ }
+ /* TODO: we do not expose 'name' in here */
+ public Set<String> keySet() {
+// System.out.println("keySet: " + this.def.keySet());
+ return this.def.keySet();
+ }
+
+/* having problem with getDefaultValue in Jenkins builds */
+/*
+ public Object getDefaultValue(Class<?> hint) {
+// System.out.println("getDefaultValue: " + hint);
+ if (String.class.equals(hint))
+ return this.def.toString();
+
+ return super.getDefaultValue(hint);
+ }
+*/
+
+ }
+
+
+ static final JSObject _toString =
+ new TracerJSObject("_toString") {
+ public Object call(Object thiz, Object... args) {
+ return ((JSElement)thiz).def.toString();
+ }
+
+ public boolean isFunction() { return true; }
+ };
+
+ static final JSObject _hasOwnProperty =
+ new TracerJSObject("_hasOwnProperty") {
+ public Object call(Object thiz, Object... args) {
+// System.out.println("_hasOwnProperty: " + args.length);
+ return ((JSElement)thiz).def.containsKey(args[0]);
+ }
+
+ public boolean isFunction() { return true; }
+ };
+
+ }//JSCatalog
+
+
+
+ private static class TracerJSObject extends AbstractJSObject {
+
+ private String mark;
+
+ TracerJSObject(String theMark) {
+ this.mark = theMark;
+ }
+
+ public Object call(Object thiz, Object... args) {
+ System.out.println(this.mark + ":call");
+ return super.call(thiz, args);
+ }
+
+ public Object newObject(Object... args) {
+ System.out.println(this.mark + ":newObject");
+ return super.newObject(args);
+ }
+
+ public Object eval(String s) {
+ System.out.println(this.mark + ":eval");
+ return super.eval(s);
+ }
+
+ public Object getMember(String name) {
+ System.out.println(this.mark + ":getMember");
+ return super.getMember(name);
+ }
+
+ public Object getSlot(int index) {
+ System.out.println(this.mark + ":getSlot");
+ return super.getSlot(index);
+ }
+
+ public boolean hasMember(String name) {
+ System.out.println(this.mark + ":hasMember");
+ return super.hasMember(name);
+ }
+
+ public boolean hasSlot(int slot) {
+ System.out.println(this.mark + ":hasSlot");
+ return super.hasSlot(slot);
+ }
+
+ public void removeMember(String name) {
+ System.out.println(this.mark + ":removeMember");
+ super.removeMember(name);
+ }
+
+ public void setMember(String name, Object value) {
+ System.out.println(this.mark + ":setMember");
+ super.setMember(name,value);
+ }
+
+ public void setSlot(int index, Object value) {
+ System.out.println(this.mark + ":setSlot");
+ super.setSlot(index,value);
+ }
+
+ public Set<String> keySet() {
+ System.out.println(this.mark + "JSObject:keySet");
+ return super.keySet();
+ }
+
+ public Collection<Object> values() {
+ System.out.println(this.mark + ":values");
+ return super.values();
+ }
+
+ public boolean isInstance(Object instance) {
+ System.out.println(this.mark + ":isInstance");
+ return super.isInstance(instance);
+ }
+
+ public boolean isInstanceOf(Object clazz) {
+ System.out.println(this.mark + ":isInstanceOf");
+ return super.isInstance(clazz);
+ }
+
+ public String getClassName() {
+ System.out.println(this.mark + ":getClassName");
+ return super.getClassName();
+ }
+
+ public boolean isFunction() {
+ System.out.println(this.mark + ":isFunction");
+ return super.isFunction();
+ }
+
+ public boolean isStrictFunction() {
+ System.out.println(this.mark + ":isStrictFunction");
+ return super.isStrictFunction();
+ }
+
+ public boolean isArray() {
+ System.out.println(this.mark + ":isArray");
+ return super.isArray();
+ }
+
+ public Object getDefaultValue(Class<?> hint) {
+ System.out.println(this.mark + ":getDefaultValue(" + hint + ")");
+ return super.getDefaultValue(hint);
+ }
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Process.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Process.java
new file mode 100644
index 0000000..865de06
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Process.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.processing;
+
+import java.util.function.Predicate;
+
+import org.onap.tosca.checker.Report;
+
+/**
+ *
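+ * A Process is one run of a {@link Processor} over its targets: it can be driven step by step
+ * (hasNext/runNext) or to completion (run), with any problems collected in a {@link Report}.
+ *
+ * A minimal driving sketch (the builder is assumed to come from {@code Processor.process(catalog)}):
+ * <div>
+ * {@code
+ * Process p = builder.process();
+ * while (p.hasNext())
+ *   p = p.runNext();              //may throw ProcessorException
+ * Report report = p.report();
+ * }
+ * </div>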
+ */
+public interface Process<T extends Processor> {
+
+ public static final int PROCESS_SCOPE = 100;
+
+ /**
+ * the processor running this process
+ */
+ public T processor();
+
+ /* */
+ public boolean hasNext();
+
+ /* */
+ public Process runNext() throws ProcessorException;
+
+ /* execute all steps to completion
+ */
+ public Report run();
+
+ /* execution report
+ */
+ public Report report();
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessBuilder.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessBuilder.java
new file mode 100644
index 0000000..cb0b455
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessBuilder.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.processing;
+
+import org.onap.tosca.checker.Report;
+
+
+/**
+ * Accumulates named bindings for a {@link Process} and creates it.
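+ *
+ * A usage sketch (the processor and catalog are assumed to exist already; the binding name is chosen
+ * for illustration):
+ * <div>
+ * {@code
+ * Report report = processor.process(catalog)
+ *                          .with("origin", "post-check")
+ *                          .run();
+ * }
+ * </div>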
+ */
+public interface ProcessBuilder<T extends Processor> {
+
+ /* */
+ public ProcessBuilder<T> with(String theName, Object theValue);
+
+ /* */
+ public ProcessBuilder<T> withOpt(String theName, Object theValue);
+
+ /* */
+ public Process<T> process();
+
+ /* */
+ default public Report run() {
+ return process()
+ .run();
+ }
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Processor.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Processor.java
new file mode 100644
index 0000000..94904d2
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Processor.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.processing;
+
+import org.onap.tosca.checker.Catalog;
+
+
+/**
+ * Just in case you might want to do something with a template (set) once it has been checked.
+ */
+public interface Processor<T extends Processor<T>> {
+
+ /* */
+ public ProcessBuilder<T> process(Catalog theCatalog);
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessorException.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessorException.java
new file mode 100644
index 0000000..2d9cd22
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessorException.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.processing;
+
+import org.onap.tosca.checker.Target;
+import org.onap.tosca.checker.CheckerException;
+
+
+/**
+ */
+public class ProcessorException extends CheckerException {
+
+ private Target target;
+
+ public ProcessorException(Target theTarget, String theMsg, Throwable theCause) {
+ super(theMsg, theCause);
+ this.target = theTarget;
+ }
+
+ public ProcessorException(Target theTarget, String theMsg) {
+ super(theMsg);
+ this.target = theTarget;
+ }
+
+ public Target getTarget() {
+ return this.target;
+ }
+
+ @Override
+ public String getMessage() {
+ return this.target + ":" + super.getMessage() + (getCause() == null ? "" : ("(" + getCause() + ")"));
+ }
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/TargetInfo.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/TargetInfo.java
new file mode 100644
index 0000000..f330df5
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/TargetInfo.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker.processing;
+
+import java.util.Set;
+
+
+/**
+ * Exposes target properties. How they are obtained or calculated is not of importance here.
+ */
+public interface TargetInfo {
+
+ /** */
+ public Set<String> entryNames();
+
+ /** */
+ public boolean hasEntry(String theName);
+
+ /** */
+ public Object getEntry(String theName);
+
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/package-info.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/package-info.java
new file mode 100644
index 0000000..7a2d62f
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/package-info.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2017 <AT&T>. All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+
+/**
+ * After checking and cataloging we assume some applications will do some sort of post-processing.
+ * While all such processing can be done in Java by navigating catalog information, this package focuses
+ * on processing based on different 'environments' through a so-called 'processor'. The package offers one
+ * implementation, a JavaScript processor that exposes Checker catalog information to JavaScript code executed
+ * through Nashorn, the JavaScript engine built into Java 8.
+ *
+ */
+
+package org.onap.tosca.checker.processing;