summaryrefslogtreecommitdiffstats
path: root/dcaedt_catalog/db
diff options
context:
space:
mode:
authorStone, Avi (as206k) <as206k@att.com>2018-04-12 15:46:31 +0300
committerStone, Avi (as206k) <as206k@att.com>2018-04-12 15:49:38 +0300
commit5032434b101f25fa44d2e1f8dc8393e30af1ed4f (patch)
tree2dc7d37a8048e025c7412af080640da4c9a22b65 /dcaedt_catalog/db
parent2205633792f95f46a02bbf8f87f0c2637265d924 (diff)
DCAE-D be initial commit
DCAE-D be initial commit Issue-ID: SDC-1218 Change-Id: Id18ba96c499e785aa9ac395fbaf32d57f08c281b Signed-off-by: Stone, Avi (as206k) <as206k@att.com>
Diffstat (limited to 'dcaedt_catalog/db')
-rw-r--r--dcaedt_catalog/db/pom.xml149
-rw-r--r--dcaedt_catalog/db/src/main/java/org/onap/sdc/dcae/db/neo4j/Modeled.java1980
-rw-r--r--dcaedt_catalog/db/src/main/resources/tosca-schema.yaml1231
-rw-r--r--dcaedt_catalog/db/src/main/resources/tosca-storage-schema.yaml37
4 files changed, 3397 insertions, 0 deletions
diff --git a/dcaedt_catalog/db/pom.xml b/dcaedt_catalog/db/pom.xml
new file mode 100644
index 0000000..8a0e1f9
--- /dev/null
+++ b/dcaedt_catalog/db/pom.xml
@@ -0,0 +1,149 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>org.onap.sdc.dcae</groupId>
+		<artifactId>DCAE-DT-Catalog</artifactId>
+		<version>1806.0.1-SNAPSHOT</version>
+	</parent>
+	<artifactId>DCAE-DT-Catalog-DB</artifactId>
+	<packaging>jar</packaging>
+	<name>DCAE DT Catalog database</name>
+
+	<build>
+		<sourceDirectory>src/main/java</sourceDirectory>
+		<plugins>
+			<!-- compile for Java 8; encoding inherited from the parent pom -->
+			<plugin>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>3.1</version>
+				<configuration>
+					<source>1.8</source>
+					<target>1.8</target>
+					<encoding>${project.build.sourceEncoding}</encoding>
+				</configuration>
+			</plugin>
+			<!-- copy runtime dependencies under target/deps for deployment -->
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-dependency-plugin</artifactId>
+				<version>2.10</version>
+				<executions>
+					<execution>
+						<id>copy-dependencies</id>
+						<phase>package</phase>
+						<goals>
+							<goal>copy-dependencies</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>${project.build.directory}/deps</outputDirectory>
+							<overWriteReleases>false</overWriteReleases>
+							<overWriteSnapshots>false</overWriteSnapshots>
+							<overWriteIfNewer>true</overWriteIfNewer>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+			<!-- generate ${buildNumber} used in the manifests below -->
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>buildnumber-maven-plugin</artifactId>
+				<version>1.4</version>
+				<executions>
+					<execution>
+						<phase>validate</phase>
+						<goals>
+							<goal>create</goal>
+						</goals>
+					</execution>
+				</executions>
+				<configuration>
+					<doCheck>false</doCheck>
+					<doUpdate>false</doUpdate>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-jar-plugin</artifactId>
+				<version>2.1</version>
+				<configuration>
+					<archive>
+						<manifest>
+							<addDefaultImplementationEntries>true</addDefaultImplementationEntries>
+						</manifest>
+						<manifestEntries>
+							<Implementation-Build>${buildNumber}</Implementation-Build>
+						</manifestEntries>
+					</archive>
+				</configuration>
+			</plugin>
+
+			<!-- build an executable fat jar with Modeled as the entry point -->
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-assembly-plugin</artifactId>
+				<version>2.6</version>
+				<configuration>
+					<descriptorRefs>
+						<descriptorRef>jar-with-dependencies</descriptorRef>
+					</descriptorRefs>
+					<archive>
+						<manifest>
+							<mainClass>org.onap.sdc.dcae.db.neo4j.Modeled</mainClass>
+						</manifest>
+						<manifestEntries>
+							<Implementation-Build>${buildNumber}</Implementation-Build>
+						</manifestEntries>
+					</archive>
+				</configuration>
+				<executions>
+					<execution>
+						<id>make-assembly</id> <!-- this is used for inheritance merges -->
+						<phase>package</phase> <!-- bind to the packaging phase -->
+						<goals>
+							<goal>single</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+
+		</plugins>
+	</build>
+	<dependencies>
+		<dependency>
+			<groupId>org.apache.httpcomponents</groupId>
+			<artifactId>httpasyncclient</artifactId>
+			<version>4.1</version>
+		</dependency>
+		<dependency>
+			<groupId>commons-io</groupId>
+			<artifactId>commons-io</artifactId>
+			<version>2.4</version>
+		</dependency>
+		<dependency>
+			<groupId>commons-cli</groupId>
+			<artifactId>commons-cli</artifactId>
+			<version>1.3</version>
+		</dependency>
+		<dependency>
+			<groupId>commons-jxpath</groupId>
+			<artifactId>commons-jxpath</artifactId>
+			<version>1.3</version>
+		</dependency>
+		<dependency>
+			<groupId>com.google.guava</groupId>
+			<artifactId>guava</artifactId>
+			<version>17.0</version>
+		</dependency>
+		<dependency>
+			<groupId>org.yaml</groupId>
+			<artifactId>snakeyaml</artifactId>
+			<version>1.17</version>
+		</dependency>
+		<dependency>
+			<groupId>org.json</groupId>
+			<artifactId>json</artifactId>
+			<version>20160212</version>
+		</dependency>
+		<dependency>
+			<groupId>com.github.wnameless</groupId>
+			<artifactId>json-flattener</artifactId>
+			<version>0.2.2</version>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/dcaedt_catalog/db/src/main/java/org/onap/sdc/dcae/db/neo4j/Modeled.java b/dcaedt_catalog/db/src/main/java/org/onap/sdc/dcae/db/neo4j/Modeled.java
new file mode 100644
index 0000000..6b2f395
--- /dev/null
+++ b/dcaedt_catalog/db/src/main/java/org/onap/sdc/dcae/db/neo4j/Modeled.java
@@ -0,0 +1,1980 @@
+/*
+ * AT&T - PROPRIETARY
+ * THIS FILE CONTAINS PROPRIETARY INFORMATION OF
+ * AT&T AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN
+ * ACCORDANCE WITH APPLICABLE AGREEMENTS.
+ *
+ * Copyright (c) 2014 AT&T Knowledge Ventures
+ * Unpublished and Not for Publication
+ * All Rights Reserved
+ */
+package org.onap.sdc.dcae.db.neo4j;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.List;
+import java.util.LinkedList;
+import java.util.Collections;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.codec.binary.Base64;
+
+import org.apache.commons.jxpath.JXPathContext;
+import org.apache.commons.jxpath.JXPathException;
+
+import org.apache.http.Header;
+import org.apache.http.HttpHeaders;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.json.JSONArray;
+
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.yaml.snakeyaml.Yaml;
+
+import com.google.common.collect.Table;
+import com.google.common.collect.HashBasedTable;
+
+/* A few less obvious design choices:
+ * - representing properties across type hierarchies (same for requirements
+ * and capabilities, and will be for attributes and interfaces when we'll
+ * add them): we attach to each type only those properties it declares (such a
+ * declaration might be the re-definition of a property defined by a supertype).
+ * Calculating the set of properties for a type (i.e. the one it declares plus
+ * the ones it inherits, with respect to re-definitions) is a 2 step process:
+ * 1. run a query matching all properties across the type's hierarchy, from
+ * leaf to root type (neo's job)
+ * 2. collecting them in a set that accumulates them with respect to
+ * re-definition (model catalog client library job)
+ * A (viable) alternative would have been to calculate the entire property set
+ * at model import time and associate it with the type node. It would simplify
+ * the query and processing in the catalog API. It has the drawback of making
+ * the reverse process (exporting a yaml model from neo) tedious.
+ * As we get a better sense of where the optimizations are needed this might
+ * be a change to be made ..
+ *
+ *
+ * - representing requirements and capability as nodes. At first glance
+ * both can be represented as edges pointing from a Type Node or Template Node
+ * to another Type Node or Template Node. While this is true for capabilities
+ * it is not so for requirements: a requirement could point to a capability
+ * of a Type Node, i.e. it is a hyperedge between a Type Node (or Template Node),
+ * another Type Node (the target) and a capability of the target. As such, the
+ * requirement ends up being represented as a node and the capability will need
+ * to do the same in order to be able to be pointed at (and for the sake of
+ * uniformity ..).
+ *
+ *
+ */
+/**
+ * Command-line tool that imports TOSCA yaml templates into a neo4j database
+ * (reached through its HTTP endpoint). See the design notes above for the
+ * representation choices.
+ */
+public class Modeled {
+
+  // shared ONAP logger singletons
+  private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+  private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+  // builder reused for every HTTP call towards neo4j
+  private static HttpClientBuilder httpClientBuilder =
+      HttpClientBuilder.create();
+  // NOTE(review): this usage text does not match the options actually
+  // declared in main() ('-t', '-a', '-i', '-l', '--ignore') -- looks stale
+  private static String USAGE = "oil oil_stylesheet_path | bigdata | aws | awsdata input_file customer";
+
+  // namespaces whose unresolved type references are tolerated (logged only);
+  // extended at runtime through the '--ignore' command line option
+  private static List<String> ignoreMissing = new LinkedList<String>();
+
+  static {
+    Collections.addAll(ignoreMissing,
+        "tosca.datatypes",
+        "tosca.capabilities",
+        "tosca.relationships",
+        "tosca.interfaces",
+        "tosca.nodes",
+        "tosca.artifacts",
+        "tosca.policies",
+        "tosca.groups");
+  }
+
+ public static void main(String[] theArgs) {
+
+ CommandLineParser parser = new BasicParser();
+
+ // create the Options
+ Options options = new Options();
+ options.addOption(OptionBuilder.
+ withArgName("target")
+ .withLongOpt("target")
+ .withDescription("target ice4j database uri")
+ .hasArg()
+ .isRequired()
+ .create('t'));
+
+ options.addOption(OptionBuilder.
+ withArgName("action")
+ .withLongOpt("action")
+ .withDescription("one of import, annotate, list, remove")
+ .hasArg()
+ .isRequired()
+ .create('a'));
+
+ options.addOption(
+ OptionBuilder.withArgName("input")
+ .withLongOpt("input")
+ .withDescription(
+ "for import/annotate: the tosca template file, " +
+ "for list: an optional json filter, " +
+ "for remove: the template id")
+ .hasArgs()
+ .create('i')).addOption(
+ OptionBuilder.withArgName("labels")
+ .withLongOpt("labels")
+ .withDescription(
+ "for annotate: the ':' sepatated list of annotation labels")
+ .hasArgs()
+ .create('l'));
+
+ options.addOption(OptionBuilder.
+ withArgName("ignore")
+ .withLongOpt("ignore")
+ .isRequired(false)
+ .withDescription(
+ "for annotate: the ':' sepatated list of namespaces who's missing constructs can be ignored")
+ .hasArgs()
+ .create());
+
+
+ CommandLine line;
+ try {
+ line = parser.parse(options, theArgs);
+ } catch (ParseException exp) {
+ errLogger.log(LogLevel.ERROR, Modeled.class.getName(), exp.getMessage());
+ HelpFormatter formatter = new HelpFormatter();
+ formatter.printHelp("import", options);
+ return;
+ }
+
+ String ignores = line.getOptionValue("ignore");
+ if (ignores != null)
+ Collections.addAll(ignoreMissing, ignores.split(":"));
+
+ Modeled modeled = new Modeled();
+ try {
+ modeled.setNeoUri(new URI(line.getOptionValue("target")));
+ } catch (URISyntaxException urisx) {
+ errLogger.log(LogLevel.ERROR, Modeled.class.getName(), "Invalid target specification: {}", urisx);
+ return;
+ }
+
+ try {
+ loadStorageSpec();
+
+ String action = line.getOptionValue("action");
+ if ("import".equals(action)) {
+ modeled.importTemplate(line.getOptionValue("input"));
+ } else if ("annotate".equals(action)) {
+ modeled.annotateItem(line.getOptionValue("input"), line.getOptionValue("labels"));
+ } else if ("list".equals(action)) {
+ modeled.listTemplates(line.getOptionValue("input"));
+ } else if ("remove".equals(action)) {
+ modeled.removeTemplate(line.getOptionValue("input"));
+ } else {
+ HelpFormatter formatter = new HelpFormatter();
+ formatter.printHelp("import", options);
+ }
+ } catch (Exception x) {
+ errLogger.log(LogLevel.ERROR, Modeled.class.getName(), x.getMessage());
+ }
+ }
+
+  // maps (construct, name) to the neo4j node id created for it
+  private static Tracker<String> tracker = new Tracker<String>();
+  // tosca schema merged with storage directives; set by loadStorageSpec()
+  private static Map toscaStorageSpec;
+
+ private static void loadStorageSpec() {
+ toscaStorageSpec = (Map) new Yaml().load(
+ Modeled.class.getClassLoader().getResourceAsStream("tosca-schema.yaml"));
+
+ Map storageSpec = (Map) new Yaml().load(
+ Modeled.class.getClassLoader().getResourceAsStream("tosca-storage-schema.yaml"));
+
+ JXPathContext jxPath = JXPathContext.newContext(toscaStorageSpec);
+ for (Iterator<Map.Entry<String, Object>> ces =
+ storageSpec.entrySet().iterator();
+ ces.hasNext(); ) {
+ Map.Entry<String, Object> ce = ces.next();
+ try {
+ Map m = (Map) jxPath.getValue(ce.getKey());
+ if (m == null) {
+ debugLogger.log(LogLevel.DEBUG, Modeled.class.getName(), "No schema entry '{}'", ce.getKey());
+ continue;
+ }
+
+ m.putAll((Map) ce.getValue());
+ } catch (JXPathException jxpx) {
+ errLogger.log(LogLevel.WARN, Modeled.class.getName(), "Failed to apply storage info {}", jxpx);
+ }
+ }
+ }
+
+
+  // shared empty JSON payload for edges/nodes that carry no properties
+  private static JSONObject EMPTY_JSON_OBJECT = new JSONObject();
+
+  // endpoint of the target neo4j instance; set from the '--target' option
+  private URI neoUri = null;
+
+  // instances are only created by main()
+  private Modeled() {
+  }
+
+  private void setNeoUri(URI theUri) {
+    this.neoUri = theUri;
+  }
+
+  public URI getNeoUri() {
+    return this.neoUri;
+  }
+
+ /* Experimental in nature. I was reluctant creating another node to represent
+ * the set of constraints as they're integral part of the property (or other
+ * artifact) they're related to. I was also looking for a representation
+ * that would easily be processable into a TOSCA abstraction in the
+ * Catalog API. So ... we pack all the constraints as a JSON string and store
+ * them as a single property of the TOSCA artifact they belog to.
+ * Highs: easily un-winds in an object
+ * Lows: can't write query selectors based on constraints values ..
+ //the TOSCA/yaml spec exposes constraints as a List .. where each
+ //entry is a Map .. why??
+ */
+ private static String yamlEncodeConstraints(List theConstraints) {
+ Map allConstraints = new HashMap();
+ for (Object c : theConstraints) {
+ allConstraints.putAll((Map) c);
+ //this would be the place to add dedicate processing of those
+ //constraints with 'special' values, i.e. in_range: dual scalar,
+ //valid_values: list
+ }
+ return JSONObject.valueToString(allConstraints);
+ }
+
+ /* TODO: attributes handling to be added, similar to properties.
+ */
+ private void yamlNodeProperties(String theNodeId,
+ Map<String, Object> theProperties,
+ NeoTransaction theTrx)
+ throws IOException {
+
+ for (Map.Entry<String, Object> propertyEntry : theProperties.entrySet()) {
+ String propName = propertyEntry.getKey();
+ Object propObject = propertyEntry.getValue();
+
+ Map propValues;
+ if (propObject instanceof Map) {
+ propValues = (Map) propObject;
+ } else {
+ //valuation, not of interest here
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoNode, unknown property representation {} for {}, node {}", propObject.getClass(), propObject, theNodeId);
+ continue;
+ }
+
+ String constraintsValue = null;
+ if (propValues.containsKey("constraints")) {
+ constraintsValue = yamlEncodeConstraints(
+ (List) propValues.get("constraints"));
+ }
+
+ String neoPropId = neoCreateNode(
+ theTrx, false,
+ new JSONObject()
+ .put("name", propName)
+ .put("type", propValues.getOrDefault("type", "string"))
+ .put("required", propValues.getOrDefault("required", Boolean.TRUE))
+ .putOpt("default", propValues.get("default"))
+ .putOpt("description", propValues.get("description"))
+ .putOpt("status", propValues.get("status"))
+ .putOpt("constraints", constraintsValue),
+ "TOSCA", "Property");
+
+ neoEdge(theTrx, false,
+ neoPropId,
+ theNodeId,
+ EMPTY_JSON_OBJECT,
+ "PROPERTY_OF");
+ }
+
+ }
+
+ private void yamlNodeTypeCapabilities(String theNodeId,
+ Map<String, Object> theCapabilities,
+ NeoTransaction theTrx)
+ throws IOException {
+
+ for (Map.Entry<String, Object> capability : theCapabilities.entrySet()) {
+ String capabilityName = capability.getKey();
+ Object capabilityValue = capability.getValue();
+
+ String capabilityType = null,
+ capabilityDesc = null;
+ Map<String, Object> capabilitySpec = null;
+
+ if (capabilityValue instanceof String) {
+ //short notation was used, we get the name of a capability type
+ capabilityType = (String) capabilityValue;
+
+ capabilitySpec = Collections.singletonMap("type", capabilityType);
+ } else if (capabilityValue instanceof Map) {
+ //extended notation
+ capabilitySpec = (Map<String, Object>) capabilityValue;
+
+ capabilityType = (String) capabilitySpec.get("type");
+ //cannot be missing
+ if (capabilityType == null) {
+ //ERROR!!
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoNode, missing capability type in {} for node {}", capabilitySpec, theNodeId);
+ continue; //rollback ..
+ }
+ capabilityDesc = (String) capabilitySpec.get("description");
+ }
+
+ //
+ String anonCapabilityTypeId = null;
+ if (capabilitySpec.containsKey("properties")) {
+ //we need an anonymous capability type (augmentation)
+ //or they could be added to the 'Capabillity' node but anonymous
+ //types make processing more uniform
+ anonCapabilityTypeId =
+ yamlAnonymousType(capabilitySpec,
+ capabilityType,
+//not a very nice owner string as theNodeId is cryptic (we should use
+//node name but do not have it here ..
+ theNodeId + "#" + capabilityName,
+ true,
+ false,
+ theTrx);
+ }
+
+ JSONObject capabilityDef = new JSONObject()
+ .put("name", capabilityName)
+ .putOpt("description", capabilityDesc);
+ if (capabilitySpec != null) {
+ List occurrences = (List) capabilitySpec.get("occurrences");
+ if (occurrences != null) {
+ capabilityDef.put("occurrences", encodeRange(occurrences));
+ }
+ List valid_source_types = (List) capabilitySpec.get("valid_source_types");
+ if (valid_source_types != null) {
+ capabilityDef.put("validSourceTypes",
+ new JSONArray(valid_source_types));
+ }
+ }
+
+ String capabilityId = neoCreateNode(
+ theTrx, false,
+ capabilityDef,
+ "TOSCA", "Capability");
+ neoEdge(theTrx, false,
+ capabilityId,
+ theNodeId,
+ EMPTY_JSON_OBJECT,
+ "CAPABILITY_OF");
+
+ if (anonCapabilityTypeId != null) {
+ neoEdge(theTrx, false,
+ capabilityId,
+ anonCapabilityTypeId,
+ new JSONObject()
+ .put("name", capabilityName)
+ .putOpt("description", capabilityDesc),
+ "FEATURES"/* TARGETS */);
+ //no reason this one would point to a non-existing capability as we just created one
+ } else {
+ if (null == neoEdge(theTrx, false,
+ capabilityId,
+ "Type",
+ new JSONObject()
+ .put("name", capabilityType),
+ new JSONObject()
+ .put("name", capabilityName)
+ .putOpt("description", capabilityDesc),
+ "FEATURES"/* TARGETS */)) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeCapabilities, Node {}, capability {} (id: {}) seems to point to invalid capability type: {}", theNodeId, capabilityName, capabilityId, capabilityType);
+ ignoreMissing(capabilityType);
+ }
+ }
+
+ }
+
+ }
+
+ private void yamlNodeTypeRequirements(
+ String theNodeTypeId,
+ List<Map<String, Object>> theRequirements,
+ NeoTransaction theTrx)
+ throws IOException {
+
+ for (Map<String, Object> arequirement : theRequirements) {
+ //supposed to have only one entry
+ Map.Entry<String, Object> requirement =
+ arequirement.entrySet().iterator().next();
+
+ String requirementName = requirement.getKey();
+ Object requirementValue = requirement.getValue();
+
+ String targetNode = null,
+ targetCapability = null,
+ targetRelationship = null;
+ Map<String, Object> requirementSpec = null;
+
+ if (requirementValue instanceof String) {
+ //short form, points to a capability type
+ targetCapability = (String) requirementValue;
+ } else if (requirementValue instanceof Map) {
+ //extended notation
+ requirementSpec = (Map<String, Object>) requirementValue;
+
+ targetCapability = (String) requirementSpec.get("capability");
+ targetNode = (String) requirementSpec.get("node");
+ //this assumes a short form for the relationship specification
+ //it can actually be a map (indicating the relationship type and the
+ //additional interface definitions).
+ targetRelationship = (String) requirementSpec.get("relationship");
+ }
+
+ if (targetCapability == null) {
+ throw new IOException(theNodeTypeId + "missing capability type");
+ }
+
+ JSONObject requirementDef = new JSONObject()
+ .put("name", requirementName);
+ if (requirementSpec != null) {
+ List occurrences = (List) requirementSpec.get("occurrences");
+ if (occurrences != null) {
+ requirementDef.put("occurrences", encodeRange(occurrences));
+ }
+ }
+
+ String requirementId = neoCreateNode(
+ requirementDef,
+ "TOSCA", "Requirement");
+ neoEdge(theTrx, false,
+ requirementId,
+ theNodeTypeId,
+ EMPTY_JSON_OBJECT,
+ "REQUIREMENT_OF");
+
+ //we're not verifying here that this a capability type .. just a type
+ if (null == neoEdge(theTrx, false,
+ requirementId,
+ "Type",
+ new JSONObject()
+ .put("name", targetCapability),
+ EMPTY_JSON_OBJECT,
+ "CAPABILITY")) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeRequirements, Node {}, requirement {} (id: {}) seems to point to invalid capability type: {}", theNodeTypeId, requirementName, requirementId, targetCapability);
+ }
+
+ if (targetNode != null) {
+ //points to a node type
+ if (null == neoEdge(theTrx, false,
+ requirementId,
+ "Type",
+ new JSONObject()
+ .put("name", targetNode),
+ EMPTY_JSON_OBJECT,
+ "REQUIRES")) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeRequirements, Node {}, requirement {} (id: {}) seems to point to invalid capability type: {}", theNodeTypeId, requirementName, requirementId, targetCapability);
+ }
+ }
+
+ if (targetRelationship != null) {
+ //points to a relationship type
+ if (null == neoEdge(theTrx, false,
+ requirementId,
+ "Type",
+ new JSONObject()
+ .put("name", targetRelationship),
+ EMPTY_JSON_OBJECT,
+ "RELATIONSHIP")) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeRequirements, Node {}, requirement {} (id: {}) seems to point to invalid relationship type: {}", theNodeTypeId, requirementName, requirementId, targetRelationship);
+ }
+ }
+ }
+ }
+
+  /*
+   * Handles the requirement assignments of a node template: each assignment
+   * becomes a 'Requirement' node tied to the template (REQUIREMENT_OF) and
+   * to its target node/capability/relationship.
+   */
+  private void toscaRequirementsAssignment(
+      String theNodeId,
+      List<Map<String, Object>> theRequirements,
+      NeoTransaction theTrx)
+      throws IOException {
+
+    for (Map<String, Object> arequirement : theRequirements) {
+      //supposed to have only one entry
+      Map.Entry<String, Object> requirement =
+          arequirement.entrySet().iterator().next();
+
+      String requirementName = requirement.getKey();
+      Object requirementValue = requirement.getValue();
+
+      String targetNode = null,
+          targetCapability = null,
+          targetRelationship = null;
+      //TODO: targetFilter
+
+      Map<String, Object> requirementSpec = null;
+
+      if (requirementValue instanceof String) {
+        //short notation was used, we get the name of a local node
+        targetNode = (String) requirementValue;
+      } else if (requirementValue instanceof Map) {
+        //extended notation
+        requirementSpec = (Map<String, Object>) requirementValue;
+
+        targetNode = (String) requirementSpec.get("node");
+        targetCapability = (String) requirementSpec.get("capability");
+        targetRelationship = (String) requirementSpec.get("relationship");
+      }
+
+      /* TODO: add targetFilter definition in here (most likely place)
+       */
+      String requirementId = neoCreateNode(
+          theTrx, false,
+          new JSONObject()
+              .put("name", requirementName),
+          "TOSCA", "Requirement");
+
+      neoEdge(theTrx, false,
+          requirementId,
+          theNodeId,
+          EMPTY_JSON_OBJECT,
+          "REQUIREMENT_OF");
+
+      String targetNodeTemplate = null;
+      if (targetNode != null) {
+        //check if the target is a node within the template (in which case the
+        //requirement is really defined by that node type. i.e. its type's
+        //capabilities
+        targetNodeTemplate = tracker.lookupTemplate("Node", targetNode);
+        if (targetNodeTemplate != null) {
+          neoEdge(theTrx, false,
+              requirementId,
+              targetNodeTemplate,
+              new JSONObject()
+                  .put("name", requirementName),
+              "REQUIRES" /* TARGETS */);
+        } else {
+          //if not a local node template then it must be node type
+          if (null == neoEdge(theTrx, false,
+              requirementId,
+              "Type",
+              new JSONObject()
+                  .put("name", targetNode),
+              EMPTY_JSON_OBJECT,
+              "REQUIRES")) {
+            errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeRequirements, Node {}, requirement {} (id: {}) seems to point to invalid node type: {}", theNodeId, requirementName, requirementId, targetNode);
+          }
+        }
+      }
+
+      if (targetCapability != null) {
+        /*
+         * Can point to a capability of the targetNode (template or type,
+         * whatever was specified) or to a capability type;
+         */
+        if (targetNode != null) {
+          // link the requirement to the named capability of the target
+          // node template or node type; the Cypher below concatenates
+          // internal neo ids (numeric) and yaml-sourced names -- assumes
+          // names contain no '"' characters (TODO: verify/escape)
+          String stmt = null;
+          if (targetNodeTemplate != null) {
+            //a capability of a local node template
+            //TODO: could be a capability type of a local node (and is up to the
+            //orchestrator to pick) given that the target node has at least one
+            //capability of that type
+            stmt =
+                "MATCH (c:Capability)-[:CAPABILITY_OF]->(n:Node), (r:Requirement) " +
+                "WHERE id(n)=" + targetNodeTemplate + " " +
+                "AND c.name = \"" + targetCapability + "\" " +
+                "AND id(r)=" + requirementId + " " +
+                "MERGE (r)-[rq:REQUIRES_CAPABILITY]->(c) " +
+                "RETURN id(rq)";
+          } else {
+            //a capability of the node type
+            stmt =
+                "MATCH (c:Type:Capability)-[:CAPABILITY_OF]->(t:Type), (r:Requirement) " +
+                "WHERE t.name = \"" + targetNode + "\" " +
+                "AND c.name = \"" + targetCapability + "\" " +
+                "AND id(r)=" + requirementId + " " +
+                "MERGE (r)-[rq:REQUIRES_CAPABILITY]->(c) " +
+                "RETURN id(rq)";
+          }
+          if (null == neoId(theTrx
+              .statement(
+                  new JSONObject()
+                      .put("statement", stmt))
+              .execute()
+              .result())) {
+            errLogger.log(LogLevel.WARN, this.getClass().getName(), "toscaRequirementsAssignment, Node {}, requirement {} (id: {}) seems to point to invalid node capability: {}", theNodeId, requirementName, requirementId, targetCapability);
+          }
+        } else {
+          // no target node: the capability can only be a capability type
+          if (null == neoEdge(theTrx, false,
+              requirementId,
+              "Type",
+              new JSONObject()
+                  .put("name", targetCapability),
+              EMPTY_JSON_OBJECT,
+              "REQUIRES_CAPABILITY")) {
+            errLogger.log(LogLevel.WARN, this.getClass().getName(), "toscaRequirementsAssignment, Node {}, requirement {} (id: {}) seems to point to invalid capability type: {}", theNodeId, requirementName, requirementId, targetCapability);
+          }
+        }
+      }
+
+      if (targetRelationship != null) {
+        if (null == neoEdge(theTrx, false,
+            requirementId,
+            "Type",
+            new JSONObject()
+                .put("name", targetRelationship),
+            EMPTY_JSON_OBJECT,
+            "RELATIONSHIP")) {
+          errLogger.log(LogLevel.WARN, this.getClass().getName(), "toscaRequirementsAssignment, Node {}, requirement {} (id: {}) seems to point to invalid relationship type: {}", theNodeId, requirementName, requirementId, targetRelationship);
+        }
+      } else {
+        //TODO: does the presence of properties/attributes/interfaces in the
+        //requirement definition trigger the defintion of an anonymous
+        //relationship type?? (maybe derived from the one under the
+        //'relationship_type' key, if present?)
+      }
+    }
+  }
+
+ /* an anonymous type is created from a node specification (type,template)
+ */
+ private String yamlAnonymousType(Map<String, Object> theInfo,
+ String theType,
+ String theOwner,
+ boolean doProperties,
+ boolean doCapabilities,
+ NeoTransaction theTrx)
+ throws IOException {
+
+ //is this naming scheme capable enough??NO!
+ String anonTypeId = theOwner + "#" + (theType == null ? "" : theType);
+
+ String neoAnonTypeId = neoMergeNode(
+ theTrx, false,
+ new JSONObject()
+ .put("name", anonTypeId)
+ .put("id", anonTypeId),
+ "TOSCA", "Type");
+
+ if (theType != null) {
+ neoEdge(theTrx, false,
+ neoAnonTypeId,
+ "Type",
+ new JSONObject()
+ .put("name", theType),
+ EMPTY_JSON_OBJECT,
+ "DERIVED_FROM");
+ }
+
+ //shoudl the properties spec be passed explcitly??
+ if (doProperties) {
+ Map<String, Object> props = (Map<String, Object>) theInfo.get("properties");
+ if (props != null) {
+ yamlNodeProperties(neoAnonTypeId, props, theTrx);
+ }
+ }
+
+ return neoAnonTypeId;
+ }
+
+ /*
+ * A first pass over a type spec provisions each type individually
+ * and its properties.
+ * We process here types for all constructs: data, capability, relationship,
+ * node, [interface, artifact]
+ */
+ private void toscaTypeSpec(String theConstruct,
+ Map<String, Map> theTypes,
+ NeoTransaction theTrx)
+ throws IOException {
+ //first pass, provision each type individually (and their properties)
+ String rule = "_" + theConstruct.toLowerCase() + "_type_definition";
+ Map storageSpec = (Map) toscaStorageSpec.get(rule);
+
+ for (Map.Entry<String, Map> toscaType : theTypes.entrySet()) {
+ String typeName = toscaType.getKey();
+ Map<String, Map> typeValue = (Map<String, Map>) toscaType.getValue();
+
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Type: {}", typeName);
+
+ JSONObject data = pack(storageSpec, typeValue)
+ .put("name", typeName)
+ .put("id", typeName);
+
+ String neoTypeId = neoMergeNode(theTrx, false, data, "TOSCA", "Type", theConstruct);
+
+ tracker.trackType(theConstruct, typeName, neoTypeId);
+
+ Map<String, Object> toscaTypeProps = (Map<String, Object>) typeValue.get("properties");
+ if (toscaTypeProps != null) {
+ yamlNodeProperties(neoTypeId, toscaTypeProps, theTrx);
+ } //type props
+ } //types
+
+ toscaTypePostProc(theConstruct, theTypes, theTrx);
+ }
+
+  /*
+   * A second pass to process the derived_from relationship and
+   * the capabilities (now that the capabilities types have been provisioned)
+   */
+  private void toscaTypePostProc(String theConstruct,
+                                 Map<String, Map> theTypes,
+                                 NeoTransaction theTrx)
+      throws IOException {
+    for (Map.Entry<String, Map> typeEntry : theTypes.entrySet()) {
+      Map typeValue = typeEntry.getValue();
+      String typeName = typeEntry.getKey();
+
+      //supertype and description: all types
+      String superTypeName = (String) typeValue.get("derived_from");
+      if (superTypeName != null) {
+        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}-DERIVED_FROM->{}", typeName, superTypeName);
+
+        // prefer the tracked neo id when the supertype was provisioned in
+        // this run; otherwise fall back to matching the type by name
+        if (tracker.tracksType(theConstruct, superTypeName)) {
+          if (null == neoEdge(theTrx, false,
+              tracker.lookupType(theConstruct, typeName),
+              tracker.lookupType(theConstruct, superTypeName),
+              EMPTY_JSON_OBJECT,
+              "DERIVED_FROM")) {
+            errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, missing parent type {}, id {} for type {}, id {}", superTypeName, tracker.lookupType(theConstruct, superTypeName), typeName, tracker.lookupType(theConstruct, typeName));
+          }
+        } else {
+          if (null == neoEdge(theTrx, false,
+              tracker.lookupType(theConstruct, typeName),
+              "Type",
+              new JSONObject()
+                  .put("name", superTypeName),
+              new JSONObject(),
+              "DERIVED_FROM")) {
+            errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, missing parent type {} for type {}", superTypeName, typeName);
+          }
+        }
+      }
+
+      //requirements/capabilities: for node types
+      Map<String, Object> capabilities =
+          (Map<String, Object>) typeValue.get("capabilities");
+      if (capabilities != null) {
+        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Processing: {}", capabilities);
+        yamlNodeTypeCapabilities(
+            tracker.lookupType(theConstruct, typeName), capabilities, theTrx);
+      }
+
+      List<Map<String, Object>> requirements =
+          (List<Map<String, Object>>) typeValue.get("requirements");
+      if (requirements != null) {
+        yamlNodeTypeRequirements(
+            tracker.lookupType(theConstruct, typeName), requirements, theTrx);
+      }
+
+      //interfaces: for node types or relationship types
+      Object interfaces = typeValue.get("interfaces");
+      if (interfaces != null) {
+        errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, Type {}: interfaces section declared but not handled", typeName);
+        if (interfaces instanceof List) {
+          //expect a list of interface types
+        }
+      }
+
+      //valid targets: for relationship types
+      List valid_targets = (List) typeValue.get("valid_targets");
+      if (valid_targets != null) {
+        //add as a property to the type node, can be used for validation
+        //whereever this type is used
+        //the list should contain node type names and we should check that we
+        //have those types
+        errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, Type {}: valid_targets section declared but not handled", typeName);
+
+      }
+
+      List artifacts = (List) typeValue.get("artifacts");
+      if (artifacts != null) {
+        errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, Type {}: artifacts section declared but not handled", typeName);
+      }
+
+      /* Artifact types can have "mime_type" and "file_ext" sections
+       */
+    }
+  }
+
+ private void toscaTemplate(String theTopologyTemplateId,
+ String theConstruct,
+ Map<String, Object> theTemplates,
+ NeoTransaction theTrx)
+ throws IOException {
+
+ String rule = "_" + theConstruct.toLowerCase() + "_template_definition";
+ Map storageSpec = (Map) toscaStorageSpec.get(rule);
+ if (storageSpec == null) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No rule '{}', can't make up the storage specification for {}", rule, theConstruct);
+ }
+
+ for (Map.Entry<String, Object> template : theTemplates.entrySet()) {
+
+ String templateName = template.getKey();
+ Map<String, Object> templateSpec = (Map<String, Object>) template.getValue();
+
+ String templateType = (String) templateSpec.get("type");
+ if (templateType == null) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoNode, template {}'{}', does not have a type specification .. skipping", theConstruct, templateName);
+ continue;
+ }
+
+ try {
+ //we use create here as node names are not unique across templates
+ JSONObject neoTemplateNode =
+ pack(storageSpec, templateSpec)
+ .put("name", templateName);
+
+ String templateNodeId = neoCreateNode(
+ theTrx, false, neoTemplateNode, "TOSCA", theConstruct);
+
+ tracker.trackTemplate(theConstruct, templateName, templateNodeId);
+
+ neoEdge(theTrx, false,
+ templateNodeId,
+ theTopologyTemplateId,
+ new JSONObject(),
+ theConstruct.toUpperCase() + "_OF");
+
+ if (null == neoEdge(theTrx, false,
+ templateNodeId,
+ "Type",
+ new JSONObject()
+ .put("name", templateType),
+ new JSONObject(),
+ "OF_TYPE")) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlSpec, Template {}, {} {}: failed to identify type {}", theTopologyTemplateId, theConstruct, templateName, templateType);
+ }
+
+ //facets
+
+ //we handle properties for all constructs (as they all have them)
+ Map<String, Object> templateProps =
+ (Map<String, Object>) templateSpec.get("properties");
+ if (templateProps != null) {
+ for (Map.Entry<String, Object> templateProp :
+ templateProps.entrySet()) {
+ String templatePropName = templateProp.getKey();
+ Object templatePropObject = templateProp.getValue();
+
+ final Map templatePropValues;
+ if (templatePropObject instanceof Map) {
+ templatePropValues = (Map) templatePropObject;
+ } else {
+
+ //this is dealing with short form, if we ran the first 2 stages of the checker //we'd always be working on a canonical form ..
+ //
+ templatePropValues = new HashMap();
+ templatePropValues.put("value", templatePropObject);
+ }
+
+ //a node will contain the means for property valuation:
+ //straight value or a call to get_input/get_property/get_attribute
+
+ //find the property node (in the type) this valuation belongs to
+ if (templatePropValues != null) {
+
+ String propertyId =
+ neoId(
+ theTrx.statement(
+ new JSONObject()
+ .put("statement",
+ "MATCH (t:Type)-[:DERIVED_FROM*0..5]->(:Type)<-[:PROPERTY_OF]-(p:Property) " +
+ "WHERE t.name='" + templateType + "' " +
+ "AND p.name='" + templatePropName + "' " +
+ "RETURN id(p)"))
+ .execute()
+ .result()
+ );
+
+ if (propertyId == null) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlSpec, Template {}, {} template {}, property {} does not match the node type spec, skipping property", templateName, theConstruct, templateName, templatePropName);
+ continue;
+ }
+
+ //remove valuation by function: for now handle only get_input
+ String propInput = (String) templatePropValues.remove("get_input");
+
+ List constraints = (List) templatePropValues.remove("constraints");
+ if (constraints != null) {
+ //flattening
+ templatePropValues.put("constraints",
+ yamlEncodeConstraints(constraints));
+ }
+
+ Object val = templatePropValues.remove("value");
+ //check if the value is a collection or user defined data type, the cheap way
+ if (val instanceof List ||
+ val instanceof Map) {
+ /* An interesting option here:
+ * 1. store the whole flatten value under the 'value' property
+ templatePropValues.put("value", JsonFlattener.flatten(JsonObject.valueToString(val)));
+ Simpler but almost impossible to write queries based on property value
+ * 2. store each entry in the flatten map as a separate property (we prefix it with 'value' for
+ * clarity).
+ * see below
+ */
+ /*
+ JsonFlattener.flattenAsMap(JSONObject.valueToString(Collections.singletonMap("value",val)))
+ .entrySet()
+ .stream()
+ .forEach(e -> templatePropValues.put(e.getKey(), e.getValue()));
+ */
+ //simply stores a collection in its (json) string representation. Cannot be used if
+ //queries are necessary based on the value (on one of its elements).
+ templatePropValues.put("value", JSONObject.valueToString(val));
+ } else {
+ /* scalar, store as such */
+ templatePropValues.put("value", val);
+ }
+
+ String templatePropValueId =
+ neoCreateNode(
+ theTrx, false,
+ new JSONObject(templatePropValues),
+ "TOSCA", /*"Property",*/ "Assignment");
+
+ neoEdge(theTrx, false,
+ templatePropValueId,
+ templateNodeId,
+ new JSONObject(),
+ "OF_TEMPLATE");
+
+ neoEdge(theTrx, false,
+ templatePropValueId,
+ propertyId,
+ new JSONObject(),
+ "OF_" + theConstruct.toUpperCase() + "_PROPERTY");
+
+ if (propInput != null) {
+ String inputId = tracker.lookupTemplate("Input", propInput);
+ if (inputId == null) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoNode, Template {},node {}, property {} input {} not found", theTopologyTemplateId, templateName, templatePropName, propInput);
+ }
+
+ neoEdge(theTrx, false,
+ templatePropValueId,
+ inputId,
+ new JSONObject(),
+ "GET_INPUT");
+ }
+ }
+ }
+ }
+ tracker.trackTemplate(theConstruct, templateName, templateNodeId);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} template {} of type {}", theConstruct, templateName, templateType);
+ } catch (IOException iox) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "toscaTemplate, Failed to persist template {}", iox);
+ throw iox;
+ }
+ }
+ }
+
    /* While we persist basic type values inline (in the assignment node) we store complex values
     * in a graph of their own.
     * We handle the neo4j limitation stated below:
     * Neo4j can only store collections (map, list) of basic types.
     *
     * User-defined data types can create indefinitely nested structures of collections.
     * We could store collections of basic types inline but it would make for a less uniform structure.
     */
+ private void toscaPropertyAssignment(
+ String theAssignmentId,
+ Object theValue,
+ NeoTransaction theTrx)
+ throws IOException {
+ //look the grammar rules to see if we inline (stringify) or not
+
+ if (theValue instanceof Map) {
+ //a map type property or a user-defined datatype
+ Map<String, Object> elements = (Map<String, Object>) theValue;
+ for (Map.Entry element : elements.entrySet()) {
+
+ String elementId = neoCreateNode(theTrx, false,
+ new JSONObject().
+ put("name", element.getKey()),
+ "TOSCA", "Data", "Element");
+
+ neoEdge(theTrx, false,
+ elementId,
+ theAssignmentId,
+ EMPTY_JSON_OBJECT,
+ "ELEMENT_OF");
+
+ toscaPropertyAssignment(elementId, element.getValue(), theTrx);
+ }
+ } else if (theValue instanceof List) {
+ //a list type property
+ for (int i = 0; i < ((List) theValue).size(); i++) {
+
+ String elementId = neoCreateNode(theTrx, false,
+ new JSONObject().
+ put("pos", i),
+ "TOSCA", "Data", "Element");
+
+ neoEdge(theTrx, false,
+ elementId,
+ theAssignmentId,
+ EMPTY_JSON_OBJECT,
+ "ELEMENT_OF");
+
+ toscaPropertyAssignment(elementId, ((List) theValue).get(i), theTrx);
+ }
+
+ //update theAssignment with a length property
+ neoNodeProperties(theTrx, false, theAssignmentId,
+ new JSONObject().
+ put("length", ((List) theValue).size()));
+ } else {
+ //update the assignment with a 'value' attribute
+ neoNodeProperties(theTrx, false, theAssignmentId,
+ new JSONObject().
+ put("value", theValue));
+ }
+ }
+
+ /*
+ * We only handle properties for now so we assume these are properties
+ * assignments
+ */
+ private void toscaCapabilityAssignment(
+ String theNodeTemplateId,
+ String theCapabilityName,
+ Map<String, Object> theValuations,
+ NeoTransaction theTrx)
+ throws IOException {
+
+ for (Map.Entry<String, Object> valuation : theValuations.entrySet()) {
+ String propertyName = valuation.getKey();
+ Object propertyValueSpec = valuation.getValue();
+
+ Map propertyValue = null;
+ if (propertyValueSpec instanceof Map) {
+ propertyValue = (Map) propertyValueSpec;
+ } else {
+ //this is dealing with short form, if we ran the first 2 stages of
+ //the checker we'd always be working on a canonical form ..
+ propertyValue = new HashMap();
+ propertyValue.put("value", propertyValueSpec);
+ }
+
+ //we need to link the assignment to the node template, the capability
+ //and the property of the capability type (a node can have multiple
+ //capabilities of the same type).
+ String[] ids =
+ neoIds(
+ theTrx.statement(
+ new JSONObject()
+ .put("statement",
+ "MATCH (n:Node)-[:OF_TYPE]->(:Node:Type)<-[:CAPABILITY_OF]-(c:Capability)-[:FEATURES]->(:Capability:Type)-[:DERIVED_FROM*0..5]->(:Capability:Type)<-[:PROPERTY_OF]-(p:Property) " +
+ "WHERE id(n) = " + theNodeTemplateId + " " +
+ "AND c.name = '" + theCapabilityName + "' " +
+ "AND p.name = '" + propertyName + "' " +
+ "RETURN id(p), id(c)"))
+ .execute()
+ .result());
+
+ if (ids == null) {
+ throw new IOException("toscaCapabilityAssignment: " +
+ "node template " + theNodeTemplateId + ", " +
+ "capability " + theCapabilityName + ", " +
+ "property " + propertyName +
+ " does not match the node type spec");
+ }
+
+ /* this node represents the assignment of a value to a capability property
+ * hence my doubts about hoe to label it ['Assignment', 'Property'] or ['Assignment','Capability']
+ * I am inclined towards the second option as there is no other capability assignment in itself.
+ */
+ String assignmentId =
+ neoCreateNode(
+ theTrx, false,
+ new JSONObject(propertyValue),
+ "TOSCA", /*Capability,*/"Assignment");
+
+ neoEdge(theTrx, false,
+ assignmentId,
+ theNodeTemplateId,
+ new JSONObject(),
+ "OF_TEMPLATE");
+
+ neoEdge(theTrx, false,
+ assignmentId,
+ ids[1],
+ new JSONObject(),
+ "OF_CAPABILITY");
+
+ neoEdge(theTrx, false,
+ assignmentId,
+ ids[0],
+ new JSONObject(),
+ "OF_CAPABILITY_PROPERTY");
+ }
+ }
+
+ /*
+ *
+ * */
+ private void importTemplate(String thePath) throws IOException {
+ try (FileInputStream input = new FileInputStream(thePath)){
+ for (Object yaml : new Yaml().loadAll(input)) {
+ toscaSpec((Map) yaml);
+ }
+ }
+ }
+
    /*
     * Imports one TOSCA yaml document into neo4j: first the type definitions
     * (data, capability, relationship, node, policy — in that order, so later
     * kinds can refer to earlier ones), then, if present, the topology template
     * with its inputs, node templates, policies and outputs.
     * Everything runs in a single transaction: committed at the end, rolled
     * back on any failure (the original IOException is re-thrown).
     */
    private void toscaSpec(Map theSpec) throws IOException {

        // type specifications
        // at this time we do not record the relation between a type and the
        // template it was defined in.

        NeoTransaction trx = new NeoTransaction(this.neoUri);
        try {
            {
                Map<String, Map> types = (Map<String, Map>) theSpec.get("data_types");
                if (types != null) {
                    toscaTypeSpec("Data", types, trx);
                }

                types = (Map<String, Map>) theSpec.get("capability_types");
                if (types != null) {
                    toscaTypeSpec("Capability", types, trx);
                }

                types = (Map<String, Map>) theSpec.get("relationship_types");
                if (types != null) {
                    toscaTypeSpec("Relationship", types, trx);
                }

                types = (Map<String, Map>) theSpec.get("node_types");
                if (types != null) {
                    toscaTypeSpec("Node", types, trx);
                }

                types = (Map<String, Map>) theSpec.get("policy_types");
                if (types != null) {
                    toscaTypeSpec("Policy", types, trx);
                }
            }

            Map<String, Map> topologyTemplate = (Map<String, Map>)
                    theSpec.get("topology_template");
            if (topologyTemplate != null) {

                //metadata is mandatory: it names the template
                Map<String, Object> metadata = (Map<String, Object>) theSpec.get("metadata");
                if (metadata == null) {
                    throw new IOException("Missing metadata, cannot register template");
                }
                //NOTE(review): template_name is not null-checked; a missing name yields
                //a Template node with a null 'name' property — confirm intended
                String templateName = (String) metadata.get("template_name");
                //merge (not create): re-importing the same template updates it
                String templateId = neoMergeNode(
                        trx, false,
                        new JSONObject()
                                .put("name", templateName)
                                .putOpt("description", (String) theSpec.get("description"))
                                .putOpt("version", (String) metadata.get("template_version"))
                                .putOpt("author", (String) metadata.get("template_author"))
                                .putOpt("scope", (String) metadata.get("scope")),
                        "TOSCA", "Template");

                /* inputs */
                Map<String, Map> toscaInputs = (Map) topologyTemplate.get("inputs");
                if (toscaInputs != null) {
                    for (Map.Entry<String, Map> toscaInput : toscaInputs.entrySet()) {
                        //we use create here as input names are not unique across templates
                        //also, constraints require special encoding
                        Map toscaInputSpec = toscaInput.getValue();

                        List constraints = (List) toscaInputSpec.remove("constraints");
                        if (constraints != null) {
                            //flattening (neo4j cannot store nested collections)
                            toscaInputSpec.put("constraints",
                                    yamlEncodeConstraints(constraints));
                        }
                        String neoInputNodeId =
                                neoCreateNode(
                                        trx, false,
                                        new JSONObject(toscaInputSpec)
                                                .put("name", toscaInput.getKey())
                                                .putOpt("type", toscaInputSpec.get("type")),
                                        "TOSCA", "Input");

                        //tracked so get_input valuations in templates can resolve it
                        tracker.trackTemplate(
                                "Input", (String) toscaInput.getKey(), neoInputNodeId);

                        neoEdge(trx, false,
                                neoInputNodeId,
                                templateId,
                                new JSONObject(),
                                "INPUT_OF");
                    }
                }

                /*
                 * The main issue that I have here is with the definition given to each
                 * section (properties, capabilities, requirements ..) of a Node template:
                 * they are said to 'augment' the information provided in its Node Type but
                 * without specifying the semantics of 'augment'. Can new properties be
                 * added? can interface specification contain new operations?
                 */
                Map<String, Object> toscaNodes = (Map) topologyTemplate.get("node_templates");
                if (toscaNodes != null) {
                    toscaTemplate(templateId, "Node", toscaNodes, trx);

                    //now that all nodes are in we need a second path over the nodes set in
                    //order to handle the capabilities, requirements ..

                    for (Map.Entry<String, Object> toscaNode : toscaNodes.entrySet()) {

                        String toscaNodeName = toscaNode.getKey();
                        Map<String, Object> toscaNodeValues = (Map<String, Object>) toscaNode.getValue();

                        Map<String, Map> capabilities =
                                (Map<String, Map>) toscaNodeValues.get("capabilities");
                        if (capabilities != null) {
                            for (Map.Entry<String, Map> capability : capabilities.entrySet()) {
                                Map<String, Map> assignments = (Map<String, Map>) capability.getValue();
                                Map<String, Object> propertiesAssignments =
                                        assignments.get("properties");
                                if (propertiesAssignments != null) {
                                    toscaCapabilityAssignment(
                                            tracker.lookupTemplate("Node", toscaNodeName),
                                            capability.getKey(),
                                            propertiesAssignments,
                                            trx);
                                }
                            }
                        }

                        List<Map<String, Object>> requirements = (List<Map<String, Object>>)
                                toscaNodeValues.get("requirements");
                        if (requirements != null) {
                            toscaRequirementsAssignment(
                                    tracker.lookupTemplate("Node", toscaNodeName), requirements, trx);
                        }

                        //interfaces: not handled yet
                    }
                }

                List toscaPolicies = (List) topologyTemplate.get("policies");
                if (toscaPolicies != null) {
                    for (Object toscaPolicy : toscaPolicies) {
                        toscaTemplate(templateId, "Policy", (Map<String, Object>) toscaPolicy, trx);
                    }
                }

                Map<String, Map> toscaOutputs = (Map) topologyTemplate.get("outputs");
                if (toscaOutputs != null) {
                    for (Map.Entry<String, Map> toscaOutput : toscaOutputs.entrySet()) {
                        Object outputValue = toscaOutput.getValue().get("value");
                        if (outputValue instanceof Map) { //shouldn't I be doing this in all cases??
                            outputValue = JSONObject.valueToString((Map) outputValue);
                        }

                        //NOTE(review): outputValue.toString() will NPE when the output
                        //declares no 'value' entry — confirm outputs always carry one
                        String neoOutputNodeId = neoCreateNode(
                                trx, false,
                                new JSONObject()
                                        .put("name", (String) toscaOutput.getKey())
                                        .putOpt("description", (String) toscaOutput.getValue().get("description"))
                                        .put("value", outputValue.toString()),
                                "TOSCA", "Output");

                        neoEdge(trx, false,
                                neoOutputNodeId,
                                templateId,
                                new JSONObject(),
                                "OUTPUT_OF");
                    }
                }

                //if this is a service template look for its type mapping specification
                Map<String, Object> substitutionSpec =
                        (Map<String, Object>) theSpec.get("substitution_mappings");
                if (substitutionSpec != null) {

                    String nodeType = (String) substitutionSpec.get("node_type");
                    if (nodeType != null) {
                        neoEdge(trx, false,
                                templateId,
                                "Type",
                                new JSONObject()
                                        .put("name", nodeType),
                                new JSONObject(),
                                "SUBSTITUTES");
                    } else {
                        errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoProc, Template {} substitution_mapping is missing a node_type in spec: {}", templateName, substitutionSpec);
                    }

                    //process the rest of the mapping definition
                } else {
                    errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoProc, Template {} does not have a substitution mapping", templateName);
                }

                //try to connect template to catalog item if information was provided
                //
                String catalogItemSelector = (String) metadata.get("asc_catalog");
                if (catalogItemSelector != null) {
                    if (null == neoEdge(trx, false,
                            templateId,
                            "CatalogItem",
                            new JSONObject(catalogItemSelector),
                            new JSONObject(),
                            "MODEL_OF")) {
                        throw new IOException("No such catalog item: " + catalogItemSelector);
                    }
                }
            }
            trx.commit();
        } catch (IOException iox) {
            try {
                trx.rollback();
            } catch (IOException riox) {
                //keep the original failure; only log the rollback failure
                errLogger.log(LogLevel.ERROR, Modeled.class.getName(), riox.getMessage());
            }
            throw iox;
        }
    }
+
+ private void annotateItem(String thePath, String theLabels) throws IOException {
+
+ if (theLabels == null) {
+ throw new IOException("Labels ??");
+ }
+
+ try (FileInputStream input = new FileInputStream(thePath)){
+ for (Object yaml : new Yaml().loadAll(input)) {
+ annotateItem((Map) yaml, theLabels);
+ }
+ }
+ }
+
+ private void annotateItem(Map theSpec, String theLabels) throws IOException {
+
+ Map<String, Object> metadata = (Map<String, Object>) theSpec.get("metadata");
+ if (metadata == null) {
+ throw new IOException("Missing metadata, cannot register template");
+ }
+
+ String catalogItemSelector = (String) metadata.remove("asc_catalog");
+ if (catalogItemSelector == null) {
+ throw new IOException("Missing item selector");
+ }
+
+ JSONObject annotation = new JSONObject();
+ for (Map.Entry<String, Object> e : metadata.entrySet()) {
+ String key = e.getKey();
+ if (key.startsWith("asc_")) {
+ annotation.put(key.substring(4, key.length()), e.getValue());
+ }
+ }
+
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "annotation: " + annotation);
+
+ NeoTransaction trx = new NeoTransaction(this.neoUri);
+ try {
+ String id = neoCreateNode(trx, false, annotation, ("Annotation:" + theLabels).split(":"));
+ if (id == null) {
+ throw new IOException("No such catalog item: " + catalogItemSelector);
+ }
+
+ id = neoEdge(trx, false,
+ id,
+ "CatalogItem",
+ new JSONObject(catalogItemSelector),
+ new JSONObject(),
+ "ANNOTATION_OF");
+ if (id == null) {
+ throw new IOException("No such catalog item: " + catalogItemSelector);
+ }
+
+ trx.commit();
+ } catch (IOException iox) {
+ try {
+ trx.rollback();
+ } catch (IOException riox) {
+ errLogger.log(LogLevel.ERROR, this.getClass().getName(), riox.getMessage());
+ }
+ throw iox;
+ }
+ }
+
+ private void listTemplates(String theSelector) throws IOException {
+
+ JSONObject selector = null;
+
+ if (theSelector != null) {
+ selector = new JSONObject(theSelector);
+ }
+
+ NeoTransaction trx = new NeoTransaction(this.neoUri);
+
+ JSONObject res = trx.statement(new JSONObject()
+ .put("statement",
+ "MATCH (t:TOSCA:Template" +
+ (selector != null ? neoLiteralMap(selector) : "") + ") RETURN t, id(t)")
+ .put("parameters",
+ new JSONObject()
+ .put("props", selector != null ? selector : new JSONObject())))
+ .commit()
+ .result();
+
+ JSONArray data = res
+ .getJSONArray("results")
+ .getJSONObject(0)
+ .getJSONArray("data");
+ if (data.length() == 0) {
+ return;
+ }
+
+ for (int i = 0; i < data.length(); i++) {
+ JSONArray row = data.getJSONObject(i)
+ .getJSONArray("row");
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}: {}", row.getInt(1), row.getJSONObject(0));
+ }
+ }
+
+
    /* Deletes a TOSCA template (selected by its neo4j node id) together with
     * every element belonging to it. Elements point towards the template and
     * are never more than three hops away, so they are collected with one
     * variable-length match and then removed one by one via DETACH DELETE. */
    private void removeTemplate(String theId) throws IOException {

        //find the nodes to delete and then use 'detach delete'

        NeoTransaction trx = new NeoTransaction(this.neoUri);

        try {
            //Template elements are never more then three hops away and point towards the template
            JSONObject res = trx.statement(new JSONObject()
                    .put("statement",
                            "MATCH (t:TOSCA:Template)<-[*0..3]-(x) " +
                                    "WHERE id(t)=" + theId + " RETURN {labels:labels(x),id:id(x)} as tgt"))
                    .execute()
                    .result();

            JSONArray data = res
                    .getJSONArray("results")
                    .getJSONObject(0)
                    .getJSONArray("data");
            if (data.length() == 0) {
                //NOTE(review): returning here leaves the open transaction neither
                //committed nor rolled back — confirm this is acceptable (it will
                //expire server-side)
                return;
            }

            //delete in reverse discovery order (farthest elements first)
            for (int i = data.length() - 1; i >= 0; i--) {
                JSONArray row = data.getJSONObject(i)
                        .getJSONArray("row");
                debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "> {}", row.getJSONObject(0));

                //double check


                res = trx.statement(new JSONObject()
                        .put("statement",
                                "MATCH (n) " +
                                        "WHERE id(n)=" + row.getJSONObject(0).getInt("id") + " " +
                                        "DETACH DELETE n"))
                        .execute()
                        .result();

                debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "> {}", res);
            }

            trx.commit();
        } catch (IOException iox) {
            try {
                trx.rollback();
            } catch (IOException riox) {
                //keep the original failure; only log the rollback failure
                debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Rollback failed: {}", riox);
            }
            throw iox;
        }
    }
+
+ /*
+ */
+ private static void ignoreMissing(String theTarget) throws IOException {
+
+ for (String prefix : ignoreMissing) {
+ //make sure they are only one name element away
+ if ((theTarget.startsWith(prefix)) && (theTarget.substring(prefix.length()).lastIndexOf('.') == 0)) {
+ return;
+ }
+ }
+
+ throw new IOException("Not configured to ignore missing " + theTarget);
+ }
+
+ private static JSONArray encodeRange(List theRange) throws IOException {
+ JSONArray range = new JSONArray();
+ for (Object value : theRange) {
+ if (value instanceof Number) {
+ range.put(((Number) value).intValue());
+ } else if (value instanceof String &&
+ "UNBOUNDED".equals(value)) {
+ range.put(Integer.MAX_VALUE);
+ } else {
+ throw new IOException("Unexpected value in range definition: " + value);
+ }
+ }
+ return range;
+ }
+
    /* Renders theProps as a Cypher literal-map fragment bound to the default
     * statement parameter name 'props'; see the two-argument overload. */
    private static String neoLiteralMap(JSONObject theProps) {
        return neoLiteralMap(theProps, "props");
    }
+
+ private static String neoLiteralMap(JSONObject theProps, String theArg) {
+ if (theProps.length() == 0) {
+ return "";
+ }
+ StringBuilder sb = new StringBuilder("");
+ for (Iterator i = theProps.keys(); i.hasNext(); ) {
+ String key = (String) i.next();
+ sb.append("`")
+ .append(key)
+ .append("`: {")
+ .append(theArg)
+ .append("}.`")
+ .append(key)
+ .append("`,");
+ }
+ return "{ " + sb.substring(0, sb.length() - 1) + " }";
+ }
+
+ private static String neoLabelsString(int theStartPos, String... theLabels) {
+ StringBuffer lbls = new StringBuffer("");
+ for (int i = theStartPos; i < theLabels.length; i++) {
+ lbls.append(":")
+ .append(theLabels[i]);
+ }
+ return lbls.toString();
+ }
+
    /* Creates a node in a dedicated transaction (committed immediately);
     * see neoNode for the statement mechanics. */
    private String neoCreateNode(
            JSONObject theProperties,
            String... theLabels) throws IOException {
        return neoNode("CREATE", theProperties, theLabels);
    }
+
    /* executes the (up to 2) statements required to construct a node
       in a dedicated transaction */
    private String neoNode(
            String theVerb,
            JSONObject theProperties,
            String... theLabels) throws IOException {
        NeoTransaction trx = new NeoTransaction(this.neoUri);
        try {
            //doCommit=true: the delegate commits the transaction on success
            return neoNode(trx, true,
                    theVerb, theProperties, theLabels);
        } catch (IOException iox) {
            try {
                trx.rollback();
            } catch (IOException ioxx) {
                //rollback failure is only logged; the original failure is re-thrown
                errLogger.log(LogLevel.ERROR, Modeled.class.getName(), ioxx.getMessage());
            }
            throw iox;
        }
    }
+
    /* Creates a node within the given transaction; commits only when doCommit
     * is set. Returns the neo4j id of the new node. */
    private String neoCreateNode(
            NeoTransaction theTransaction,
            boolean doCommit,
            JSONObject theProperties,
            String... theLabels) throws IOException {
        return neoNode(theTransaction, doCommit, "CREATE", theProperties, theLabels);
    }
+
    /* Merges (match-or-create, keyed on all given properties) a node within
     * the given transaction; commits only when doCommit is set. Returns the
     * neo4j id of the matched or created node. */
    private String neoMergeNode(
            NeoTransaction theTransaction,
            boolean doCommit,
            JSONObject theProperties,
            String... theLabels) throws IOException {
        return neoNode(theTransaction, doCommit, "MERGE", theProperties, theLabels);
    }
+
+ /* execute the statements required to construct a node as part of the
+ given transaction
+
+ */
+ private String neoNode(
+ NeoTransaction theTransaction,
+ boolean doCommit,
+ String theVerb,
+ JSONObject theProperties,
+ String... theLabels) throws IOException {
+
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoNode {}", new Object[]{theProperties, theLabels});
+
+ JSONObject node;
+ String nodeId;
+
+ node = theTransaction
+ .statement(
+ new JSONObject()
+ .put("statement",
+ theVerb + " (n:" + theLabels[0] + neoLiteralMap(theProperties) + " ) RETURN id(n)")
+ .put("parameters",
+ new JSONObject()
+ .put("props", theProperties)))
+ .execute()
+ .result();
+
+
+ nodeId = neoId(node);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoNode, node: {}", nodeId);
+
+ if (theLabels.length > 1) {
+ theTransaction.statement(
+ new JSONObject()
+ .put("statement",
+ "START n=node(" + nodeId + ") SET n " + neoLabelsString(1, theLabels)));
+ }
+ theTransaction.execute(doCommit);
+
+ return nodeId;
+ }
+
+ private void neoNodeProperties(
+ NeoTransaction theTransaction,
+ boolean doCommit,
+ String theId,
+ JSONObject theProperties) throws IOException {
+
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoNodeProperties {}", new Object[]{theId, theProperties});
+ theTransaction
+ .statement(
+ new JSONObject()
+ .put("statement",
+ "START n=node(" + theId + ") SET n+= " +
+ neoLiteralMap(theProperties) + " RETURN id(n)")
+ .put("parameters",
+ new JSONObject()
+ .put("props", theProperties)))
+ .execute(doCommit);
+ }
+
+ private String neoEdge(
+ NeoTransaction theTransaction,
+ boolean doCommit,
+ String theFrom, String theTo,
+ JSONObject theProperties,
+ String... theLabels) throws IOException {
+
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoEdge: {}", new Object[]{theFrom, theTo, theProperties, theLabels});
+
+ return neoEdge(
+ theTransaction, doCommit,
+ new JSONObject()
+ .put("statement",
+ "START a=node(" + theFrom + "),b=node(" + theTo + ") " +
+ "MERGE (a)-[r:" + theLabels[0] + neoLiteralMap(theProperties) + "]->(b) " +
+ "RETURN id(r)")
+ .put("parameters",
+ new JSONObject()
+ .put("props", theProperties)));
+ }
+
+ private String neoEdge(
+ NeoTransaction theTransaction, boolean doCommit,
+ String theFromId,
+ String theToLabel, JSONObject theToProps,
+ JSONObject theProperties,
+ String... theLabels) throws IOException {
+
+ return neoEdge(theTransaction, doCommit,
+ new JSONObject()
+ .put("statement",
+ //"START a=node(" + theFromId + ") " +
+ "MATCH (a),(b:" + theToLabel + neoLiteralMap(theToProps, "toProps") + ") " +
+ "WHERE id(a)=" + theFromId + " " +
+ "MERGE (a)-[r:" + theLabels[0] + neoLiteralMap(theProperties) + "]->(b) " +
+ "RETURN id(r)")
+ .put("parameters",
+ new JSONObject()
+ .put("toProps", theToProps)
+ .put("props", theProperties)));
+ }
+
+ private String neoEdge(NeoTransaction theTransaction,
+ boolean doCommit,
+ JSONObject theEdgeStatement)
+ throws IOException {
+
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoEdge {}", new Object[]{theEdgeStatement});
+
+ return neoId(
+ theTransaction
+ .statement(theEdgeStatement)
+ .execute(doCommit)
+ .result()
+ );
+ }
+
+ private static String neoId(JSONObject theResult) throws IOException {
+ try {
+ JSONArray data = theResult
+ .getJSONArray("results")
+ .getJSONObject(0)
+ .getJSONArray("data");
+ if (data.length() == 0) {
+ return null;
+ }
+
+ return String.valueOf(
+ data.getJSONObject(0)
+ .getJSONArray("row")
+ .getInt(0));
+ } catch (JSONException jsonx) {
+ errLogger.log(LogLevel.WARN, Modeled.class.getName(), "neoId, No 'id' in result: {} {}", theResult, jsonx);
+ throw new IOException("no 'id' in result", jsonx);
+ }
+ }
+
+ private static String[] neoIds(JSONObject theResult) throws IOException {
+ try {
+ JSONArray data = theResult
+ .getJSONArray("results")
+ .getJSONObject(0)
+ .getJSONArray("data");
+ if (data.length() == 0) {
+ return new String[]{};
+ }
+
+ JSONArray array = data.getJSONObject(0)
+ .getJSONArray("row");
+
+ String[] res = new String[array.length()];
+ for (int i = 0; i < array.length(); i++) {
+ res[i] = String.valueOf(array.getInt(i));
+ }
+ return res;
+ } catch (JSONException jsonx) {
+ errLogger.log(LogLevel.WARN, Modeled.class.getName(), "neoId, No 'id' in result: {} {}", theResult, jsonx);
+ throw new IOException("no 'id' in result", jsonx);
+ }
+ }
+
    /* Minimal wrapper over the neo4j transactional HTTP endpoint
     * (/db/data/transaction): statements are queued locally and flushed by
     * execute()/commit(); rollback() issues a DELETE on the transaction URI.
     * Not thread-safe; once commit() completes the instance is unusable. */
    private static class NeoTransaction {

        private HttpClient client = null;
        private String uri = null;            //transaction endpoint; updated from the Location header
        private String auth = null;           //pre-computed Basic auth header value, when credentials were given
        private JSONObject result = null;     //raw json response of the last flush
        private JSONArray stmts = new JSONArray();  //statements pending for the next flush

        NeoTransaction(URI theTarget) {

            client = httpClientBuilder.build();
            this.uri = theTarget.getScheme() + "://" + theTarget.getHost() + ":" + theTarget.getPort() + "/db/data/transaction";

            String userInfo = theTarget.getUserInfo();
            if (userInfo != null) {
                //ISO-8859-1 per the HTTP Basic authentication scheme
                this.auth = "Basic " + new String(
                        Base64.encodeBase64(
                                userInfo.getBytes(Charset.forName("ISO-8859-1"))));
            }
        }

        /* adds a statement to the next execution cycle */
        NeoTransaction statement(JSONObject theStatement) {
            if (this.client == null) {
                throw new IllegalStateException("Transaction was completed");
            }
            this.stmts.put(theStatement);
            return this;
        }

        /* executes all pending statements but does not commit the transaction */
        /* executing a transaction with no statements refreshes the transaction timer in order to keep the transaction alive */
        NeoTransaction execute() throws IOException {
            if (this.client == null) {
                throw new IllegalStateException("Transaction was completed");
            }
            post(this.uri);
            return this;
        }

        /* executes all pending statements and commits the transaction */
        NeoTransaction commit() throws IOException {
            if (this.client == null) {
                throw new IllegalStateException("Transaction was completed");
            }
            post(this.uri + "/commit");
            //mark the transaction as terminated
            this.client = null;
            return this;
        }

        /* just to simplify some code written on top of NeoTransaction */
        NeoTransaction execute(boolean doCommit) throws IOException {
            return doCommit ? commit() : execute();
        }

        //flushes the queued statements to the given endpoint
        private void post(String theUri) throws IOException {
            HttpPost post = new HttpPost(theUri);
            JSONObject payload = new JSONObject()
                    .put("statements", this.stmts);
            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "post> " + payload);
            post.setEntity(new StringEntity(payload.toString(),
                    ContentType.APPLICATION_JSON));
            run(post);
        }

        /* rolls back the transaction changes */
        NeoTransaction rollback() throws IOException {
            if (this.client == null) {
                throw new IllegalStateException("Transaction was completed");
            }
            if (this.uri == null) {
                throw new IllegalStateException("Transaction not started");
            }
            run(new HttpDelete(this.uri));
            return this;
        }

        /* retrieve the (raw) results of the last execute/commit cycle */
        JSONObject result() {
            return this.result;
        }

        //sends the request, parses the json response into 'result' and surfaces
        //http- or statement-level failures as IOException
        private void run(HttpUriRequest theRequest) throws IOException {
            theRequest.setHeader(HttpHeaders.ACCEPT, "application/json; charset=UTF-8");
            if (this.auth != null) {
                theRequest.setHeader(HttpHeaders.AUTHORIZATION, this.auth);
            }

            HttpResponse response = this.client.execute(theRequest);
            int statusCode = response.getStatusLine().getStatusCode();
            if (statusCode >= 300) {
                //best effort: keep the error body around for the caller before failing
                try {
                    this.result = new JSONObject(IOUtils.toString(response.getEntity().getContent(), "UTF-8"));
                } catch (Exception x) {
                    errLogger.log(LogLevel.ERROR, Modeled.class.getName(), x.getMessage());
                }
                throw new IOException("Neo statement(s) '" + this.stmts + "' failed: " + response.getStatusLine());
            }

            try {
                this.result = new JSONObject(
                        IOUtils.toString(response.getEntity().getContent(), "UTF-8"));
            } catch (Exception x) {
                throw new IOException("no json in response", x);
            }

            //a 2xx response can still carry per-statement errors
            JSONArray errors = this.result.getJSONArray("errors");
            if (errors.length() > 0) {
                throw new IOException("Neo statement(s) '" + this.stmts + "' have errors: " + errors);
            }
            //we only get a header if this was not a one statement transaction
            Header hdr = response.getFirstHeader("Location");
            if (hdr != null) {
                if (!hdr.getValue().startsWith(this.uri)) {
                    debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "new transaction location?? : {} vs. {}", this.uri, hdr.getValue());
                }
                this.uri = hdr.getValue();
            }
            this.stmts = new JSONArray();
        }
    }
+
+ private static JSONObject pack(Map theRule, Map theDef) {
+ JSONObject pack = new JSONObject();
+
+ if (theRule == null) {
+ return pack;
+ }
+
+ //these are the facets of the construct definition
+ Map facets = (Map) theRule.get("mapping");
+ if (facets == null) {
+ return pack;
+ }
+
+ facets.entrySet().stream()
+ .forEach(
+ theEntry ->
+ {
+ Map.Entry entry = (Map.Entry) theEntry;
+ Map facetDef = (Map) entry.getValue();
+
+ String storage = (String) facetDef.getOrDefault("storage", "");
+ String type = (String) facetDef.get("type");
+
+ if ("none".equals(storage)) {
+ return;
+ }
+ if ("map".equals(type)) {
+ //maps are used for cross-references between constructs or for
+ //constructs facets
+ return;
+ }
+ Object val = theDef.get(entry.getKey());
+ if ("seq".equals(type)) {
+ //sequences can be stored inlined, if so instructed ..
+ if ("inline".equals(storage)) {
+ val = JSONObject.valueToString(val);
+ } else {
+ return;
+ }
+ }
+ if ("no".equals(facetDef.getOrDefault("required", "no"))) {
+ pack.putOpt((String) entry.getKey(), theDef.get(entry.getKey()));
+ } else {
+ pack.putOnce((String) entry.getKey(), theDef.get(entry.getKey()));
+ }
+ });
+ return pack;
+ }
+
    /* a sort of catalog of neo identifiers generated for the different
     * constructs (or their types) we store
     */
    private static class Tracker<T> {

        //rows are construct kinds ("Node", "Data", ..), columns are names,
        //cells are the tracked info (in practice: neo4j node ids)
        private Table<String, String, T>
                typeTracker = HashBasedTable.create(),
                templateTracker = HashBasedTable.create();

        //records the info generated for a type of the given construct kind
        void trackType(String theConstruct, String theName, T theInfo) {
            typeTracker.put(theConstruct, theName, theInfo);
        }

        //returns the tracked info for a type, or null when never tracked
        T lookupType(String theConstruct, String theName) {
            return typeTracker.get(theConstruct, theName);
        }

        //true when the given type was tracked during this run
        boolean tracksType(String theConstruct, String theName) {
            return typeTracker.contains(theConstruct, theName);
        }

        //records the info generated for a template of the given construct kind
        void trackTemplate(String theConstruct, String theName, T theInfo) {
            templateTracker.put(theConstruct, theName, theInfo);
        }

        //returns the tracked info for a template, or null when never tracked
        T lookupTemplate(String theConstruct, String theName) {
            return templateTracker.get(theConstruct, theName);
        }

    }
+}
diff --git a/dcaedt_catalog/db/src/main/resources/tosca-schema.yaml b/dcaedt_catalog/db/src/main/resources/tosca-schema.yaml
new file mode 100644
index 0000000..5944e22
--- /dev/null
+++ b/dcaedt_catalog/db/src/main/resources/tosca-schema.yaml
@@ -0,0 +1,1231 @@
+_status_values: &status_values
+ enum:
+ - supported
+ - unsupported
+ - experimental
+ - deprecated
+
+#I do not know that the lists and maps qualify as 'primitive' ..
+_primitive_types: &primitive_types
+ enum: [string,integer,float,boolean,timestamp,list,map,version,range,scalar-unit.size,scalar-unit.frequency,scalar-unit.time]
+
+#needs custom validation as we have to make sure there are 2 elements and allow for the
+#UNBOUNDED keyword as second element
+_range_definition: &range_definition
+ type: seq
+ name: range_definition
+ sequence:
+ - type: scalar
+
+#see A.5.2
+#this is where the need of verifying the size of a collection (sequence/map) came from
+#this is specified as a sequence where each entry is a map with one entry??
+_constraints_sequence: &constraints_sequence
+ name: constraints_sequence
+# short: "0"
+ type: seq
+ sequence:
+ - type: map
+# length: 1
+ mapping:
+ equal:
+ desc: "Constrains a property or parameter to a value equal to the value declared."
+ type: any
+ required: no
+ greater_than:
+ desc: "Constrains a property or parameter to a value greater than the value declared"
+ type: scalar
+ required: no
+ greater_or_equal:
+ desc: "Constrains a property or parameter to a value greater than or equal to the value declared."
+ type: scalar
+ required: no
+ less_than:
+ desc: "Constrains a property or parameter to a value less than the value declared"
+ type: scalar
+ required: no
+ less_or_equal:
+ desc: "Constrains a property or parameter to a value less than or equal to the value declared."
+ type: scalar
+ required: no
+ in_range:
+ desc: "Constrains a property or parameter to a value in range of (inclusive) the two values declared.
+"
+ type: seq
+# length: 2
+ sequence:
+ - type: scalar
+ required: no
+ valid_values:
+ desc: "Constrains a property or parameter to a value that is in the list of declared values"
+ type: seq
+ sequence:
+ - type: scalar
+ required: no
+ length:
+ desc: "Constrains the property or parameter to a value of a given length."
+ type: int
+ required: no
+ min_length:
+ desc: "Constrains the property or parameter to a value to a minimum length"
+ type: scalar
+ required: no
+ max_length:
+ desc: "Constrains the property or parameter to a value to a maximum length"
+ type: scalar
+ required: no
+ pattern:
+ desc: "Constrains the property or parameter to a value that is allowed by the provided regular expression."
+ type: str
+ required: no
+
+# section A.5.3 property_filter_definition
+# it is a constraints sequence that gets attached to a property ..
+_property_filter_definition: &property_filter_definition
+ name: property_filter_definition
+ type: map
+ mapping:
+ =:
+ *constraints_sequence
+
+#section A.5.4 node_filter_definition
+_node_filter_definition: &node_filter_definition
+ type: map
+ name: node_filter_definition
+ mapping:
+ properties:
+ desc: "property names to constraints to be applied to those properties"
+ required: no
+ type: seq
+ sequence:
+ - *property_filter_definition
+# - type: map
+# mapping:
+# =:
+# *constraints_sequence
+ capabilities:
+ desc: ""
+ required: no
+ type: seq
+ sequence:
+ - type: map
+ name: node_filter_capabilities_sequence
+ desc: "the key is a capability name or type"
+ mapping:
+ =:
+ name: node_filter_capabilities_entry
+ type: map
+ mapping:
+ properties:
+ desc: "the capability properties and their constraints"
+ name: node_filter_capabilities_properties
+ type: seq
+ sequence:
+ - type: map
+ name: node_filter_capabilities_property
+ mapping:
+ =: *constraints_sequence
+
+#used in property and attribute definitions
+_entry_schema_definition: &entry_schema_definition
+ desc: "The optional key that is used to declare the name of the Datatype definition for entries of set types such as the TOSCA list or map"
+ name: entry_schema_definition
+ required: no
+ type: map
+ short: type
+ mapping:
+ "type":
+ desc: "collection element type"
+ required: yes
+ type: str
+ description:
+ required: no
+ type: str
+ constraints:
+ *constraints_sequence
+
+# see section A.5.5
+_artifact_definition: &artifact_definition
+ type: map
+ name: artifact_definition
+ short: implementation # assumes type can be inferred ..
+ mapping:
+ "type":
+ desc: "The required artifact type for the artifact definition"
+ required: yes
+ type: str
+ description:
+ desc: "The optional description for the artifact definition"
+ required: no
+ type: str
+ implementation:
+ desc: "The optional URI string (relative or absolute) which can be used to locate the artifacts file.
+"
+ required: no
+ type: str
+ repository:
+ desc: "The optional name of the repository definition which contains the location of the external repository that contains the artifact"
+ required: no
+ type: str
+ deploy_path:
+ desc: "The file path the associated file would be deployed into within the target node's container."
+ required: no
+ type: str
+
+# see section A.5.6
+_repository_definition: &repository_definition
+ type: map
+ name: repository_definition
+ short: url
+ mapping:
+ description:
+ desc: "The optional description for the repository.
+"
+ required: no
+ type: str
+ url:
+ desc: "The required URL or network address used to access the repository"
+ required: yes
+ type: str
+ credential:
+ desc: "The optional Credential used to authorize access to the repository"
+ required: no
+ type: str
+
+#see section A.5.7
+_property_definition: &property_definition
+ type: map
+ name: property_definition
+ mapping:
+ "type":
+ type: str
+ required: yes
+#not as easy, it can be an user defined data type
+# <<: *primitive_types
+ description:
+ type: str
+ required: no
+ constraints:
+ desc: "The optional list of sequenced constraint clauses for the Data Type."
+ required: no
+ <<: *constraints_sequence
+ default:
+ type: any
+ required: no
+ "required":
+ type: bool
+ required: no
+ status:
+ type: str
+ required: no
+ <<: *status_values
+ entry_schema:
+ <<: *entry_schema_definition
+# desc: "used to declare the name of the Datatype definition for entries of set types such as the TOSCA list or map."
+# type: str
+# required: no
+
+#see section A.5.8
+#_property_assignment_definition: &property_assignment_definition
+
+#see A.5.9
+_attribute_definition: &attribute_definition
+ type: map
+ name: attribute_definition
+ mapping:
+ "type":
+ type: str
+ required: yes
+# <<: *primitive_types
+ description:
+ type: str
+ required: no
+ default:
+ type: any
+ required: no
+ status:
+ desc: "The optional status of the attribute relative to the specification or implementation"
+ type: str
+ required: no
+ <<: *status_values
+ entry_schema:
+ <<: *entry_schema_definition
+
+#see section A.5.10
+#here again, we must support the short form which is the most common
+_attribute_assignment_definition: &attribute_assignment_definition
+ type: map
+ name: attribute_assignment_definition
+ mapping:
+ description:
+ desc: "The optional description of the attribute."
+ required: no
+ type: str
+ value:
+#actually 'value | value_expression'
+ desc: "represent the type-compatible value to assign to the named attribute. Attribute values may be provided as the result from the evaluation of an expression or a function"
+ required: yes
+ type: any
+
+
+# see spec section A.5.11
+
+# see spec section A.5.11.1: variant to be used in node or relationship type definitions
+_type_operation_definition: &type_operation_definition
+ type: map
+ name: type_operation_definition
+ short: implementation
+ mapping:
+ description:
+ desc: "The optional description string for the associated named operation."
+ required: no
+ type: str
+ implementation:
+ desc: "The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR file)"
+ required: no
+ type: str
+ inputs:
+ desc: ""
+ required: no
+ type: map
+ mapping:
+ =:
+ desc: "a property value or an expression providing a input value"
+ name: property_assignment
+ type: any
+
+# from A.5.11.2
+_template_operation_definition: &template_operation_definition
+ type: map
+ name: template_operation_definition
+ short: implementation
+ mapping:
+ description:
+ desc: "The optional description string for the associated named operation."
+ required: no
+ type: str
+ implementation:
+ desc: "The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR file)"
+ name: template_operation_implementation_definition
+ required: no
+ short: primary
+ type: map
+ mapping:
+ primary:
+ desc: "The optional implementation artifact name (e.g., the primary script file name within a TOSCA CSAR file). "
+ required: no
+ type: str
+ dependencies:
+ desc: "The optional list of one or more dependent or secondary implementation artifact name which are referenced by the primary implementation artifact (e.g., a library the script installs or a secondary script)"
+ required: no
+ type: seq
+ sequence:
+ - type: str
+ inputs:
+ desc: ""
+ required: no
+ type: map
+ mapping:
+ =:
+ desc: "a property value or an expression providing a input value"
+ name: property_assignment
+ type: any
+
+
+# see section A.5.12, specifically A.5.12.2.1 : definition to be used in node or relationship type definition
+_type_interface_definition: &type_interface_definition
+ type: map
+ name: type_interface_definition
+ mapping:
+ "type":
+ desc: "represents the required name of the Interface Type for the interface definition
+"
+ required: yes
+ type: str
+ inputs:
+ desc: "The optional list of input property definitions available to all defined operations"
+ type: map
+ mapping:
+ =:
+ *property_definition
+ =:
+ *type_operation_definition
+
+# see section A.5.12.2.2, extended notation to be used in node or relationship template definitions
+_template_interface_definition: &template_interface_definition
+ type: map
+ name: template_interface_definition
+ mapping:
+ inputs:
+ desc: "The optional list of input property definitions available to all defined operations"
+ type: map
+ mapping:
+ =:
+ desc: "a property value or an expression providing a property value"
+ name: property_assignment
+ type: any
+ =:
+ *template_operation_definition
+
+
+# A.6 section: type specific definitions
+
+# see section A.6.1
+_capability_definition: &capability_definition
+ type: map
+ name: capability_definition
+ short: type
+ mapping:
+ "type":
+ desc: "The required name of the Capability Type the capability definition is based upon"
+ required: yes
+ type: str
+ description:
+ desc: "The optional description of the Capability definition"
+ required: no
+ type: str
+ properties:
+ desc: ""
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+ attributes:
+ desc: "An optional list of property definitions for the Capability definition"
+ required: no
+ type: map
+ mapping:
+ =:
+ *attribute_definition
+ valid_source_types:
+ desc: ""
+ required: no
+ type: seq
+ sequence:
+ - type: str
+ occurrences:
+ desc: "The optional minimum and maximum occurrences for the capability."
+ required: no
+ <<: *range_definition
+
+# see section A.6.2
+#
+_requirement_definition: &requirement_definition
+ type: map
+ name: requirement_definition
+ short: capability #as per A.6.2.2.1
+ mapping:
+ capability:
+ desc: "The required reserved keyname used that can be used to provide the name of a valid Capability Type that can fulfil the requirement"
+ required: yes
+ type: str
+ node:
+ desc: "The optional reserved keyname used to provide the name of a valid Node Type that contains the capability definition that can be used to fulfil the requirement. "
+ required: no
+ type: str
+ relationship:
+# and from section A.6.2.1, this one is an oddball
+ desc: "The optional reserved keyname used to provide the name of a valid Relationship Type to construct when fulfilling the requirement."
+ required: no
+ name: requirement_relationship_definition
+ short: type
+ type: map
+ mapping:
+ type:
+ desc: "The optional reserved keyname used to provide the name of the Relationship Type for the requirement definitions relationship keyname.
+"
+ required: yes
+ type: str
+ interfaces:
+ #not clear which interface definition is to be used here
+ desc: "allows augmentation (additional properties and operations) of the interfaces defined by the relationship type indicated above"
+ required: no
+ type: map
+ mapping:
+ =:
+ *type_interface_definition
+ occurrences:
+ desc: "The optional minimum and maximum occurrences for the requirement."
+ required: no
+ <<: *range_definition
+
+# see section A.6.3
+_artifact_type_definition: &artifact_type_definition
+ type: map
+ name: artifact_type_definition
+ mapping:
+ derived_from:
+ desc: "An optional parent Artifact Type name the Artifact Type derives from"
+ required: no
+ type: str
+ description:
+ desc: "An optional description for the Artifact Type."
+ required: no
+ type: str
+ mime_type:
+ desc: "The required mime type property for the Artifact Type."
+ required: no
+ type: str
+ file_ext:
+ desc: "The required file extension property for the Artifact Type"
+ required: no
+ type: seq
+ sequence:
+ - type: str
+ properties:
+ desc: "An optional list of property definitions for the Artifact Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+
+#see spec section #A.6.4
+_interface_type_definition: &interface_type_definition
+ type: map
+ name: interface_type_definition
+ mapping:
+ inputs:
+ desc: "The optional list of input property definitions available to all defined operations"
+ type: map
+ mapping:
+ =:
+ type: str
+ desc: "property_name to property_value(_expression) mapping"
+ =:
+ *type_operation_definition
+
+# A.6.5
+_data_type_definition: &data_type_definition
+ type: map
+ name: data_type_definition
+ mapping:
+ derived_from:
+ desc: "The optional key used when a datatype is derived from an existing TOSCA Data Type.
+"
+ required: no
+ type: str
+ description:
+ desc: "The optional description for the Data Type.
+"
+ required: no
+ type: str
+ constraints:
+ desc: "The optional list of sequenced constraint clauses for the Data Type."
+ <<: *constraints_sequence
+ properties:
+ desc: "The optional list property definitions that comprise the schema for a complex Data Type in TOSCA"
+ type: map
+ mapping:
+ =:
+ *property_definition
+
+# see section A.6.6
+_capability_type_definition: &capability_type_definition
+ type: map
+ name: capability_type_definition
+ mapping:
+ derived_from:
+ desc: "An optional parent capability type name this new Capability Type derives from."
+ required: no
+ type: str
+ description:
+ desc: "An optional description for the Capability Type"
+ required: no
+ type: str
+ properties:
+ desc: "An optional list of property definitions for the Capability Type."
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+ attributes:
+ desc: "An optional list of attribute definitions for the Capability Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *attribute_definition
+ valid_source_types:
+ desc: "An optional list of one or more valid names of Node Types that are supported as valid sources of any relationship established to the declared Capability Type"
+ required: no
+ type: seq
+ sequence:
+ - type: str
+
+# section A.6.7 requirement definition: TOSCA YAML profile relies on capability types to
+# define requirements
+
+# see section A.6.9
+_relationship_type_definition: &relationship_type_definition
+ type: map
+ name: relationship_type_definition
+ mapping:
+ derived_from:
+ desc: "An optional parent Relationship Type name the Relationship Type derives from"
+ required: no
+ type: str
+ description:
+ desc: "An optional description for the Relationship Type."
+ required: no
+ type: str
+ properties:
+ desc: "An optional list of property definitions for the Relationship Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+ attributes:
+ desc: "An optional list of attribute definitions for the Relationship Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *attribute_definition
+ interfaces:
+ desc: "An optional list of interface definitions interfaces supported by the Relationship Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *type_interface_definition
+ valid_target_types:
+ desc: "An optional list of one or more names of Capability Types that are valid targets for this relationship. "
+ required: no
+ type: seq
+ sequence:
+ - type: str
+
+#see section 3.6.10
+_group_type_definition: &group_type_definition
+ type: map
+ name: group_type_definition
+ mapping:
+ derived_from:
+ desc: "An optional parent Group Type name this new Group Type derives from"
+ required: no
+ type: str
+ version:
+ desc: "An optional version for the Group Type definition"
+ required: no
+ type: str
+ description:
+ desc: "An optional description for the Group Type"
+ required: no
+ type: str
+ properties:
+ desc: "An optional list of property definitions for the Group Type."
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+ targets:
+ desc: "An optional list of one or more names of Node Types that are valid
+(allowed) as members of the Group Type."
+ required: no
+ type: seq
+ sequence:
+ - type: str
+ interfaces:
+ desc: "An optional list of interface definitions supported by the Group Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *type_interface_definition
+
+#see section 3.6.11
+_policy_type_definition: &policy_type_definition
+ type: map
+ name: policy_type_definition
+ mapping:
+ derived_from:
+ desc: "An optional parent Policy Type name this new Policy Type derives from"
+ required: no
+ type: str
+ version:
+ desc: "An optional version for the Policy Type definition"
+ required: no
+ type: str
+ description:
+ desc: "An optional description for the Policy Type"
+ required: no
+ type: str
+ properties:
+ desc: "An optional list of property definitions for the Policy Type."
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+ targets:
+ desc: "An optional list of valid Node Types or Group Types the Policy Type
+can be applied to"
+ required: no
+ type: seq
+ sequence:
+ - type: str
+
+# see section A.6.8
+_node_type_definition: &node_type_definition
+ type: map
+ name: node_type_definition
+ mapping:
+ derived_from:
+ desc: "An optional parent Node Type name this new Node Type derives from"
+ required: no
+ type: str
+ description:
+ desc: "An optional description for the Node Type"
+ required: no
+ type: str
+ properties:
+ desc: "An optional list of property definitions for the Node Type."
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+ attributes:
+ desc: "An optional list of attribute definitions for the Node Type.
+"
+ required: no
+ type: map
+ mapping:
+ =:
+ *attribute_definition
+ requirements:
+ desc: "An optional sequenced list of requirement definitions for the Node Type.
+"
+ required: no
+ type: seq
+ sequence:
+ - type: map
+ mapping:
+ =:
+ *requirement_definition
+ capabilities:
+ desc: "An optional list of capability definitions for the Node Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *capability_definition
+ interfaces:
+ desc: ""
+ required: no
+ type: map
+ mapping:
+ =:
+ *type_interface_definition
+ artifacts:
+ desc: "An optional list of named artifact definitions for the Node Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *artifact_definition
+
+# A.7 Template specific definitions
+
+# see section A.7.1
+_capability_assignment_definition: &capability_assignment_definition
+ type: map
+ name: capability_assignment_definition
+ mapping:
+ properties:
+ # list of property assignments
+ desc: "An optional list of property definitions for the Capability definition"
+ required: no
+ type: map
+ mapping:
+ =:
+ desc: "a property value or an expression providing a property value"
+ name: property_assignment
+ type: any
+ attributes:
+ # list of attribute assignments
+ desc: "An optional list of attribute definitions for the Capability definition"
+ required: no
+ type: map
+ mapping:
+ =:
+ desc: ""
+ name: attribute_assignment
+ type: any
+
+# see section A.7.2
+_requirement_assignment_definition: &requirement_assignment_definition
+ type: map
+ name: requirement_assignment_definition
+ short: node
+ mapping:
+ capability:
+ desc: " used to provide the name of either a: Capability definition within a target node template that can fulfill the requirement or Capability Type that the provider will use to select a type-compatible target node template to fulfill the requirement at runtime."
+ required: no
+ type: str
+ node:
+#why is this a reference to a node type and not to a node template??
+ desc: "used to identify the target node of a relationship: Node Template name that can fulfil the target node requirement or Node Type name that the provider will use to select a type-compatible node template to fulfil the requirement at runtime"
+ required: no
+ type: str
+ relationship:
+ desc: ""
+ required: no
+#find a better name — name: relationship_definition
+ type: map
+ short: type
+ mapping:
+ "type":
+ desc: "The optional reserved keyname used to provide the name of the Relationship Type for the requirement assignments relationship keyname"
+ required: no
+ type: str
+ properties:
+ desc: ""
+ required: no
+ type: map
+ mapping:
+ =:
+ desc: "a property value or an expression providing a property value"
+ name: property_assignment
+ type: any
+ interfaces:
+ desc: "from A.5.12.2.2, right?"
+ required: no
+ type: map
+ mapping:
+ =:
+ *template_interface_definition
+ node_filter:
+ desc: "The optional filter definition that TOSCA orchestrators or providers would use to select a type-compatible target node that can fulfill the associated abstract requirement at runtime."
+ required: no
+ <<: *node_filter_definition
+
+# see section A.7.3
+_node_template_definition: &node_template_definition
+ type: map
+ name: node_template_definition
+ mapping:
+ "type":
+ desc: "The required name of the Node Type the Node Template is based upon"
+ required: yes
+ type: str
+ description:
+ desc: "An optional description for the Node Template"
+ required: no
+ type: str
+ directives:
+ desc: "An optional list of directive values to provide processing instructions to orchestrators and tooling."
+ required: no
+ type: seq
+ sequence:
+ - type: str
+ properties:
+#custom check needs to be added: the value or expression providing the property value
+#needs to be compatible with the property definition
+ desc: "An optional list of property value assignments for the Node Template."
+ required: no
+ type: map
+ mapping:
+ =:
+ type: any
+ name: property_assignment
+ desc: "a property value or an expression providing a property value"
+ attributes:
+ desc: "An optional list of attribute value assignments for the Node Template"
+ required: no
+ type: map
+ mapping:
+ =:
+ *attribute_assignment_definition
+ requirements:
+ desc: "An optional sequenced list of requirement assignments for the Node Template."
+ required: no
+ type: seq
+ sequence:
+ - type: map
+ mapping:
+ =:
+ *requirement_assignment_definition
+ capabilities:
+ desc: "An optional list of capability assignments for the Node Template."
+ required: no
+ type: map
+ mapping:
+ =:
+ *capability_assignment_definition
+ interfaces:
+ desc: "An optional list of named interface definitions for the Node Template"
+ required: no
+ type: map
+ mapping:
+ =:
+ *template_interface_definition
+ artifacts:
+ desc: "An optional list of named artifact definitions for the Node Template.
+"
+ required: no
+ type: map
+ mapping:
+ =:
+ *artifact_definition
+ node_filter:
+ desc: "The optional filter definition that TOSCA orchestrators would use to select the correct target node. This keyname is only valid if the directive has the value of 'selectable' set."
+ required: no
+ <<: *node_filter_definition
+ copy:
+ desc: "The optional (symbolic) name of another node template to copy into (all keynames and values) and use as a basis for this node template."
+ required: no
+ type: str
+
+# see section A.7.4
+_relationship_template_definition: &relationship_template_definition
+ type: map
+ name: relationship_template_definition
+ mapping:
+ "type":
+ desc: "The required name of the Relationship Type the Relationship Template is based upon"
+ required: yes
+ type: str
+ alias:
+ desc: "The optional name of a different Relationship Template definition whose values are (effectively) copied into the definition for this Relationship Template (prior to any other overrides)."
+ required: no
+ type: str
+ description:
+ desc: "An optional description for the Relationship Template"
+ required: no
+ type: str
+ properties:
+ desc: "An optional list of property assignments for the Relationship Template."
+ required: no
+ name: properties_assignment_validation
+ type: map
+ mapping:
+ =:
+ type: any
+#scalar
+ desc: "an expression providing a property value"
+ attributes:
+ desc: "An optional list of attribute value assignments for the Relationship Template"
+ required: no
+ name: attributes_assignment_validation
+ type: map
+ mapping:
+ =:
+ type: scalar
+ desc: "an expression providing an attribute value"
+ interfaces:
+ desc: "An optional list of named interface definitions for the Relationship Template ('augmentation' is allowed here)"
+ required: no
+ type: map
+ mapping:
+ =:
+ *template_interface_definition
+ copy:
+ desc: "The optional (symbolic) name of another relationship template to copy into (all keynames and values) and use as a basis for this relationship template."
+ required: no
+ type: str
+
+
+# see section 3.7.5
+_group_definition: &group_definition
+ type: map
+ name: group_definition
+ mapping:
+ "type":
+ desc: "The required name of the group type the group definition is based upon"
+ required: yes
+ type: str
+ description:
+ desc: "The optional description for the group definition"
+ required: no
+ properties:
+ desc: " represents the optional list of property assignments for the group definition that provide values for properties defined in its declared Group Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ type: any
+ name: property_assignment
+ targets:
+ desc: "contains the required list of one or more node template names (within the same topology template) that are members of this logical group"
+ required: yes
+ type: seq
+ sequence:
+ - type: str
+ interfaces:
+ desc: "represents the optional list of interface definitions for the group definition that augment those provided by its declared Group Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *template_interface_definition
+
+# see section 3.7.6
+_policy_template_definition: &policy_template_definition
+ type: map
+ name: policy_definition
+ mapping:
+ "type":
+ desc: "The required name of the policy type the policy definition is based upon"
+ required: yes
+ type: str
+ description:
+ desc: "The optional description for the policy definition"
+ required: no
+ properties:
+ desc: "represents the optional list of property assignments for the policy definition that provide values for properties defined in its declared Policy Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ type: any
+ name: property_assignment
+ targets:
+ desc: "represents the optional list of names of node templates or groups that the policy is to applied to"
+ required: no
+ type: seq
+ sequence:
+ - type: str
+
+# see section 3.8 Topology Template definition: defines the topology template of a cloud application.
+# described as a a reusable grammar as it can be a part of a service template definition
+_topology_template_definition: &topology_template_definition
+ type: map
+ name: topology_template_definition
+ mapping:
+ description:
+ desc: "a description of the topology template"
+ required: no
+ type: str
+ inputs:
+ desc: "definition of input parameters for the topology template"
+ name: inputs
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+ node_templates:
+ desc: "definition of the node templates of the topology"
+ name: node_templates
+ required: no
+ type: map
+ mapping:
+ =:
+ *node_template_definition
+ relationship_templates:
+ desc: "definition of the relationship templates of the topology"
+ required: no
+ name: relationship_templates
+ type: map
+ mapping:
+ =:
+ *relationship_template_definition
+ outputs:
+ desc: "definition of output parameters for the topology template"
+ name: outputs
+ required: no
+ type: map
+ mapping:
+ =:
+ *attribute_assignment_definition
+ groups:
+ desc: "An optional list of Group definitions whose members are node templates defined within this same Topology Template"
+ name: groups
+ required: no
+ type: map
+ mapping:
+ =:
+ *group_definition
+ policies:
+ # see 8.2.3, initially the list is not described as sequenced but then the grammar shows it as such !?
+ desc: "An optional sequenced?? list of Policy definitions for the Topology Template."
+ name: policies
+ required: no
+ type: seq
+ sequence:
+ - type: map
+ mapping:
+ =:
+ *policy_template_definition
+ substitution_mappings:
+# one possible short-coming that is visible here is that the definition of the capability
+# and requirements mappings are given in the spec only with the short/inline version of a
+# YAML list/sequence, which cannot be enforced here ..
+ desc: " a description of the topology template"
+ name: substitution_mappings
+ required: no
+ type: map
+ mapping:
+ node_type:
+ desc: "node type name"
+ required: yes
+ type: str
+ capabilities:
+ desc: "map_of_capability_mappings_to_expose"
+ type: map
+ mapping:
+ =:
+ type: seq
+ sequence:
+ - type: str
+ requirements:
+ desc: "map_of_requirement_mapping_to_expose"
+ type: map
+ mapping:
+ =:
+ type: seq
+ sequence:
+ - type: str
+
+
+# see A.9 Service Template definition: A TOSCA Service Template (YAML) document contains
+# element definitions of building blocks for cloud application, or complete models of cloud applications.
+
+type: map
+name: service_template_definition
+mapping:
+ tosca_definitions_version:
+ desc: "Required TOSCA Definitions version string"
+ required: yes
+ type: str
+
+ tosca_default_namespace:
+ desc: "Optional. default namespace (for type schema)"
+ required: no
+ type: str
+
+ metadata:
+ desc: "Optional metadata keyname: value pairs"
+ name: metadata
+ required: no
+ type: map
+ mapping:
+ template_name:
+ desc: "Optional name of this service template"
+ required: no
+ type: str
+ template_author:
+ desc: "Optional author of this service template"
+ required: no
+ type: str
+ template_version:
+ desc: "Optional version of this service template"
+ required: no
+ type: str
+#to add, the spec says: "Optional list of domain or profile specific metadata keynames"
+
+ description:
+ desc: "Optional description of the definitions inside the file"
+ required: no
+ type: str
+
+ imports:
+ desc: "list of import statements for importing other definitions files"
+ name: imports
+ required: no
+ type: seq
+ sequence:
+ - type: str
+
+ dsl_definitions:
+ desc: "list of YAML alias anchors (or macros)"
+ name: dsl_definitions
+ required: no
+ type: map
+ mapping:
+ =:
+ desc: "some piece of valid yaml that makes the anchor/alias definition"
+ type: any
+ required: no
+
+ repositories:
+ desc: "list of external repository definitions which host TOSCA artifacts"
+ name: repositories
+ required: no
+ type: map
+ mapping:
+ =:
+ *repository_definition
+
+ data_types:
+ desc: "list of TOSCA datatype definitions"
+ name: data_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *data_type_definition
+
+ node_types:
+ desc: "list of node type definitions"
+ name: node_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *node_type_definition
+
+ capability_types:
+ desc: "list of capability type definitions"
+ name: capability_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *capability_type_definition
+
+ relationship_types:
+ desc: "list of relationship type definitions"
+ name: relationship_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *relationship_type_definition
+
+ artifact_types:
+ desc: "list of artifact type definitions"
+ name: artifact_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *artifact_type_definition
+
+ interface_types:
+ desc: "list of interface type definitions"
+ name: interface_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *interface_type_definition
+
+ group_types:
+ desc: "list of group type definitions"
+ name: group_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *group_type_definition
+
+ policy_types:
+ desc: "list of policy type definitions"
+ name: policy_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *policy_type_definition
+
+ topology_template:
+ desc: "topology template definition of the cloud application or service"
+ required: no
+ <<: *topology_template_definition
diff --git a/dcaedt_catalog/db/src/main/resources/tosca-storage-schema.yaml b/dcaedt_catalog/db/src/main/resources/tosca-storage-schema.yaml
new file mode 100644
index 0000000..5ca7061
--- /dev/null
+++ b/dcaedt_catalog/db/src/main/resources/tosca-storage-schema.yaml
@@ -0,0 +1,37 @@
+#_policy_type_storage_definition:
+# <<: *policy_type_definition
+# mapping:
+# <<: *policy_type_mapping
+# targets:
+# <<: *policy_type_targets
+# storage: inline
+
+/_data_type_definition/mapping/derived_from:
+ storage: none
+
+/_node_type_definition/mapping/derived_from:
+ storage: none
+
+/_capability_type_definition/mapping/derived_from:
+ storage: none
+/_capability_type_definition/mapping/valid_source_types:
+ storage: inline
+
+/_relationship_type_definition/mapping/derived_from:
+ storage: none
+/_relationship_type_definition/mapping/valid_target_types:
+ storage: inline
+
+/_policy_type_definition/mapping/derived_from:
+ storage: none
+/_policy_type_definition/mapping/targets:
+ storage: inline
+
+/_node_template_definition/mapping/type:
+ storage: none
+
+/_policy_template_definition/mapping/targets:
+ storage: inline
+
+/_policy_template_definition/mapping/type:
+ storage: none