summaryrefslogtreecommitdiffstats
path: root/dcaedt_catalog
diff options
context:
space:
mode:
authorStone, Avi (as206k) <as206k@att.com>2018-04-12 15:46:31 +0300
committerStone, Avi (as206k) <as206k@att.com>2018-04-12 15:49:38 +0300
commit5032434b101f25fa44d2e1f8dc8393e30af1ed4f (patch)
tree2dc7d37a8048e025c7412af080640da4c9a22b65 /dcaedt_catalog
parent2205633792f95f46a02bbf8f87f0c2637265d924 (diff)
DCAE-D be initial commit
DCAE-D be initial commit Issue-ID: SDC-1218 Change-Id: Id18ba96c499e785aa9ac395fbaf32d57f08c281b Signed-off-by: Stone, Avi (as206k) <as206k@att.com>
Diffstat (limited to 'dcaedt_catalog')
-rw-r--r--dcaedt_catalog/.gitignore12
-rw-r--r--dcaedt_catalog/api/pom.xml198
-rw-r--r--dcaedt_catalog/api/src/main/java/org/onap/sdc/dcae/catalog/Catalog.java440
-rw-r--r--dcaedt_catalog/api/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCCatalog.java1227
-rw-r--r--dcaedt_catalog/api/src/main/resources/log4j.properties8
-rw-r--r--dcaedt_catalog/api/src/test/java/org/onap/sdc/dcae/catalog/ASDCCatalogTest.java88
-rw-r--r--dcaedt_catalog/asdc/pom.xml139
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDC.java1101
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCController.java0
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCEngine.java25
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCException.java18
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCUtils.java448
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCUtilsController.java76
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/Blueprinter.java76
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/Cloudify.java249
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/client/ISdcClient.java47
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/client/SdcRestClient.java221
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/ArtifactGroupType.java5
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/ArtifactType.java16
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/AssetType.java5
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/LifecycleOperationType.java16
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/SdcConsumerInfo.java5
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/AbstractSdncException.java97
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/BaseException.java61
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/OkResponseInfo.java8
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/PolicyException.java11
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/RequestError.java65
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/ResponseFormat.java75
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/ServiceException.java12
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/Normalizers.java34
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/SDCResponseErrorHandler.java43
-rw-r--r--dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/SdcRestClientUtils.java85
-rw-r--r--dcaedt_catalog/asdc/src/test/org/onap/sdc/dcae/utils/NormalizersTest.java51
-rw-r--r--dcaedt_catalog/commons/pom.xml135
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Action.java11
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Actions.java201
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Future.java35
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/FutureHandler.java13
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Futures.java257
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Http.java107
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/JSONHttpMessageConverter.java100
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/ListBuilder.java59
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/MapBuilder.java80
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Neo.java54
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxies.java37
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxy.java144
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxy.pojo145
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/ProxyBuilder.java92
-rw-r--r--dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Recycler.java329
-rw-r--r--dcaedt_catalog/db/pom.xml149
-rw-r--r--dcaedt_catalog/db/src/main/java/org/onap/sdc/dcae/db/neo4j/Modeled.java1980
-rw-r--r--dcaedt_catalog/db/src/main/resources/tosca-schema.yaml1231
-rw-r--r--dcaedt_catalog/db/src/main/resources/tosca-storage-schema.yaml37
-rw-r--r--dcaedt_catalog/pom.xml29
-rw-r--r--dcaedt_catalog/service/README.md4
-rw-r--r--dcaedt_catalog/service/pom.xml80
-rw-r--r--dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogController.java594
-rw-r--r--dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogEngine.java26
-rw-r--r--dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogError.java20
-rw-r--r--dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogMessage.java27
-rw-r--r--dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogRequest.java27
-rw-r--r--dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogResponse.java39
-rw-r--r--dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ElementRequest.java6
-rw-r--r--dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ElementsLookup.java49
-rw-r--r--dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ItemsRequest.java16
-rw-r--r--dcaedt_catalog/service/src/main/resources/log4j.properties15
66 files changed, 11090 insertions, 0 deletions
diff --git a/dcaedt_catalog/.gitignore b/dcaedt_catalog/.gitignore
new file mode 100644
index 0000000..a6609e3
--- /dev/null
+++ b/dcaedt_catalog/.gitignore
@@ -0,0 +1,12 @@
+.classpath
+.settings/
+.project
+.DS_Store
+
+#a bit dangerous, make sure we do not have any package named target ..
+**/target/
+pom.xml.tag
+pom.xml.releaseBackup
+pom.xml.versionsBackup
+pom.xml.next
+release.properties
diff --git a/dcaedt_catalog/api/pom.xml b/dcaedt_catalog/api/pom.xml
new file mode 100644
index 0000000..234f12f
--- /dev/null
+++ b/dcaedt_catalog/api/pom.xml
@@ -0,0 +1,198 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.onap.sdc.dcae</groupId>
+ <artifactId>DCAE-DT-Catalog</artifactId>
+ <version>1806.0.1-SNAPSHOT</version>
+ </parent>
+ <artifactId>DCAE-DT-Catalog-API</artifactId>
+ <packaging>jar</packaging>
+ <name>DCAE DT Catalog API</name>
+
+ <build>
+ <sourceDirectory>src/main/java</sourceDirectory>
+ <testSourceDirectory>src/test/java</testSourceDirectory>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>3.1</version>
+ <configuration>
+ <source>1.8</source>
+ <target>1.8</target>
+ <encoding>${project.build.sourceEncoding}</encoding>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>2.10</version>
+ <executions>
+ <execution>
+ <id>copy-dependencies</id>
+ <phase>package</phase>
+ <goals>
+ <goal>copy-dependencies</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>${project.build.directory}/deps</outputDirectory>
+ <overWriteReleases>false</overWriteReleases>
+ <overWriteSnapshots>false</overWriteSnapshots>
+ <overWriteIfNewer>true</overWriteIfNewer>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>buildnumber-maven-plugin</artifactId>
+ <version>1.4</version>
+ <executions>
+ <execution>
+ <phase>validate</phase>
+ <goals>
+ <goal>create</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <doCheck>false</doCheck>
+ <doUpdate>false</doUpdate>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>2.1</version>
+ <configuration>
+ <archive>
+ <manifest>
+ <addDefaultImplementationEntries>true</addDefaultImplementationEntries>
+ </manifest>
+ <manifestEntries>
+ <Implementation-Build>${buildNumber}</Implementation-Build>
+ </manifestEntries>
+ </archive>
+ </configuration>
+ </plugin>
+
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>3.0.2</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>test-jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>2.6</version>
+ <configuration>
+ <descriptorRefs>
+ <descriptorRef>jar-with-dependencies</descriptorRef>
+ </descriptorRefs>
+ <archive>
+ <manifest>
+ <mainClass>org.onap.sdc.dcae.catalog.test.Cataloged</mainClass>
+ </manifest>
+ <manifestEntries>
+ <Implementation-Build>${buildNumber}</Implementation-Build>
+ </manifestEntries>
+ </archive>
+ </configuration>
+ <!-- <executions> <execution> <id>make-assembly</id> this is used for
+ inheritance merges <phase>package</phase> bind to the packaging phase <goals>
+ <goal>single</goal> </goals> </execution> </executions> -->
+ </plugin>
+ </plugins>
+ </build>
+ <repositories>
+ <repository>
+ <snapshots>
+ <enabled>false</enabled>
+ </snapshots>
+ <id>jcenter</id>
+ <name>Bintray JCenter</name>
+ <url>http://repo1.maven.org/maven2/</url>
+ </repository>
+ </repositories>
+ <dependencies>
+ <dependency>
+ <groupId>commons-jxpath</groupId>
+ <artifactId>commons-jxpath</artifactId>
+ <version>1.3</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-lang3</artifactId>
+ <version>3.5</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpasyncclient</artifactId>
+ <version>4.1</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>2.4</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ <version>1.3</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.json</groupId>
+ <artifactId>json</artifactId>
+ <version>20160810</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.onap.sdc.dcae</groupId>
+ <artifactId>DCAE-DT-Catalog-Commons</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.onap.sdc.dcae</groupId>
+ <artifactId>DCAE-DT-Catalog-ASDC</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.onap.sdc.dcae</groupId>
+ <artifactId>DCAE-DT-Validator-Checker</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.12</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-core</artifactId>
+ <version>1.10.19</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.assertj</groupId>
+ <artifactId>assertj-core</artifactId>
+ <!-- use 2.8.0 for Java 7 projects -->
+ <version>3.8.0</version>
+ <scope>test</scope>
+ </dependency>
+
+
+ </dependencies>
+</project>
diff --git a/dcaedt_catalog/api/src/main/java/org/onap/sdc/dcae/catalog/Catalog.java b/dcaedt_catalog/api/src/main/java/org/onap/sdc/dcae/catalog/Catalog.java
new file mode 100644
index 0000000..b73bb09
--- /dev/null
+++ b/dcaedt_catalog/api/src/main/java/org/onap/sdc/dcae/catalog/Catalog.java
@@ -0,0 +1,440 @@
+package org.onap.sdc.dcae.catalog;
+
+import java.net.URI;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.LinkedList;
+import java.util.HashMap;
+import java.util.EnumSet;
+
+import org.json.JSONObject;
+import org.onap.sdc.dcae.catalog.commons.Action;
+import org.onap.sdc.dcae.catalog.commons.Future;
+import org.onap.sdc.dcae.catalog.commons.Futures;
+import org.onap.sdc.dcae.catalog.commons.Proxies;
+
+
+import org.json.JSONArray;
+
+/*
+ *
+ */
+public interface Catalog {
+
+
+ public abstract URI getUri();
+
+ public abstract String namespace();
+
+ public abstract boolean same(Catalog theCatalog);
+
+ public abstract <T> T proxy(JSONObject theData, Class<T> theType);
+
+
+ /* Base class for all Catalog objects.
+ */
+ public static interface Element<T extends Element<T>> {
+
+ /**
+ * provide a typed 'self' reference
+ */
+ public default T self() { return (T)this; }
+
+ /**
+ */
+ public default Class<T> selfClass() {
+ return (Class<T>)getClass().getInterfaces()[0];
+ }
+
+ /* */
+ public Catalog catalog();
+
+ /**
+ */
+ public String id();
+
+ /**
+ * Direct access to the underlying JSON object.
+ * Warning: Modifications to the JSON object are reflected in the Element.
+ */
+ public JSONObject data();
+
+ /**
+ * Provides the labels of the artifacts (we use labels to type/classify the
+ * neo4j artifacts, nodes and edges).
+ * Currently not all queries retrieve the labels.
+ */
+ public String[] labels();
+
+ /* Allows for typed deep exploration of the backing JSON data structure
+ * <pre>
+ * {@code
+ * element("type", Type.class);
+ * }
+ * </pre>
+ *
+ * @arg theName name of a JSON entry; it must map to another JSONObject.
+ * @arg theType the expected wrapping catalog artifact type
+ * @return the JSON entry wrapped in the specified type
+ */
+ public default <E extends Element<E>> E element(String theName, Class<E> theType) {
+ JSONObject elemData = data().optJSONObject(theName);
+ if (elemData == null)
+ return null;
+ else
+ return catalog().proxy(elemData, theType);
+ }
+
+ /* Similar to {@link #element(String,Class)} but for collection wrapping.
+ * Example:
+ * <pre>
+ * {@code
+ * element("nodes", Nodes.class);
+ * }
+ * </pre>
+ */
+ public default <E extends Elements> E elements(String theName, Class<E> theType) {
+ //throws ReflectiveOperationException {
+ JSONArray elemsData = data().optJSONArray(theName);
+ if (elemsData == null) {
+ return null;
+ }
+ else {
+ Class etype = Proxies.typeArgument(theType);
+ Elements elems = null;
+ try {
+ elems = theType.newInstance();
+ }
+ catch (ReflectiveOperationException rox) {
+ throw new RuntimeException("Failed to instantiate " + theType, rox);
+ }
+
+ try{
+ for (Iterator i = elemsData.iterator(); i.hasNext();) {
+ JSONObject elemData = (JSONObject)i.next();
+ elems.add(catalog().proxy(elemData, etype));
+ }
+ }
+ catch(Exception e){
+ throw new RuntimeException("Failed to fetch json data ", e);
+ }
+ return (E)elems;
+ }
+ }
+
+ /*
+ */
+ public default boolean same(Element theElem) {
+ return this.catalog().same(theElem.catalog()) &&
+ this.id().equals(theElem.id());
+ }
+ }
+
+ /*
+ * Base class for all collections of elements.
+ */
+ public static class Elements<T extends Element>
+ extends LinkedList<T> {
+
+ public String toString() {
+ StringBuilder sb = new StringBuilder("[");
+ for (Element el: this) {
+ sb.append(el.selfClass().getSimpleName())
+ .append("(")
+ .append(el.data())
+ .append("),");
+ }
+ sb.append("]");
+ return sb.toString();
+ }
+ }
+
+ /*
+ * We need this contraption in order to store a mix of Folders and CatalogItem
+ * instances (Elements in self is not good because it is defined around a
+ * type variable so we cannot use reflection to determine the type at runtime
+ * - generics are resolved compile time)
+ */
+ public static class Mixels extends Elements<Element> {
+ }
+
+ /*
+ */
+ public static interface Item<T extends Item<T>> extends Element<T> {
+
+ public String name();
+
+ public String description();
+
+ /* catalog item native identifier */
+ public String itemId();
+
+ /* similar to @ItemAction#withModels
+ */
+ default public Future<Templates> models() {
+ Templates t = elements("models", Templates.class);
+ if (t != null)
+ return Futures.succeededFuture(t);
+ else
+ return Futures.advance(catalog().item(itemId())
+ .withModels()
+ .execute(),
+ item -> (Templates)item.elements("models", Templates.class));
+ }
+
+ /* similar to @ItemAction#withAnnotations
+ */
+ default public Future<Annotations> annotations() {
+ Annotations a = elements("annotations", Annotations.class);
+ if (a != null)
+ return Futures.succeededFuture(a);
+ else
+ return Futures.advance(catalog().item(itemId())
+ .withAnnotations()
+ .execute(),
+ item -> (Annotations)item.elements("annotations", Annotations.class));
+ }
+ }
+
+ /*
+ * Collection of catalog items.
+ */
+ public static class Items extends Elements<Item> {
+ }
+
+ /*
+ */
+ public static interface Folder extends Element<Folder> {
+
+ public String name();
+
+ public String description();
+
+ public String itemId();
+
+ /* the namespace is immutable */
+ public default String namespace() {
+ return catalog().namespace();
+ }
+
+ /*
+ */
+ default public Future<Items> items() {
+ Items i = elements("items", Items.class);
+ if (i != null)
+ return Futures.succeededFuture(i);
+ else
+ return Futures.advance(catalog().folder(itemId())
+ .withItems()
+ .execute(),
+ folder -> (Items)folder.elements("items", Items.class));
+ }
+
+ /*
+ */
+ default public Future<Folders> parts() {
+ Folders f = elements("parts", Folders.class);
+ if (f != null)
+ return Futures.succeededFuture(f);
+ else
+ return Futures.advance(catalog().folder(itemId())
+ .withParts()
+ .execute(),
+ folder -> (Folders)folder.elements("parts", Folders.class));
+ }
+
+ /*
+ */
+ public Future<Folders> partof();
+
+ }
+
+
+ public static class Folders extends Elements<Folder> {
+ }
+
+ //no predefined properties here
+ public static interface Annotation extends Element<Annotation> {
+
+ public default String namespace() {
+ return catalog().namespace();
+ }
+ }
+
+ public static class Annotations extends Elements<Annotation> {
+ }
+
+ /**
+ * A TOSCA template.
+ * When a deep loading method is used to obtain a Template its collection
+ * of inputs and nodes will be immediately available (and 'cached' within
+ * the backing JSON object). It can be retrieved through a call to
+ * {@link Element#elements(String,Class)} as in:
+ * elements("inputs", Inputs.class)
+ * or
+ * elements("nodes", Nodes.class)
+ *
+ * The same result will be obtained through one of the methods of the
+ * navigation interface, {@link #inputs()} or {@link #nodes()}; in this case
+ * the result does not become part of the backing JSONObject.
+ */
+ public static interface Template extends Element<Template> {
+
+ public String name();
+
+ public String version();
+
+ public String description();
+
+ }
+
+ /**
+ * Collection of {@link Catalog.Template template} instances.
+ */
+ public static class Templates extends Elements<Template> {
+ }
+
+
+ /**
+ * A TOSCA type declaration.
+ */
+ public interface Type extends Element<Type> {
+
+ public String name();
+
+ /**
+ * Allows navigation to the parent {@link Catalog.Type type}, if any.
+ */
+ public Future<Type> derivedfrom();
+
+ }
+
+ /**
+ * Collection of {@link Catalog.Type type} instances.
+ */
+ public static class Types extends Elements<Type> {
+ }
+
+
+ public static interface TemplateAction extends Action<Template> {
+
+ public TemplateAction withInputs();
+
+ public TemplateAction withOutputs();
+
+ public TemplateAction withNodes();
+
+ public TemplateAction withNodeProperties();
+
+ public TemplateAction withNodeRequirements();
+
+ public TemplateAction withNodePropertiesAssignments();
+
+ public TemplateAction withNodeCapabilities();
+
+ public TemplateAction withNodeCapabilityProperties();
+
+ public TemplateAction withNodeCapabilityPropertyAssignments();
+
+ public TemplateAction withPolicies();
+
+ public TemplateAction withPolicyProperties();
+
+ public TemplateAction withPolicyPropertiesAssignments();
+
+ @Override
+ public Future<Template> execute();
+
+ }
+
+ /*
+ */
+ public static interface TypeAction extends Action<Type> {
+
+ public TypeAction withHierarchy();
+
+ public TypeAction withRequirements();
+
+ public TypeAction withCapabilities();
+
+ @Override
+ public Future<Type> execute();
+
+ }
+
+ /*
+ */
+ public static interface FolderAction extends Action<Folder> {
+
+ public FolderAction withAnnotations();
+
+ public FolderAction withAnnotations(String theSelector);
+
+ public FolderAction withItems();
+
+ public FolderAction withItemAnnotations();
+
+ public FolderAction withItemAnnotations(String theSelector);
+
+ public FolderAction withItemModels();
+
+ public FolderAction withParts();
+
+ public FolderAction withPartAnnotations();
+
+ public FolderAction withPartAnnotations(String theSelector);
+
+ @Override
+ public Future<Folder> execute();
+ }
+
+ /*
+ */
+ public static interface ItemAction<T extends Item> extends Action<T> {
+
+ public ItemAction<T> withModels();
+
+ public ItemAction<T> withAnnotations();
+
+ @Override
+ public Future<T> execute();
+
+ }
+
+ /**
+ */
+ public abstract Future<Folders> roots();
+
+ /**
+ */
+ public abstract Future<Folders> rootsByLabel(String theLabel);
+
+ /**
+ */
+ public abstract Future<Mixels> lookup(JSONObject theSelector);
+
+ public abstract Future<Mixels> lookup(String theAnnotation, JSONObject theSelector);
+
+ /**
+ */
+ public abstract FolderAction folder(String theFolderId);
+
+ /**
+ */
+ public abstract <T extends Item> ItemAction<T> item(String theItemId);
+
+ /**
+ */
+ public abstract TemplateAction template(String theTemplateId);
+
+ /**
+ */
+ public abstract TypeAction type(String theNamespace, String theTypeName);
+
+
+
+}
diff --git a/dcaedt_catalog/api/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCCatalog.java b/dcaedt_catalog/api/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCCatalog.java
new file mode 100644
index 0000000..e08f3a6
--- /dev/null
+++ b/dcaedt_catalog/api/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCCatalog.java
@@ -0,0 +1,1227 @@
+package org.onap.sdc.dcae.catalog.asdc;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.jxpath.JXPathContext;
+import org.apache.commons.jxpath.JXPathNotFoundException;
+import org.apache.commons.lang3.StringUtils;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.dcae.catalog.Catalog;
+import org.onap.sdc.dcae.catalog.commons.*;
+import org.onap.sdc.dcae.checker.*;
+
+import java.io.*;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.*;
+import java.util.function.BiFunction;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+
+public class ASDCCatalog implements Catalog {
+
+ private
+ static final String JXPATH_NOT_FOUND_EXCEPTION = "JXPathNotFoundException {}";
+ private
+ static final String OCCURRENCES = "occurrences";
+ private
+ static final String TOPOLOGY_TEMPLATE_NODE_TEMPLATES = "/topology_template/node_templates";
+ private
+ static final String NODES_NAME = "/nodes[name='";
+ private
+ static final String ITEM_ID = "itemId";
+ private
+ static final String LABELS = "labels";
+ private
+ static final String ARTIFACT_URL = "artifactURL";
+ private
+ static final String CAPABILITY = "capability";
+ private
+ static final String DATABASE = "Database";
+ private
+ static final String COLLECTOR = "Collector";
+ private
+ static final String MICROSERVICE = "Microservice";
+ private
+ static final String ANALYTICS = "Analytics";
+ private
+ static final String POLICY = "Policy";
+ private
+ static final String SOURCE = "Source";
+ private
+ static final String UTILITY = "Utility";
+ private
+ static final String NAME = "name";
+ private
+ static final String ID = "id";
+ private
+ static final String ARTIFACT_NAME = "artifactName";
+ private
+ static final String DESCRIPTION = "description";
+ private
+ static final String MODELS = "models";
+ private
+ static final String ARTIFACTS = "artifacts";
+ private
+ static final String ITEMS = "items";
+ private
+ static final String PROPERTIES = "']/properties";
+ private
+ static final String TOPOLOGY_TEMPLATE_NODE_TEMPLATES1 = "/topology_template/node_templates/";
+ private
+ static final String PROPERTIES_NAME = "']/properties[name='";
+ private
+ static final String CAPABILITIES = "']/capabilities";
+ private
+ static final String CAPABILITIES_NAME = "']/capabilities[name='";
+
+ private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+ private ASDC asdc;
+
+ private JSONObject folders = new JSONObject();
+ private String[] folderFields = new String[] {ID, ITEM_ID, NAME};
+
+ private ProxyBuilder proxies;
+ private Map<Target, JXPathContext> contexts = new HashMap<Target, JXPathContext>();
+
+ // resource and its catalog
+ private Map<UUID, org.onap.sdc.dcae.checker.Catalog> catalogs = new HashMap<UUID, org.onap.sdc.dcae.checker.Catalog>();
+
+ public ASDCCatalog(URI theURI) {
+
+ this.asdc = new ASDC();
+ this.asdc.setUri(theURI);
+
+ initFolders();
+
+ this.proxies = new ProxyBuilder().withConverter(v -> v == null ? null : UUID.fromString(v.toString()), UUID.class)
+ .withExtensions(
+ new ImmutableMap.Builder<String, BiFunction<Proxy, Object[], Object>>().put("data", (proxy, args) -> proxy.data())
+ .build()).withContext(new ImmutableMap.Builder<String, Object>().put("catalog", this).build());
+ }
+
+ private void initFolders() {
+
+ JSONArray labels = new JSONArray();
+ labels.put("Folder");
+ labels.put("DCAE");
+ labels.put("Superportfolio"); // for CCD compatibility
+
+ folders.put(DATABASE, new JSONObject().put(NAME, DATABASE).put(ID, "dcae_database")
+ .put(ITEM_ID, DATABASE).put(LABELS, labels));
+ folders.put(COLLECTOR, new JSONObject().put(NAME, COLLECTOR).put(ID, "dcae_collector")
+ .put(ITEM_ID, COLLECTOR).put(LABELS, labels));
+ folders.put(MICROSERVICE, new JSONObject().put(NAME, MICROSERVICE).put(ID, "dcae_microservice")
+ .put(ITEM_ID, MICROSERVICE).put(LABELS, labels));
+ folders.put(ANALYTICS, new JSONObject().put(NAME, ANALYTICS).put(ID, "dcae_analytics")
+ .put(ITEM_ID, ANALYTICS).put(LABELS, labels));
+ folders.put(POLICY, new JSONObject().put(NAME, POLICY).put(ID, "dcae_policy").put(ITEM_ID, POLICY)
+ .put(LABELS, labels));
+ folders.put(SOURCE, new JSONObject().put(NAME, SOURCE).put(ID, "dcae_source").put(ITEM_ID, SOURCE)
+ .put(LABELS, labels));
+ folders.put(UTILITY, new JSONObject().put(NAME, UTILITY).put(ID, "dcae_utility")
+ .put(ITEM_ID, UTILITY).put(LABELS, labels));
+ }
+
+ public URI getUri() {
+ return this.asdc.getUri();
+ }
+
+ public String namespace() {
+ return "asdc";
+ }
+
+ public boolean same(Catalog theCatalog) {
+ return true;
+ }
+
+ public <T> T proxy(JSONObject theData, Class<T> theType) {
+ return proxies.build(theData, theType);
+ }
+
+ /** */
+ public Future<Folders> roots() {
+
+ Folders roots = new Folders();
+ for (Iterator fi = folders.keys(); fi.hasNext();) {
+ roots.add(proxies.build(folders.getJSONObject((String) fi.next()), Folder.class));
+ }
+ return Futures.succeededFuture(roots);
+ }
+
+ /** */
+ public Future<Folders> rootsByLabel(String theLabel) {
+
+ Folders roots = new Folders();
+ for (Iterator fi = folders.keys(); fi.hasNext();) {
+ JSONObject folder = folders.getJSONObject((String) fi.next());
+ JSONArray labels = folder.getJSONArray(LABELS);
+
+ for (int i = 0; i < labels.length(); i++) {
+ if (labels.get(i).equals(theLabel)) {
+ roots.add(proxies.build(folder, Folder.class));
+ }
+ }
+ }
+ return Futures.succeededFuture(roots);
+ }
+
+ /** */
+ public Future<Mixels> lookup(JSONObject theSelector) {
+ return Futures.succeededFuture(new Mixels());
+ }
+
+ public Future<Mixels> lookup(String theAnnotation, JSONObject theSelector) {
+ return Futures.succeededFuture(new Mixels());
+ }
+
+ /** */
+ public ItemAction item(String theItemId) {
+ return new ResourceAction(UUID.fromString(theItemId));
+ }
+
+ /** */
+ public FolderAction folder(String theFolderId) {
+ return new FolderAction(theFolderId);
+ }
+
+ public TemplateAction template(String theId) {
+ return new TemplateAction(theId);
+ }
+
+ public TypeAction type(String theItemId, String theName) {
+ return new TypeAction(UUID.fromString(theItemId), theName);
+ }
+
+ protected static String resolveTargetName(Target theTarget) {
+ return (String) ((Map) ((Map) theTarget.getTarget()).get("metadata")).get("template_name");
+ }
+
+ protected Object resolve(Target theTarget, String thePath) {
+ try {
+ return contexts.get(theTarget).getValue(thePath);
+ } catch (JXPathNotFoundException pnfx) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "JXPathNotFoundException {}", pnfx);
+ return null;
+ }
+ }
+
+ // covers common TOSCA pattern of single entry maps
+ public Map.Entry<String, Map> toEntry(Object theValue) {
+ return (Map.Entry<String, Map>) ((Map) theValue).entrySet().iterator().next();
+ }
+
+ protected Map selectEntries(Map theOriginal, String... theKeys) {
+ Arrays.sort(theKeys);
+ Map selection = ((Set<Map.Entry>) theOriginal.entrySet()).stream()
+ .filter(e -> Arrays.binarySearch(theKeys, e.getKey().toString()) >= 0)
+ .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue()));
+ return selection;
+ }
+
+ protected Map evictEntries(Map theOriginal, String... theKeys) {
+ Arrays.sort(theKeys);
+ Map selection = ((Set<Map.Entry>) theOriginal.entrySet()).stream()
+ .filter(e -> Arrays.binarySearch(theKeys, e.getKey().toString()) < 0)
+ .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue()));
+ return selection;
+ }
+
+ protected MapBuilder renderEntry(Map.Entry theEntry, String... theKeys) {
+ MapBuilder out = new MapBuilder();
+ out.put(NAME, theEntry.getKey());
+
+ for (String key : theKeys) {
+ out.put(key, ((Map) theEntry.getValue()).get(key));
+ }
+ return out;
+ }
+
+ protected <T> Stream<T> stream(Iterator<T> theSource) {
+ return StreamSupport.stream(Spliterators.spliteratorUnknownSize(theSource,
+ Spliterator.NONNULL | Spliterator.DISTINCT | Spliterator.IMMUTABLE), false);
+ }
+
+ private JSONArray selectModels(JSONArray theArtifacts) {
+ JSONArray models = new JSONArray();
+ if (theArtifacts == null) {
+ return models;
+ }
+
+ for (int i = 0; i < theArtifacts.length(); i++) {
+ JSONObject artifact = theArtifacts.getJSONObject(i);
+ String name = artifact.optString(ARTIFACT_NAME);
+ if (name != null && StringUtils.containsIgnoreCase(name, "template")) {
+ models.put(new JSONObject().putOpt(NAME, artifact.optString(ARTIFACT_NAME))
+ .putOpt("version", artifact.optString("artifactVersion"))
+ .putOpt(DESCRIPTION, artifact.optString("artifactType"))
+ .putOpt(ID, artifact.optString(ARTIFACT_URL))
+ .putOpt(ITEM_ID, artifact.optString(ARTIFACT_URL)));
+ }
+ }
+ return models;
+ }
+
+ private JSONObject patchResource(JSONObject theResource) {
+
+ theResource.remove("resources");
+ theResource.putOpt(ID, theResource.opt("uuid"));
+ theResource.putOpt(ITEM_ID, theResource.opt("uuid"));
+
+ return theResource;
+ }
+
+    /**
+     * Debug helper: writes the content of each target to a file (named after the
+     * target) under the given directory, creating the directory first if needed.
+     * IO failures are logged at DEBUG level and otherwise swallowed (best-effort dump).
+     */
+    private static void dumpTargets(String theDirName, Collection<Target> theTargets) {
+        try {
+            File targetDir = new File(theDirName);
+            if (!targetDir.exists() && !targetDir.mkdirs()) {
+                throw new IllegalStateException("Couldn't create dir: " + theDirName);
+            }
+            for (Target t : theTargets) {
+                // try-with-resources: the original leaked the FileWriter (and never
+                // closed the target's reader) when the copy threw mid-way
+                try (Reader source = t.open();
+                     FileWriter dump = new FileWriter(new File(theDirName, t.getName()))) {
+                    IOUtils.copy(source, dump);
+                }
+            }
+        } catch (IOException iox) {
+            debugLogger.log(LogLevel.DEBUG,"ASDCCatalog", "IOException {}", iox);
+        }
+    }
+
+    /**
+     * Parses theValue as a URI, converting the checked URISyntaxException into an
+     * unchecked IllegalArgumentException (with the cause preserved).
+     */
+    private static URI asURI(String theValue) {
+        try {
+            return new URI(theValue);
+        } catch (URISyntaxException urisx) {
+            throw new IllegalArgumentException("Invalid URI", urisx);
+        }
+    }
+
+    /** Parses theValue as a UUID; throws IllegalArgumentException on malformed input. */
+    private static UUID asUUID(String theValue) {
+        return UUID.fromString(theValue);
+    }
+
+    /** Looks up the cached checker catalog for a resource; null if none was built yet. */
+    private org.onap.sdc.dcae.checker.Catalog getCatalog(UUID theResourceId) {
+        return this.catalogs.get(theResourceId);
+    }
+
+    // --- accessors over an SDC artifact JSON; all throw JSONException if the key is absent ---
+
+    private String getArtifactVersion(JSONObject theData) {
+        return theData.getString("artifactVersion");
+    }
+
+    private String getArtifactName(JSONObject theData) {
+        return theData.getString(ARTIFACT_NAME);
+    }
+
+    private String getArtifactURL(JSONObject theData) {
+        return theData.getString(ARTIFACT_URL);
+    }
+
+    /** The artifact URL as a URI; throws IllegalArgumentException if it is malformed. */
+    private URI getArtifactURI(JSONObject theData) {
+        return asURI(theData.getString(ARTIFACT_URL));
+    }
+
+    /** Deferred retrieval of a single SDC resource, optionally augmented with its model artifacts. */
+    public class ResourceAction implements Catalog.ItemAction<Resource> {
+
+        // SDC uuid of the resource to fetch
+        private UUID iid;
+        // when true, the resource's artifacts are scanned for templates (see selectModels)
+        private boolean doModels;
+
+        ResourceAction(UUID theItemId) {
+            this.iid = theItemId;
+        }
+
+        public ResourceAction withModels() {
+            this.doModels = true;
+            return this;
+        }
+
+        // no-op: annotations are not implemented by this catalog; kept for interface compatibility
+        public ResourceAction withAnnotations() {
+            return this;
+        }
+
+        /** Fetches the resource asynchronously and wraps the (patched) JSON in a Resource proxy. */
+        @Override
+        public Future<Resource> execute() {
+
+            return Futures.advance(asdc.getResource(this.iid, JSONObject.class), resourceData -> {
+                if (doModels) {
+                    resourceData.put(MODELS, selectModels(resourceData.optJSONArray(ARTIFACTS)));
+                }
+                return proxies.build(patchResource(resourceData), Resource.class);
+            });
+        }
+
+        /** Same fetch as execute() but yields the raw (un-patched) JSON instead of a proxy. */
+        protected Future<JSONObject> executeRaw() {
+
+            return Futures.advance(asdc.getResource(this.iid, JSONObject.class), resourceData -> {
+                if (doModels) {
+                    resourceData.put(MODELS, selectModels(resourceData.optJSONArray(ARTIFACTS)));
+                }
+                return resourceData;
+            }, resourceError -> new RuntimeException("Failed to retrieve item " + this.iid, resourceError));
+        }
+    }
+
+    /**
+     * Deferred retrieval of a catalog "folder": the SDC resources in the
+     * "DCAE Component" category whose sub-category matches the folder name.
+     * Most with* selectors are no-ops kept for interface compatibility; only
+     * withItemModels() changes what execute() loads.
+     */
+    public class FolderAction implements Catalog.FolderAction {
+
+        // when true each item is fetched individually so its model artifacts can be attached
+        private boolean doItemModels;
+        private String folderName;
+
+        // use the id/UUID of the folder ??
+        private FolderAction(String theFolderName) {
+            this.folderName = theFolderName;
+        }
+
+        public FolderAction withAnnotations() {
+            return this;
+        }
+
+        public FolderAction withAnnotations(String theSelector) {
+            return this;
+        }
+
+        public FolderAction withItems() {
+            return this;
+        }
+
+        public FolderAction withItemAnnotations() {
+            return this;
+        }
+
+        public FolderAction withItemAnnotations(String theSelector) {
+            return this;
+        }
+
+        public FolderAction withItemModels() {
+            doItemModels = true;
+            return this;
+        }
+
+        public FolderAction withParts() {
+            return this;
+        }
+
+        public FolderAction withPartAnnotations() {
+            return this;
+        }
+
+        public FolderAction withPartAnnotations(String theSelector) {
+            return this;
+        }
+
+        /**
+         * Resolves the folder locally, then queries SDC for its resources.
+         * Without withItemModels() the resources are appended (patched) directly;
+         * with it, each resource is re-fetched via a compound ResourceAction so its
+         * model artifacts can be attached, and only the latest version of each
+         * item is kept.
+         */
+        @Override
+        public Future<Folder> execute() {
+
+            JSONObject folder = folders.optJSONObject(this.folderName);
+            if (folder == null) {
+                return Futures.failedFuture(new RuntimeException("No such folder " + this.folderName));
+            }
+
+            final JSONObject folderView = new JSONObject(folder, folderFields);
+
+            return Futures.advance(asdc.getResources(JSONArray.class, "DCAE Component", this.folderName),
+                    resourcesData -> {
+
+                        // compound action stays empty unless doItemModels is set, in which
+                        // case its execute() fans out one fetch per resource
+                        Actions.CompoundAction<Resource> itemsAction = new Actions.BasicCompoundAction<Resource>();
+                        for (int i = 0; i < resourcesData.length(); i++) {
+                            JSONObject resource = resourcesData.getJSONObject(i);
+
+                            if (doItemModels) {
+                                itemsAction
+                                        .addAction(new ResourceAction(asUUID(resource.getString("uuid"))).withModels());
+                            } else {
+                                folderView.append(ITEMS, patchResource(resource));
+                            }
+                        }
+
+                        try {
+                            List<Resource> items = itemsAction.execute().waitForResult();
+                            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Number of DCAE item for : {} is {}", this.folderName, items.size());
+
+                            for (Resource res : filterLatestVersion(items)) {
+                                folderView.append(ITEMS, patchResource(res.data()));
+                            }
+                        } catch (Exception x) {
+                            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Exception {}", x);
+                            throw new RuntimeException("Failed to retrieve folder items", x);
+                        }
+
+                        return proxies.build(folderView, Folder.class);
+                    }, resourcesError -> new RuntimeException("Failed to retrieve resources", resourcesError));
+        }
+
+        /**
+         * Collapses the item list so only one Resource per invariantUUID survives,
+         * preferring the highest version.
+         *
+         * @throws IllegalArgumentException if items is null
+         */
+        public Collection<Resource> filterLatestVersion(Collection<Resource> items) throws IllegalArgumentException {
+            if (items == null) {
+                throw new IllegalArgumentException("null is not acceptable as a list of items");
+            }
+            Map<UUID, Resource> itemsMap = new HashMap<UUID, Resource>(items.size());
+            for (Resource r : items) {
+                if (itemsMap.containsKey(r.invariantUUID()) && isNewerVersion(itemsMap, r)) {
+                    debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Avoiding adding item {} since it has a advanced version already", r.toString());
+                    continue;
+                }
+                itemsMap.put(r.invariantUUID(), r);
+            }
+            return itemsMap.values();
+        }
+
+        // NOTE(review): Float-based comparison misorders dotted versions such as
+        // "1.10" vs "1.9" and throws NumberFormatException for versions with more
+        // than one dot -- confirm the SDC version format before relying on this
+        private boolean isNewerVersion(Map<UUID, Resource> itemsMap, Resource r) {
+            return Float.valueOf(itemsMap.get(r.invariantUUID()).version()) > Float.valueOf(r.version());
+        }
+
+    }
+
+ /** */
+ public class TemplateAction implements Catalog.TemplateAction {
+
+ private String artifactId;
+ private Target target;
+ private org.onap.sdc.dcae.checker.Catalog catalog;
+ private JXPathContext ctx = JXPathContext.newContext(new HashMap());
+
+ private boolean doNodes, doNodeProperties, doNodePropertiesAssignments, doNodeRequirements, doNodeCapabilities,
+ doNodeCapabilityProperties, doNodeCapabilityPropertyAssignments;
+
+ protected TemplateAction(Target theTarget) {
+ this.target = theTarget;
+ }
+
+ /*
+ * expected to be the relative url provided by asdc for the template
+ * artifact
+ */
+ protected TemplateAction(String theArtifactId) {
+ this.artifactId = theArtifactId;
+ }
+
+ public TemplateAction withInputs() {
+ return this;
+ }
+
+ public TemplateAction withOutputs() {
+ return this;
+ }
+
+ public TemplateAction withNodes() {
+ this.doNodes = true;
+ return this;
+ }
+
+        /**
+         * If withNodes() was requested, copies the template's node_templates into
+         * the output context under "/nodes": one entry per node carrying its name,
+         * the artifact id (as DESCRIPTION) and the node's "type".
+         */
+        protected TemplateAction doNodes() {
+            if (!this.doNodes) {
+                return this;
+            }
+
+            Map nodes = (Map) resolve(this.target, TOPOLOGY_TEMPLATE_NODE_TEMPLATES);
+            if (nodes == null) {
+                return this;
+            }
+
+            ctx.setValue("/nodes",
+                    nodes.entrySet().stream()
+                            // NOTE(review): DESCRIPTION carries the artifact id rather than a
+                            // textual description -- presumably a back-reference for the UI; confirm
+                            .map(nodeEntry -> new MapBuilder().put(NAME, ((Map.Entry) nodeEntry).getKey())
+                                    .put(DESCRIPTION, this.artifactId)
+                                    .putAll(selectEntries((Map) ((Map.Entry) nodeEntry).getValue(), "type")).build())
+                            .collect(Collectors.toList()));
+
+            return this;
+        }
+
+ // pre-requisite: a call to 'withNodes'
+ public TemplateAction withNodeProperties() {
+ this.doNodeProperties = true;
+ return this;
+ }
+
+ protected TemplateAction doNodeProperties() {
+ if (!this.doNodeProperties) {
+ return this;
+ }
+
+ Map nodes = (Map) resolve(this.target, TOPOLOGY_TEMPLATE_NODE_TEMPLATES);
+ if (nodes == null) {
+ return this;
+ }
+
+ nodes.entrySet().stream().forEach(node -> ctx.setValue(
+ NODES_NAME + ((Map.Entry) node).getKey() + PROPERTIES,
+ stream(catalog.facets(Construct.Node, Facet.properties,
+ ((Map) ((Map.Entry) node).getValue()).get("type").toString()))
+ .map(propEntry -> new MapBuilder().put(NAME, propEntry.getKey())
+ .putAll((Map) propEntry.getValue()).build())
+ .collect(Collectors.toList())));
+
+ return this;
+ }
+
+ // pre-requisite: a call to 'withNodesProperties'
+ public TemplateAction withNodePropertiesAssignments() {
+ this.doNodePropertiesAssignments = true;
+ return this;
+ }
+
+ protected TemplateAction doNodePropertiesAssignments() {
+ if (!this.doNodePropertiesAssignments) {
+ return this;
+ }
+
+ Map nodes = (Map) resolve(this.target, TOPOLOGY_TEMPLATE_NODE_TEMPLATES);
+ if (nodes == null) {
+ return this;
+ }
+
+ nodes.entrySet().stream().forEach(node -> {
+ List nodeProps = null;
+ try {
+ nodeProps = (List) ctx.getValue(NODES_NAME + ((Map.Entry) node).getKey() + PROPERTIES);
+ } catch (JXPathNotFoundException pnfx) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), JXPATH_NOT_FOUND_EXCEPTION, pnfx);
+ return;
+ }
+
+ nodeProps.stream().forEach(prop -> {
+ // pick from
+ String propPath = TOPOLOGY_TEMPLATE_NODE_TEMPLATES1 + ((Map.Entry) node).getKey()
+ + "/properties/" + ((Map) prop).get(NAME);
+ Object propValue = resolve(this.target, propPath);
+ // to conform with the db based api we should analyze the
+ // value for function calls
+ // dump at ..
+ propPath = NODES_NAME + ((Map.Entry) node).getKey() + PROPERTIES_NAME
+ + ((Map) prop).get(NAME) + "']";
+ if (propValue != null) {
+ ctx.setValue(propPath + "/assignment",
+ new ImmutableMap.Builder().put("value", propValue).build());
+ }
+ });
+ });
+
+ return this;
+ }
+
+        /**
+         * Renders a requirement definition (from a node type) as a map: the
+         * requirement name, a capability sub-map (name plus an id scoped to this
+         * target), and all remaining definition entries except the capability itself.
+         */
+        protected Map renderRequirementDefinition(Map.Entry theReq) {
+            Map def = (Map) theReq.getValue();
+            return new MapBuilder().put(NAME, theReq.getKey())
+                    // capability must be present
+                    .put(CAPABILITY,
+                            new MapBuilder().put(NAME, def.get(CAPABILITY))
+                                    .put(ID, this.target.getName() + "/" + def.get(CAPABILITY)).build())
+                    .putAll(evictEntries(def, CAPABILITY)).build();
+        }
+
+ // TODO: see how this comes out of neo and match it
+ protected Map renderRequirementAssignment(Map.Entry theReq) {
+ Map def = (Map) theReq.getValue();
+ return new MapBuilder().put(NAME, theReq.getKey())
+ // capability must be present
+ .put(CAPABILITY,
+ new MapBuilder().put(NAME, def.get(CAPABILITY))
+ // we provide an id only if the capability
+ // points to a type
+ .putOpt(ID,
+ catalog.hasType(Construct.Capability, (String) def.get(CAPABILITY))
+ ? (this.target.getName() + "/" + def.get(CAPABILITY)) : null)
+ .build())
+ .putAll(evictEntries(def, CAPABILITY)).build();
+ }
+
+ public TemplateAction withNodeRequirements() {
+ this.doNodeRequirements = true;
+ return this;
+ }
+
+ TemplateAction doNodeRequirements() {
+ if (!this.doNodeRequirements) {
+ return this;
+ }
+
+ // requirements come first from the type and then can be further
+ // refined by their assignment within the
+ // node template
+ Map nodes = (Map) resolve(this.target, TOPOLOGY_TEMPLATE_NODE_TEMPLATES);
+ if (nodes == null) {
+ return this;
+ }
+
+ // type
+ nodes.entrySet().stream()
+ .forEach(
+ node -> ctx
+ .setValue(
+ NODES_NAME
+ + ((Map.Entry) node)
+ .getKey()
+ + "']/requirements",
+ StreamSupport
+ .stream(Spliterators.spliteratorUnknownSize(
+ catalog.requirements(((Map) ((Map.Entry) node).getValue())
+ .get("type").toString()),
+ Spliterator.NONNULL | Spliterator.DISTINCT
+ | Spliterator.IMMUTABLE),
+ false)
+ .map((Map.Entry reqEntry) -> renderRequirementDefinition(reqEntry))
+ .collect(Collectors.toList())));
+
+ // merge assignments on top of definitions
+ nodes.entrySet().stream().forEach(node -> {
+ List nodeReqsAssigns = (List) resolve(this.target,
+ TOPOLOGY_TEMPLATE_NODE_TEMPLATES1 + ((Map.Entry) node).getKey() + "/requirements");
+ if (nodeReqsAssigns == null) {
+ return;
+ }
+ nodeReqsAssigns.stream().forEach(req -> {
+ Map.Entry reqAssign = toEntry(req);
+ catalog.mergeDefinitions((Map) ctx.getValue(NODES_NAME + ((Map.Entry) node).getKey()
+ + "']/requirements[name='" + reqAssign.getKey() + "']"),
+ renderRequirementAssignment(reqAssign));
+ });
+ });
+
+ return this;
+ }
+
+ public TemplateAction withNodeCapabilities() {
+ this.doNodeCapabilities = true;
+ return this;
+ }
+
+ protected Map renderCapabilityDefinition(Map.Entry theCap) {
+ Map def = (Map) theCap.getValue();
+ return new MapBuilder().put(NAME, theCap.getKey())
+ .put("type",
+ new MapBuilder().put(NAME, def.get("type"))
+ .put(ID, this.target.getName() + "/" + def.get("type")).build())
+ .putAll(evictEntries(def, "properties", "type")).build();
+ }
+
+ TemplateAction doNodeCapabilities() {
+ if (!this.doNodeCapabilities) {
+ return this;
+ }
+
+ Map nodes = (Map) resolve(this.target, TOPOLOGY_TEMPLATE_NODE_TEMPLATES);
+ if (nodes == null) {
+ return this;
+ }
+
+ // collect capabilities through the node type hierarchy
+
+ // we evict the properties from the node type capability declaration
+ // (when declaring a capability with the
+ // node type some re-definition of capability properties can take
+ // place).
+ nodes.entrySet().stream()
+ .forEach(node -> ctx.setValue(NODES_NAME + ((Map.Entry) node).getKey() + CAPABILITIES,
+
+ stream(catalog.facets(Construct.Node, Facet.capabilities,
+ ((Map) ((Map.Entry) node).getValue()).get("type").toString()))
+ .map((Map.Entry capEntry) -> renderCapabilityDefinition(capEntry))
+ .collect(Collectors.toList())));
+
+ return this;
+ }
+
+ public TemplateAction withNodeCapabilityProperties() {
+ this.doNodeCapabilityProperties = true;
+ return this;
+ }
+
+ TemplateAction doNodeCapabilityProperties() {
+
+ if (!this.doNodeCapabilityProperties) {
+ return this;
+ }
+
+ Map nodes = (Map) resolve(this.target, TOPOLOGY_TEMPLATE_NODE_TEMPLATES);
+ if (nodes == null) {
+ return this;
+ }
+
+ // pick up all the properties from the capability type hierarchy
+ // definition
+ nodes.entrySet().stream().forEach(node -> {
+ List nodeCapabilities = (List) ctx
+ .getValue(NODES_NAME + ((Map.Entry) node).getKey() + CAPABILITIES);
+ if (nodeCapabilities == null) {
+ return;
+ }
+
+ // collect properties from the capability type hierarchy
+ nodeCapabilities.stream().forEach(capability -> {
+ List capabilityProperties = StreamSupport
+ .stream(Spliterators.spliteratorUnknownSize(
+ catalog.facets(Construct.Capability, Facet.properties,
+ ((Map)((Map)capability).get("type")).get(NAME).toString()),
+ Spliterator.NONNULL | Spliterator.DISTINCT | Spliterator.IMMUTABLE), false)
+ .map((Map.Entry capEntry) -> new MapBuilder().put(NAME, capEntry.getKey())
+ .putAll((Map) capEntry.getValue()).build())
+ .collect(Collectors.toList());
+
+ if (!capabilityProperties.isEmpty()) {
+ ctx.setValue(NODES_NAME + ((Map.Entry) node).getKey() + CAPABILITIES_NAME
+ + ((Map) capability).get(NAME) + PROPERTIES, capabilityProperties);
+ }
+ });
+
+ // and go over the node type (hierarchy) and pick up any
+ // re-definitions from there.
+ StreamSupport
+ .stream(Spliterators.spliteratorUnknownSize(
+ catalog.facets(Construct.Node, Facet.capabilities,
+ ((Map) ((Map.Entry) node).getValue()).get("type").toString()),
+ Spliterator.NONNULL | Spliterator.DISTINCT | Spliterator.IMMUTABLE), false)
+ .forEach((Map.Entry capability) -> {
+ // for each capability property that has some node
+ // type level re-definition
+ Map properties = (Map) ((Map) capability.getValue()).get("properties");
+ if (properties == null) {
+ return;
+ }
+
+ properties.entrySet().stream().forEach(property -> {
+ String propertyLoc = NODES_NAME + ((Map.Entry) node).getKey()
+ + CAPABILITIES_NAME + ((Map) capability).get(NAME)
+ + PROPERTIES_NAME + ((Map.Entry) property).getKey() + "']";
+ ctx.setValue(propertyLoc, catalog.mergeDefinitions((Map) ctx.getValue(propertyLoc),
+ (Map) ((Map.Entry) property).getValue()));
+ });
+ });
+ });
+
+ return this;
+ }
+
+ public TemplateAction withNodeCapabilityPropertyAssignments() {
+ this.doNodeCapabilityPropertyAssignments = true;
+ return this;
+ }
+
+ TemplateAction doNodeCapabilityPropertyAssignments() {
+ if (!this.doNodeCapabilityPropertyAssignments) {
+ return this;
+ }
+
+ // this is a wasteful: we go over all declared
+ // nodes/capabilities/properties and check if there is an assigned
+ // value in the actual template. It is optimal to approach the
+ // problem from the other direction: go over delared
+ // assignments and set them in the output structure ..
+
+ List nodes = null;
+ try {
+ nodes = (List) ctx.getValue("/nodes");
+ } catch (JXPathNotFoundException pnfx) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), JXPATH_NOT_FOUND_EXCEPTION, pnfx);
+ return this;
+ }
+
+ nodes.stream().forEach(node -> {
+ List capabilities = (List) ctx.getValue(NODES_NAME + ((Map) node).get(NAME) + CAPABILITIES);
+ if (capabilities == null) {
+ return;
+ }
+
+ capabilities.stream().forEach(capability -> {
+ List properties = null;
+ try {
+ properties = (List) ctx.getValue(NODES_NAME + ((Map) node).get(NAME)
+ + CAPABILITIES_NAME + ((Map) capability).get(NAME) + PROPERTIES);
+ } catch (JXPathNotFoundException pnfx) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), JXPATH_NOT_FOUND_EXCEPTION, pnfx);
+ return;
+ }
+
+ properties.stream().forEach(property -> {
+ String location = NODES_NAME + ((Map) node).get(NAME) + CAPABILITIES_NAME
+ + ((Map) capability).get(NAME) + PROPERTIES_NAME + ((Map) property).get(NAME)
+ + "']/assignment";
+
+ // pick the value from the original
+ try {
+ Object assignment = resolve(this.target,
+ TOPOLOGY_TEMPLATE_NODE_TEMPLATES1 + ((Map) node).get(NAME) + "/capabilities/"
+ + ((Map) capability).get(NAME) + "/properties/"
+ + ((Map) property).get(NAME));
+ if (assignment != null) {
+ ctx.setValue(location, new ImmutableMap.Builder().put("value", assignment).build());
+ }
+ } catch (JXPathNotFoundException pnfx) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), JXPATH_NOT_FOUND_EXCEPTION, pnfx);
+ // it's ok, no assignment
+ }
+ });
+ });
+ });
+
+ return this;
+ }
+
+ public TemplateAction withPolicies() {
+ return this;
+ }
+
+ public TemplateAction withPolicyProperties() {
+ return this;
+ }
+
+ public TemplateAction withPolicyPropertiesAssignments() {
+ return this;
+ }
+
+        /**
+         * Resolves the template target (fetching + validating the artifact when the
+         * action was built from an artifact id), runs all the requested do* stages,
+         * and packages the accumulated JXPath context as a Template proxy.
+         * Returns a failed future on any resolution/validation error.
+         */
+        public Future<Template> execute() {
+
+            if (this.target == null) {
+
+                // the artifact id is SDC's relative URL; the resource uuid sits at index 5
+                String[] parts = this.artifactId.split("/");
+                if (parts.length != 8) {
+                    return Futures
+                            .failedFuture(new Exception("Unexpected artifact id for template " + this.artifactId));
+                }
+
+                UUID resourceId = asUUID(parts[5]);
+                this.catalog = ASDCCatalog.this.catalogs.get(resourceId);
+
+                // if we find a catalog for this resource we have to figure out
+                // if it contains the required target ..
+
+                try {
+                    JSONObject resource = new ResourceAction(resourceId).executeRaw().waitForResult();
+
+                    Checker checker = new Checker();
+                    TargetLocator locator = new ASDCLocator(resource.getJSONArray(ARTIFACTS),
+                            ASDCCatalog.this.catalogs.get(resourceId));
+                    checker.setTargetLocator(locator);
+
+                    Target template = locator.resolve("template");
+                    if (template == null) {
+                        return Futures.failedFuture(new Exception("Failed to locate template in " + resource));
+                    }
+
+                    checker.check(template);
+
+                    // any error in any checked target aborts; targets are dumped to disk for debugging
+                    for (Target t : checker.targets()) {
+                        if (t.getReport().hasErrors()) {
+                            dumpTargets(resourceId.toString(), checker.targets());
+                            return Futures.failedFuture(new Exception("Failed template validation: " + t.getReport()));
+                        }
+                    }
+
+                    this.target = template;
+                    this.catalog = checker.catalog();
+                    ASDCCatalog.this.catalogs.put(resourceId, this.catalog);
+                    // we should only be doing this if we discovered an update
+                    // (by checking timestampts). Actually, we should
+                    // only do the artifact fetching if we detect an update
+                    ASDCCatalog.this.contexts.put(template, JXPathContext.newContext(template.getTarget()));
+                } catch (Exception x) {
+                    return Futures.failedFuture(x);
+                }
+            }
+
+            // stage order matters: later stages read context paths written by earlier ones
+            this.doNodes().doNodeProperties().doNodePropertiesAssignments().doNodeRequirements().doNodeCapabilities()
+                    .doNodeCapabilityProperties().doNodeCapabilityPropertyAssignments();
+
+            JSONObject pack = new JSONObject((Map) ctx.getContextBean()).put(NAME, this.target.getName().toString())
+                    .put(ID, this.target.getLocation().toString())
+                    .put(ITEM_ID, this.target.getLocation().toString());
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), pack.toString(2));
+
+            return Futures.succeededFuture(proxies.build(pack, Template.class));
+        }
+ }
+
+ public class TypeAction implements Catalog.TypeAction {
+
+ private String name;
+ private UUID resourceId;
+ private JXPathContext ctx;
+
+ private boolean doHierarchy = false, doRequirements = false, doCapabilities = false;
+
+ private TypeAction(UUID theResourceId, /* Construct theConstruct, */ String theName) {
+ this.resourceId = theResourceId;
+ this.name = theName;
+ }
+
+ public TypeAction withHierarchy() {
+ this.doHierarchy = true;
+ return this;
+ }
+
+ TypeAction doHierarchy(org.onap.sdc.dcae.checker.Catalog theCatalog) {
+ if (!this.doHierarchy) {
+ return this;
+ }
+
+ ctx.setValue("/hierarchy",
+ stream(theCatalog.hierarchy(Construct.Node, this.name)).skip(1) // skip
+ // self
+ .map((Map.Entry type) -> new MapBuilder()
+ .put(NAME, type.getKey()).put(ID, resourceId + "/" + type.getKey())
+ .putOpt(DESCRIPTION, ((Map) type.getValue()).get(DESCRIPTION)).build())
+ // renderEntry((Map.Entry)type,
+ // "description").build())
+ .collect(Collectors.toList()));
+ return this;
+ }
+
+ public TypeAction withRequirements() {
+ this.doRequirements = true;
+ return this;
+ }
+
+        /**
+         * Populates "requirements" in the output context with the requirement
+         * definitions collected over this node type's hierarchy. Each requirement
+         * is rendered with its name, occurrences, capability (with a type reference
+         * when the capability names a capability type), target node and relationship.
+         */
+        TypeAction doRequirements(org.onap.sdc.dcae.checker.Catalog theCatalog) {
+            if (!this.doRequirements) {
+                return this;
+            }
+
+            ctx.setValue("requirements", stream(theCatalog.requirements(this.name)).map((Map.Entry req) -> {
+                Map reqDef = (Map) req.getValue();
+                String capability = (String) reqDef.get(CAPABILITY);
+                // BUGFIX: this used to read the CAPABILITY key again (copy/paste),
+                // so the rendered "node" entry echoed the capability instead of the
+                // requirement's declared target node
+                String node = (String) reqDef.get("node");
+                return new MapBuilder().put(NAME, req.getKey()).put(ID, resourceId + "/" + req.getKey())
+                        .put(OCCURRENCES, reqDef.get(OCCURRENCES))
+                        .put(CAPABILITY,
+                                new MapBuilder().put(NAME, capability)
+                                        // if the capability points to a capability type then encode
+                                        // the type reference, else it is a name (within a node type)
+                                        .put(ID,
+                                                getCatalog(resourceId).hasType(Construct.Capability, capability)
+                                                        ? (resourceId + "/" + capability) : capability)
+                                        .build())
+                        .put("node", new MapBuilder().putOpt(NAME, node).putOpt(ID, node == null ? null
+                                : (resourceId + "/" + node)).buildOpt())
+                        .put("relationship", reqDef.get("relationship"))
+                        .build();
+            }).collect(Collectors.toList()));
+
+            return this;
+        }
+
+ public TypeAction withCapabilities() {
+ this.doCapabilities = true;
+ return this;
+ }
+
+ TypeAction doCapabilities(org.onap.sdc.dcae.checker.Catalog theCatalog) {
+ if (!this.doCapabilities) {
+ return this;
+ }
+
+ ctx.setValue("capabilities",
+ stream(theCatalog
+ .facets(Construct.Node, Facet.capabilities,
+ this.name))
+ .map((Map.Entry capability) -> new MapBuilder()
+ .put(NAME, capability.getKey()).put("type",
+ new MapBuilder()
+ .put(NAME, ((Map) capability.getValue())
+ .get("type"))
+ .put(ID,
+ resourceId + "/"
+ + ((Map) capability.getValue())
+ .get("type"))
+ .build())
+ .put(OCCURRENCES,
+ ((Map) capability.getValue()).get(OCCURRENCES))
+ .putOpt("validSourceTypes",
+ ((Map) capability.getValue()).get("validSourceTypes"))
+ .build()
+ // renderEntry((Map.Entry)capability,
+ // "occurrences",
+ // "validSourceTypes")
+ ).collect(Collectors.toList()));
+ return this;
+ }
+
+ public Future<Type> execute() {
+ org.onap.sdc.dcae.checker.Catalog catalog = ASDCCatalog.this.catalogs.get(this.resourceId);
+ if (catalog == null) {
+ return Futures.failedFuture(new Exception("No catalog available for resource " + this.resourceId
+ + ". You might want to fetch the model first."));
+ }
+
+ if (!catalog.hasType(Construct.Node, this.name)) {
+ return Futures.failedFuture(
+ new Exception("No " + this.name + " type in catalog for resource " + this.resourceId));
+ }
+
+ this.ctx = JXPathContext
+ .newContext(new MapBuilder().put(NAME, this.name).put(ID, this.resourceId + "/" + this.name)
+ .put(ITEM_ID, this.resourceId + "/" + this.name).build());
+
+ this.doHierarchy(catalog).doRequirements(catalog).doCapabilities(catalog);
+
+ JSONObject pack = new JSONObject((Map) this.ctx.getContextBean());
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), pack.toString(2));
+
+ return Futures.succeededFuture(proxies.build((Map) ctx.getContextBean(), Type.class));
+ }
+ }
+
+ public static interface Resource extends Catalog.Item<Resource> {
+
+ @Override
+ @Proxy.DataMap(map = "uuid")
+ public String id();
+
+ public UUID uuid();
+
+ public UUID invariantUUID();
+
+ public String category();
+
+ public String subCategory();
+
+ public String lastUpdaterFullName();
+
+ public String version();
+
+ @Proxy.DataMap(proxy = true, elementType = Artifact.class)
+ public Artifacts artifacts();
+
+ }
+
+ public static class Resources extends Elements<Resource> {
+ }
+
+ public static interface Artifact extends Catalog.Element<Artifact> {
+
+ @Proxy.DataMap(map = ARTIFACT_NAME)
+ public String name();
+
+ @Proxy.DataMap(map = "artifactType")
+ public String type();
+
+ @Proxy.DataMap(map = "artifactDescription")
+ public String description();
+
+ @Proxy.DataMap(map = "artifactUUID")
+ public UUID uuid();
+
+ @Proxy.DataMap(map = "artifactVersion")
+ public int version();
+
+ }
+
+ public static class Artifacts extends Elements<Artifact> {
+ }
+
+ public class ASDCLocator implements TargetLocator {
+
+ private JSONArray artifacts;
+ private org.onap.sdc.dcae.checker.Catalog catalog;
+
+ private ASDCLocator(JSONArray theArtifacts, org.onap.sdc.dcae.checker.Catalog theCatalog) {
+ this.artifacts = theArtifacts;
+ this.catalog = theCatalog;
+ }
+
+ public boolean addSearchPath(URI theURI) {
+ return false;
+ }
+
+ public boolean addSearchPath(String thePath) {
+ return false;
+ }
+
+ public Iterable<URI> searchPaths() {
+ return Collections.emptySet();
+ }
+
+        /**
+         * Finds the artifact whose name contains theName (case-insensitive) and
+         * wraps it as a Target. When a checker catalog is available, a previously
+         * fetched target with the same artifact version is reused as a cache hit.
+         * Returns null when no artifact matches.
+         */
+        public Target resolve(String theName) {
+            JSONObject targetArtifact = null;
+
+            // NOTE(review): the loop does not break on the first hit, so the LAST
+            // matching artifact wins -- confirm this is the intended tie-break
+            for (int i = 0; i < this.artifacts.length(); i++) {
+                JSONObject artifact = this.artifacts.getJSONObject(i);
+                String artifactName = artifact.getString(ARTIFACT_NAME);
+                if (StringUtils.containsIgnoreCase(artifactName, theName)) {
+                    targetArtifact = artifact;
+                }
+            }
+
+            if (targetArtifact == null) {
+                return null;
+            }
+
+            ASDCTarget target = null;
+            if (this.catalog != null) {
+                // this is the caching!!
+                target = (ASDCTarget) this.catalog.getTarget(ASDCCatalog.this.getArtifactURI(targetArtifact));
+                if (target != null && target.getVersion().equals(ASDCCatalog.this.getArtifactVersion(targetArtifact))) {
+                    return target;
+                }
+            }
+
+            return new ASDCTarget(targetArtifact);
+        }
+ }
+
+ public class ASDCTarget extends Target {
+
+ private String content;
+ private JSONObject artifact;
+
+ private ASDCTarget(JSONObject theArtifact) {
+ super(ASDCCatalog.this.getArtifactName(theArtifact), ASDCCatalog.this.getArtifactURI(theArtifact));
+ this.artifact = theArtifact;
+ }
+
+ // here is a chance for caching within the catalog! Do not go fetch the
+ // artifact if it has not been changed since the
+ // last fetch.
+
+        /**
+         * Returns a reader over the artifact content, fetching (and memoizing) it
+         * from SDC on first use; subsequent calls reuse the cached string.
+         *
+         * @throws IOException if the SDC fetch fails (original cause preserved)
+         */
+        @Override
+        public Reader open() throws IOException {
+            if (this.content == null) {
+                try {
+                    this.content = ASDCCatalog.this.asdc
+                            .fetch(ASDCCatalog.this.getArtifactURL(this.artifact), String.class).waitForResult();
+                } catch (Exception x) {
+                    throw new IOException("Failed to load " + ASDCCatalog.this.getArtifactURL(this.artifact), x);
+                }
+            }
+
+            // should return immediately a reader blocked until content
+            // available .. hard to handle errors
+            return new StringReader(this.content);
+        }
+
+ public String getVersion() {
+ return ASDCCatalog.this.getArtifactVersion(this.artifact);
+ }
+
+ }
+
+    /**
+     * Manual smoke test: args[0] is the SDC URI, args[1] a folder name. Walks the
+     * folder's items and, for each model artifact, materializes the template with
+     * nodes/properties/assignments, logging everything at DEBUG.
+     */
+    public static void main(String[] theArgs) throws Exception {
+
+        ASDCCatalog catalog = new ASDCCatalog(new URI(theArgs[0]));
+
+        Folder f = catalog.folder(theArgs[1]).withItems().withItemModels().execute().waitForResult();
+
+        debugLogger.log(LogLevel.DEBUG, ASDCCatalog.class.getName(), "folder: {}", f.data());
+
+        Resources items = f.elements(ITEMS, Resources.class);
+        if (items != null) {
+            for (Resource item : items) {
+                debugLogger.log(LogLevel.DEBUG, ASDCCatalog.class.getName(), "\titem: {} : {}",item.name(), item.data());
+                Templates templates = item.elements(MODELS, Templates.class);
+                if (templates != null) {
+                    for (Template t : templates) {
+                        Template ft = catalog.template(t.id()).withNodes().withNodeProperties()
+                                .withNodePropertiesAssignments().execute().waitForResult();
+
+                        debugLogger.log(LogLevel.DEBUG, ASDCCatalog.class.getName(), "template data: {}", ft.data());
+                    }
+                }
+            }
+        }
+    }
+
+}
diff --git a/dcaedt_catalog/api/src/main/resources/log4j.properties b/dcaedt_catalog/api/src/main/resources/log4j.properties
new file mode 100644
index 0000000..6e159e5
--- /dev/null
+++ b/dcaedt_catalog/api/src/main/resources/log4j.properties
@@ -0,0 +1,8 @@
+log4j.rootLogger=INFO, stdout
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%5p [%c] %m%n
+
+log4j.logger.org.apache.http=DEBUG
+log4j.logger.org.apache.http.wire=ERROR \ No newline at end of file
diff --git a/dcaedt_catalog/api/src/test/java/org/onap/sdc/dcae/catalog/ASDCCatalogTest.java b/dcaedt_catalog/api/src/test/java/org/onap/sdc/dcae/catalog/ASDCCatalogTest.java
new file mode 100644
index 0000000..fcd92f0
--- /dev/null
+++ b/dcaedt_catalog/api/src/test/java/org/onap/sdc/dcae/catalog/ASDCCatalogTest.java
@@ -0,0 +1,88 @@
+package org.onap.sdc.dcae.catalog;
+
+import static org.assertj.core.api.Assertions.*;
+
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.UUID;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.onap.sdc.dcae.catalog.asdc.ASDCCatalog;
+import org.onap.sdc.dcae.catalog.asdc.ASDCCatalog.FolderAction;
+import org.onap.sdc.dcae.catalog.asdc.ASDCCatalog.Resource;
+
+import static org.mockito.Mockito.*;
+
+
+/**
+ * Unit tests for {@link ASDCCatalog.FolderAction#filterLatestVersion(Collection)}:
+ * null input, empty input, version collapsing per invariantUUID, and pass-through
+ * of distinct items. The catalog itself is mocked; only folder() is called for real.
+ */
+public class ASDCCatalogTest {
+
+    @Rule
+    public ExpectedException thrown = ExpectedException.none();
+
+    // builds a FolderAction backed by a mocked catalog (folder() uses the real code path)
+    private static FolderAction getTarget() {
+        ASDCCatalog catalog = mock(ASDCCatalog.class);
+        when(catalog.folder("test")).thenCallRealMethod();
+        FolderAction target = catalog.folder("test");
+        return target;
+    }
+
+    @Test
+    public void filterLatestVersion_null_throwIllegalArgumentException() {
+        // arrange
+        FolderAction target = getTarget();
+        // assert
+        thrown.expect(IllegalArgumentException.class);
+        // act
+        target.filterLatestVersion(null);
+    }
+
+    @Test
+    public void filterLatestVersion_emptyItemsList_emptyItemsList() throws URISyntaxException {
+        // arrange
+        FolderAction target = getTarget();
+        // act
+        Collection<Resource> result = target.filterLatestVersion(new ArrayList<>());
+        // assert
+        assertThat(result).isEmpty();
+    }
+
+    @Test
+    public void filterLatestVersion_itemWithTwoVersions_itemWithLatestVersion() {
+        // arrange
+        FolderAction target = getTarget();
+
+        // two resources sharing one invariantUUID: only the higher version should survive
+        UUID invariantUUID = UUID.randomUUID();
+        Resource r1v1 = mock(Resource.class);
+        Resource r1v2 = mock(Resource.class);
+        when(r1v1.invariantUUID()).thenReturn(invariantUUID);
+        when(r1v2.invariantUUID()).thenReturn(invariantUUID);
+        when(r1v1.version()).thenReturn("1.0");
+        when(r1v2.version()).thenReturn("2.0");
+        ArrayList<Resource> listItemWithTwoVersions = new ArrayList<Resource>(Arrays.asList(r1v1, r1v2));
+        // act
+        Collection<Resource> result = target.filterLatestVersion(listItemWithTwoVersions);
+        // assert
+        assertThat(result).containsExactly(r1v2);
+    }
+
+    @Test
+    public void filterLatestVersion_2distinctItems_2distinctItems() {
+        // arrange
+        FolderAction target = getTarget();
+
+        // distinct invariantUUIDs: both resources must pass through untouched
+        Resource r1 = mock(Resource.class);
+        Resource r2 = mock(Resource.class);
+        when(r1.invariantUUID()).thenReturn(UUID.randomUUID());
+        when(r2.invariantUUID()).thenReturn(UUID.randomUUID());
+        ArrayList<Resource> listOfTwoDistinctItems = new ArrayList<Resource>(Arrays.asList(r1, r2));
+        // act
+        Collection<Resource> result = target.filterLatestVersion(listOfTwoDistinctItems);
+        // assert
+        assertThat(result).containsExactlyInAnyOrder(r1, r2);
+    }
+
+}
diff --git a/dcaedt_catalog/asdc/pom.xml b/dcaedt_catalog/asdc/pom.xml
new file mode 100644
index 0000000..14323fe
--- /dev/null
+++ b/dcaedt_catalog/asdc/pom.xml
@@ -0,0 +1,139 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>
	<parent>
		<groupId>org.onap.sdc.dcae</groupId>
		<artifactId>DCAE-DT-Catalog</artifactId>
		<version>1806.0.1-SNAPSHOT</version>
	</parent>
	<artifactId>DCAE-DT-Catalog-ASDC</artifactId>
	<packaging>jar</packaging>
	<name>DCAE DT ASDC</name>
	<build>
		<sourceDirectory>src/main/java</sourceDirectory>
		<plugins>
			<plugin>
				<artifactId>maven-compiler-plugin</artifactId>
				<version>3.1</version>
				<configuration>
					<source>1.8</source>
					<target>1.8</target>
					<encoding>${project.build.sourceEncoding}</encoding>
				</configuration>
			</plugin>
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-dependency-plugin</artifactId>
				<version>2.10</version>
				<executions>
					<execution>
						<id>copy-dependencies</id>
						<phase>package</phase>
						<goals>
							<goal>copy-dependencies</goal>
						</goals>
						<configuration>
							<outputDirectory>${project.build.directory}/deps</outputDirectory>
							<overWriteReleases>false</overWriteReleases>
							<overWriteSnapshots>false</overWriteSnapshots>
							<overWriteIfNewer>true</overWriteIfNewer>
						</configuration>
					</execution>
				</executions>
			</plugin>
			<!-- <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>buildnumber-maven-plugin</artifactId>
				<version>1.4</version> <executions> <execution> <phase>validate</phase> <goals>
				<goal>create</goal> </goals> </execution> </executions> <configuration> <doCheck>false</doCheck>
				<doUpdate>false</doUpdate> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-jar-plugin</artifactId> <version>2.1</version> <configuration>
				<archive> <manifest> <addDefaultImplementationEntries>true</addDefaultImplementationEntries>
				</manifest> <manifestEntries> <Implementation-Build>${buildNumber}</Implementation-Build>
				</manifestEntries> </archive> </configuration> </plugin> -->
			<plugin>
				<groupId>org.springframework.boot</groupId>
				<artifactId>spring-boot-maven-plugin</artifactId>
				<version>1.3.3.RELEASE</version>
				<configuration>
					<mainClass>org.onap.sdc.dcae.catalog.asdc.ASDCEngine</mainClass>
				</configuration>
				<executions>
					<execution>
						<goals>
							<goal>repackage</goal>
						</goals>
					</execution>
				</executions>
			</plugin>
		</plugins>
	</build>
	<dependencies>
		<!-- NOTE: junit was previously declared twice (once pinned to 3.8.1, once
			version-less). The 3.8.1 declaration was removed: it predates annotations
			and conflicted with the parent-managed JUnit 4 used by the @Test-based
			tests in this module. -->
		<!-- <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId>
			<version>2.4.4</version> </dependency> -->
		<dependency>
			<groupId>org.springframework</groupId>
			<artifactId>spring-core</artifactId>
			<version>4.3.5.RELEASE</version>
		</dependency>
		<dependency>
			<groupId>org.springframework</groupId>
			<artifactId>spring-web</artifactId>
			<version>4.3.5.RELEASE</version>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-web</artifactId>
			<version>1.4.1.RELEASE</version>
		</dependency>
		<dependency>
			<groupId>org.springframework</groupId>
			<artifactId>spring-webmvc</artifactId>
			<version>4.3.5.RELEASE</version>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-autoconfigure</artifactId>
			<version>1.4.1.RELEASE</version>
		</dependency>
		<dependency>
			<groupId>org.apache.commons</groupId>
			<artifactId>commons-lang3</artifactId>
			<version>3.5</version>
		</dependency>
		<dependency>
			<groupId>commons-cli</groupId>
			<artifactId>commons-cli</artifactId>
			<version>1.3</version>
		</dependency>
		<dependency>
			<groupId>org.onap.sdc.dcae</groupId>
			<artifactId>DCAE-DT-Catalog-Commons</artifactId>
			<version>${project.version}</version>
		</dependency>
		<dependency>
			<groupId>org.onap.sdc.dcae</groupId>
			<artifactId>DCAE-DT-Validator-Checker</artifactId>
			<version>${project.version}</version>
		</dependency>
		<dependency>
			<groupId>com.google.code.gson</groupId>
			<artifactId>gson</artifactId>
			<version>2.7</version>
		</dependency>
		<dependency>
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.assertj</groupId>
			<artifactId>assertj-core</artifactId>
			<scope>test</scope>
		</dependency>
	</dependencies>
</project>
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDC.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDC.java
new file mode 100644
index 0000000..66afab1
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDC.java
@@ -0,0 +1,1101 @@
+package org.onap.sdc.dcae.catalog.asdc;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+
+import java.util.List;
+import java.util.Set;
+import java.util.UUID;
+import java.util.Collections;
+
+import java.util.function.UnaryOperator;
+
+import javax.annotation.PostConstruct;
+
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.enums.ArtifactGroupType;
+import org.onap.sdc.dcae.enums.ArtifactType;
+import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed;
+import org.springframework.http.MediaType;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpRequest;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.RequestEntity;
+import org.springframework.http.ResponseEntity;
+import org.springframework.http.client.AsyncClientHttpRequestExecution;
+import org.springframework.http.client.AsyncClientHttpRequestInterceptor;
+import org.springframework.http.client.ClientHttpResponse;
+import org.springframework.web.client.AsyncRestTemplate;
+import org.springframework.web.client.RestClientException;
+import org.springframework.web.client.HttpClientErrorException;
+import org.springframework.http.converter.HttpMessageConverter;
+
+import org.springframework.util.Base64Utils;
+//import org.springframework.util.DigestUtils;
+import org.apache.commons.codec.digest.DigestUtils;
+
+import org.springframework.stereotype.Component;
+import org.springframework.context.annotation.Scope;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import org.springframework.util.concurrent.ListenableFuture;
+import org.springframework.util.concurrent.ListenableFutureCallback;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.StringUtils;
+
+import org.json.JSONObject;
+import org.onap.sdc.dcae.catalog.commons.Action;
+import org.onap.sdc.dcae.catalog.commons.Future;
+import org.onap.sdc.dcae.catalog.commons.Futures;
+import org.onap.sdc.dcae.catalog.commons.JSONHttpMessageConverter;
+import org.onap.sdc.dcae.composition.util.DcaeBeConstants;
+import org.onap.sdc.dcae.composition.util.SystemProperties;
+import org.json.JSONArray;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+
+
+@Component("asdc")
+@Scope("singleton")
+//@ConfigurationProperties(prefix="asdc")
+public class ASDC {
+
+ public static enum AssetType {
+ resource,
+ service,
+ product
+ }
+
+// public static enum ArtifactType {
+// DCAE_TOSCA,
+// DCAE_JSON,
+// DCAE_POLICY,
+// DCAE_DOC,
+// DCAE_EVENT,
+// DCAE_INVENTORY_TOSCA,
+// DCAE_INVENTORY_JSON,
+// DCAE_INVENTORY_POLICY,
+// DCAE_INVENTORY_DOC,
+// DCAE_INVENTORY_BLUEPRINT,
+// DCAE_INVENTORY_EVENT,
+// HEAT,
+// HEAT_VOL,
+// HEAT_NET,
+// HEAT_NESTED,
+// HEAT_ARTIFACT,
+// HEAT_ENV,
+// OTHER
+// }
+
+// public static enum ArtifactGroupType {
+// DEPLOYMENT,
+// INFORMATIONAL
+// }
+
+ public static enum LifecycleState {
+ Checkin,
+ Checkout,
+ Certify,
+ undocheckout
+ }
+
+
+// @Retention(RetentionPolicy.RUNTIME)
+// @Target(ElementType.METHOD)
+// public @interface Mandatory {
+// }
+
+ protected static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+ protected static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+ @Autowired
+ private SystemProperties systemProperties;
+
+ private URI rootUri;
+ private String rootPath = "/sdc/v1/catalog/";
+ private String user,
+ passwd;
+ private String instanceId;
+
+
+ public void setUri(URI theUri) {
+ //theUri = URI.create(systemProperties.getProperties().getProperty(SystemProperties.ASDC_CATALOG_URL));
+ String userInfo = theUri.getUserInfo();
+ if (userInfo != null) {
+ String[] userInfoParts = userInfo.split(":");
+ setUser(userInfoParts[0]);
+ if (userInfoParts.length > 1)
+ setPassword(userInfoParts[1]);
+ }
+ String fragment = theUri.getFragment();
+ if (fragment == null)
+ throw new IllegalArgumentException("The URI must contain a fragment specification, to be used as ASDC instance id");
+ setInstanceId(fragment);
+
+ try {
+ this.rootUri = new URI(theUri.getScheme(), null, theUri.getHost(), theUri.getPort(), theUri.getPath(), theUri.getQuery(), null);
+ }
+ catch (URISyntaxException urix) {
+ throw new IllegalArgumentException("Invalid uri", urix);
+ }
+ }
+
+ public URI getUri() {
+ return this.rootUri;
+ }
+
+ public void setUser(String theUser) {
+ this.user = theUser;
+ }
+
+ public String getUser() {
+ return this.user;
+ }
+
+ public void setPassword(String thePassword) {
+ this.passwd = thePassword;
+ }
+
+ public String getPassword() {
+ return this.passwd;
+ }
+
+ public void setInstanceId(String theId) {
+ this.instanceId = theId;
+ }
+
+ public String getInstanceId() {
+ return this.instanceId;
+ }
+
	// NOTE(review): the thePath argument is ignored — the root path is always
	// re-read from the ASDC_ROOTPATH configuration property. This looks like a
	// deliberate configuration-driven override, but confirm with callers before
	// relying on it.
	public void setRootPath(String thePath) {
		this.rootPath = systemProperties.getProperties().getProperty(DcaeBeConstants.Config.ASDC_ROOTPATH);
	}
+
+ public String getRootPath() {
+ return systemProperties.getProperties().getProperty(DcaeBeConstants.Config.ASDC_ROOTPATH);
+ }
+
+ @Scheduled(fixedRateString = "${beans.context.scripts.updateCheckFrequency?:60000}")
+ public void checkForUpdates() {
+ }
+
+ @PostConstruct
+ public void initASDC() {
+ }
+
+ public <T> Future<T> getResources(Class<T> theType) {
+ return getAssets(AssetType.resource, theType);
+ }
+
+ public Future<JSONArray> getResources() {
+ return getAssets(AssetType.resource, JSONArray.class);
+ }
+
+ public <T> Future<T> getResources(Class<T> theType, String theCategory, String theSubCategory) {
+ return getAssets(AssetType.resource, theType, theCategory, theSubCategory);
+ }
+
+ public Future<JSONArray> getResources(String category, String subCategory, String resourceType) {
+ return getAssets(AssetType.resource, JSONArray.class, category, subCategory, resourceType);
+ }
+
+ public <T> Future<T> getServices(Class<T> theType) {
+ return getAssets(AssetType.service, theType);
+ }
+
+ public Future<JSONArray> getServices() {
+ return getAssets(AssetType.service, JSONArray.class);
+ }
+
+ public <T> Future<T> getServices(Class<T> theType, String theCategory, String theSubCategory) {
+ return getAssets(AssetType.service, theType, theCategory, theSubCategory);
+ }
+
+ public Future<JSONArray> getServices(String theCategory, String theSubCategory) {
+ return getAssets(AssetType.service, JSONArray.class, theCategory, theSubCategory);
+ }
+
+ public <T> Future<T> getAssets(AssetType theAssetType, Class<T> theType) {
+ return fetch(refAssets(theAssetType), theType);
+ }
+
+ public <T> Action<T> getAssetsAction(AssetType theAssetType, Class<T> theType) {
+ return (() -> fetch(refAssets(theAssetType), theType));
+ }
+
+ public <T> Future<T> getAssets(AssetType theAssetType, Class<T> theType,
+ String theCategory, String theSubCategory) {
+ return getAssets(theAssetType, theType, theCategory, theSubCategory, null);
+ }
+
+ public <T> Future<T> getAssets(AssetType theAssetType, Class<T> theType,
+ String theCategory, String theSubCategory, String theResourceType) {
+ return fetch(refAssets(theAssetType) + filter(theCategory, theSubCategory, theResourceType), theType);
+ }
+
+ public <T> Action<T> getAssetsAction(AssetType theAssetType, Class<T> theType,
+ String theCategory, String theSubCategory, String theResourceType) {
+ return (() -> fetch(refAssets(theAssetType) + filter(theCategory, theSubCategory, theResourceType), theType));
+ }
+
+ protected String refAssets(AssetType theAssetType) {
+ return this.rootPath + theAssetType + "s/";
+ }
+
+ private String filter(String theCategory, String theSubCategory, String theResourceType) {
+ StringBuilder filter = null;
+ if (theCategory != null) {
+ filter = new StringBuilder();
+ filter.append("?category=")
+ .append(theCategory);
+ if (theSubCategory != null) {
+ filter.append("&subCategory=")
+ .append(theSubCategory);
+ if (theResourceType != null) {
+ filter.append("&resourceType=")
+ .append(theResourceType);
+ }
+ }
+ }
+ return filter == null ? "" : filter.toString();
+ }
+
+ protected String refAsset(AssetType theAssetType, UUID theId) {
+ return this.rootPath + theAssetType + "s/" + theId;
+ }
+
+ public <T> Future<T> getResource(UUID theId, Class<T> theType) {
+ return getAsset(AssetType.resource, theId, theType);
+ }
+
+ public Future<JSONObject> getResource(UUID theId) {
+ return getAsset(AssetType.resource, theId, JSONObject.class);
+ }
+
+ public Future<ResourceDetailed> getSDCResource(UUID theId) {
+ return getAsset(AssetType.resource, theId, ResourceDetailed.class);
+ }
+
+
+ public <T> Future<T> getService(UUID theId, Class<T> theType) {
+ return getAsset(AssetType.service, theId, theType);
+ }
+
+ public Future<JSONObject> getService(UUID theId) {
+ return getAsset(AssetType.service, theId, JSONObject.class);
+ }
+
+ public <T> Future<T> getAsset(AssetType theAssetType, UUID theId, Class<T> theType) {
+ return fetch(refAsset(theAssetType, theId) + "/metadata", theType);
+ }
+
+ public <T> Action<T> getAssetAction(AssetType theAssetType, UUID theId, Class<T> theType) {
+ return (() -> fetch(refAsset(theAssetType, theId) + "/metadata", theType));
+ }
+
+ public Future<byte[]> getResourceArchive(UUID theId) {
+ return getAssetArchive(AssetType.resource, theId);
+ }
+
+ public Future<byte[]> getServiceArchive(UUID theId) {
+ return getAssetArchive(AssetType.service, theId);
+ }
+
+ public Future<byte[]> getAssetArchive(AssetType theAssetType, UUID theId) {
+ return fetch(refAsset(theAssetType, theId) + "/toscaModel", byte[].class);
+ }
+
+ public Action<byte[]> getAssetArchiveAction(AssetType theAssetType, UUID theId) {
+ return (() -> fetch(refAsset(theAssetType, theId) + "/toscaModel", byte[].class));
+ }
+
+ public Future<JSONObject> checkinResource(UUID theId, String theUser, String theMessage) {
+ return cycleAsset(AssetType.resource, theId, LifecycleState.Checkin, theUser, theMessage);
+ }
+
+ public Future<JSONObject> checkinService(UUID theId, String theUser, String theMessage) {
+ return cycleAsset(AssetType.service, theId, LifecycleState.Checkin, theUser, theMessage);
+ }
+
+ public Future<JSONObject> checkoutResource(UUID theId, String theUser, String theMessage) {
+ return cycleAsset(AssetType.resource, theId, LifecycleState.Checkout, theUser, theMessage);
+ }
+
+ public Future<JSONObject> checkoutService(UUID theId, String theUser, String theMessage) {
+ return cycleAsset(AssetType.service, theId, LifecycleState.Checkout, theUser, theMessage);
+ }
+
+ public Future<JSONObject> certifyResource(UUID theId, String theUser, String theMessage) {
+ return cycleAsset(AssetType.resource, theId, LifecycleState.Certify, theUser, theMessage);
+ }
+
+ public Future<JSONObject> certifyService(UUID theId, String theUser, String theMessage) {
+ return cycleAsset(AssetType.service, theId, LifecycleState.Certify, theUser, theMessage);
+ }
+
	/**
	 * Transitions an asset through the SDC lifecycle (checkin/checkout/certify/
	 * undocheckout). putOpt is used (rather than put) because theMessage is
	 * normally mandatory but not for undocheckout. The acting user is conveyed
	 * via the USER_ID header.
	 */
	public Future<JSONObject> cycleAsset(AssetType theAssetType, UUID theId, LifecycleState theState,
			String theUser, String theMessage) {
		return post(refAsset(theAssetType, theId) + "/lifecycleState/" + theState,
				(headers) -> prepareHeaders(headers)
						.header("USER_ID", theUser),
				new JSONObject().putOpt("userRemarks", theMessage));
	}
+
+ protected String refAssetInstanceArtifact(AssetType theAssetType, UUID theAssetId, String theAssetInstance, UUID theArtifactId) {
+ return refAsset(theAssetType, theAssetId) + "/resourceInstances/" + theAssetInstance + "/artifacts" + (theArtifactId == null ? "" : ("/" + theArtifactId));
+ }
+
+ protected String refAssetArtifact(AssetType theAssetType, UUID theAssetId, UUID theArtifactId) {
+ return refAsset(theAssetType, theAssetId) + "/artifacts" + (theArtifactId == null ? "" : ("/" + theArtifactId));
+ }
+
+ public <T> Future<T> getResourceArtifact(UUID theAssetId, UUID theArtifactId, Class<T> theType) {
+ return getAssetArtifact(AssetType.resource, theAssetId, theArtifactId, theType);
+ }
+
+ public <T> Future<T> getServiceArtifact(UUID theAssetId, UUID theArtifactId, Class<T> theType) {
+ return getAssetArtifact(AssetType.service, theAssetId, theArtifactId, theType);
+ }
+
+ public <T> Future<T> getResourceInstanceArtifact(UUID theAssetId, UUID theArtifactId, String theInstance, Class<T> theType) {
+ return getAssetInstanceArtifact(AssetType.resource, theAssetId, theInstance, theArtifactId, theType);
+ }
+
+ public <T> Future<T> getServiceInstanceArtifact(UUID theAssetId, UUID theArtifactId, String theInstance, Class<T> theType) {
+ return getAssetInstanceArtifact(AssetType.service, theAssetId, theInstance, theArtifactId, theType);
+ }
+
+ public <T> Future<T> getAssetArtifact(AssetType theAssetType, UUID theAssetId, UUID theArtifactId, Class<T> theType) {
+ return fetch(refAssetArtifact(theAssetType, theAssetId, theArtifactId), theType);
+ }
+
+ public <T> Action<T> getAssetArtifactAction(AssetType theAssetType, UUID theAssetId, UUID theArtifactId, Class<T> theType) {
+ return (() -> fetch(refAssetArtifact(theAssetType, theAssetId, theArtifactId), theType));
+ }
+
+ public <T> Future<T> getAssetInstanceArtifact(AssetType theAssetType, UUID theAssetId, String theInstance, UUID theArtifactId, Class<T> theType) {
+ return fetch(refAssetInstanceArtifact(theAssetType, theAssetId, theInstance, theArtifactId), theType);
+ }
+
+ public <T> Action<T> getAssetInstanceArtifactAction(AssetType theAssetType, UUID theAssetId, String theInstance, UUID theArtifactId, Class<T> theType) {
+ return (() -> fetch(refAssetInstanceArtifact(theAssetType, theAssetId, theInstance, theArtifactId), theType));
+ }
+
+ public ArtifactUploadAction createResourceArtifact(UUID theAssetId) {
+ return createAssetArtifact(AssetType.resource, theAssetId);
+ }
+
+ public ArtifactUploadAction createServiceArtifact(UUID theAssetId) {
+ return createAssetArtifact(AssetType.service, theAssetId);
+ }
+
+ public ArtifactUploadAction createResourceInstanceArtifact(UUID theAssetId, String theInstance) {
+ return createAssetInstanceArtifact(AssetType.resource, theAssetId, theInstance);
+ }
+
+ public ArtifactUploadAction createServiceInstanceArtifact(UUID theAssetId, String theInstance) {
+ return createAssetInstanceArtifact(AssetType.service, theAssetId, theInstance);
+ }
+
+ public ArtifactUploadAction createAssetArtifact(AssetType theAssetType, UUID theAssetId) {
+ return new ArtifactUploadAction()
+ .ofAsset(theAssetType, theAssetId);
+ }
+
+ public ArtifactUploadAction createAssetInstanceArtifact(AssetType theAssetType, UUID theAssetId, String theInstance) {
+ return new ArtifactUploadAction()
+ .ofAssetInstance(theAssetType, theAssetId, theInstance);
+ }
+
+ public ArtifactUpdateAction updateResourceArtifact(UUID theAssetId, JSONObject theArtifactInfo) {
+ return updateAssetArtifact(AssetType.resource, theAssetId, theArtifactInfo);
+ }
+
+ public ArtifactUpdateAction updateResourceInstanceArtifact(UUID theAssetId, String theInstance, JSONObject theArtifactInfo) {
+ return updateAssetInstanceArtifact(AssetType.resource, theAssetId, theInstance, theArtifactInfo);
+ }
+
+ public ArtifactUpdateAction updateServiceArtifact(UUID theAssetId, JSONObject theArtifactInfo) {
+ return updateAssetArtifact(AssetType.service, theAssetId, theArtifactInfo);
+ }
+
+ public ArtifactUpdateAction updateServiceInstanceArtifact(UUID theAssetId, String theInstance, JSONObject theArtifactInfo) {
+ return updateAssetInstanceArtifact(AssetType.service, theAssetId, theInstance, theArtifactInfo);
+ }
+
+ public ArtifactUpdateAction updateAssetArtifact(AssetType theAssetType, UUID theAssetId, JSONObject theArtifactInfo) {
+ return new ArtifactUpdateAction(theArtifactInfo)
+ .ofAsset(theAssetType, theAssetId);
+ }
+
+ public ArtifactUpdateAction updateAssetInstanceArtifact(AssetType theAssetType, UUID theAssetId, String theInstance, JSONObject theArtifactInfo) {
+ return new ArtifactUpdateAction(theArtifactInfo)
+ .ofAssetInstance(theAssetType, theAssetId, theInstance);
+ }
+
+ public ArtifactDeleteAction deleteResourceArtifact(UUID theAssetId, UUID theArtifactId) {
+ return deleteAssetArtifact(AssetType.resource, theAssetId, theArtifactId);
+ }
+
+ public ArtifactDeleteAction deleteResourceInstanceArtifact(UUID theAssetId, String theInstance, UUID theArtifactId) {
+ return deleteAssetInstanceArtifact(AssetType.resource, theAssetId, theInstance, theArtifactId);
+ }
+
+ public ArtifactDeleteAction deleteServiceArtifact(UUID theAssetId, UUID theArtifactId) {
+ return deleteAssetArtifact(AssetType.service, theAssetId, theArtifactId);
+ }
+
+ public ArtifactDeleteAction deleteServiceInstanceArtifact(UUID theAssetId, String theInstance, UUID theArtifactId) {
+ return deleteAssetInstanceArtifact(AssetType.service, theAssetId, theInstance, theArtifactId);
+ }
+
+ public ArtifactDeleteAction deleteAssetArtifact(AssetType theAssetType, UUID theAssetId, UUID theArtifactId) {
+ return new ArtifactDeleteAction(theArtifactId)
+ .ofAsset(theAssetType, theAssetId);
+ }
+
+ public ArtifactDeleteAction deleteAssetInstanceArtifact(AssetType theAssetType, UUID theAssetId, String theInstance, UUID theArtifactId) {
+ return new ArtifactDeleteAction(theArtifactId)
+ .ofAssetInstance(theAssetType, theAssetId, theInstance);
+ }
+
+
	/**
	 * Base for all fluent SDC actions. Accumulates the JSON request body
	 * ("info") plus the id of the SDC operator on whose behalf the request is
	 * made, and validates both before execution.
	 *
	 * @param <A> concrete action type, for fluent chaining (self-type idiom)
	 * @param <T> result type produced when the action executes
	 */
	public abstract class ASDCAction<A extends ASDCAction<A, T>, T> implements Action<T> {

		protected JSONObject info; //info passed to asdc as request body
		protected String operatorId; //id of the SDC user performing the action

		protected ASDCAction(JSONObject theInfo) {
			this.info = theInfo;
		}

		/** Returns 'this' with the concrete type so the with* methods chain. */
		protected abstract A self();

		protected ASDC asdc() {
			return ASDC.this;
		}

		/** Merges theInfo into the body without overwriting existing keys. */
		protected A withInfo(JSONObject theInfo) {
			merge(this.info, theInfo);
			return self();
		}

		public A with(String theProperty, Object theValue) {
			info.put(theProperty, theValue);
			return self();
		}

		public A withOperator(String theOperator) {
			this.operatorId = theOperator;
			return self();
		}

		/** Body entries that must be present before execute() may proceed. */
		protected abstract String[] mandatoryInfoEntries();

		protected void checkOperatorId() {
			if (this.operatorId == null) {
				throw new IllegalStateException("No operator id was provided");
			}
		}

		protected void checkMandatoryInfo() {
			// assumes info is non-null whenever mandatoryInfoEntries() is non-empty
			for (String field: mandatoryInfoEntries()) {
				if (!info.has(field))
					throw new IllegalStateException("No '" + field + "' was provided");
			}
		}

		protected void checkMandatory() {
			checkOperatorId();
			checkMandatoryInfo();
		}
	}
+
+ protected static final String[] artifactMandatoryEntries = new String[] {};
+
+ /**
+ * We use teh same API to operate on artifacts attached to assets or to their instances
+ */
+ public abstract class ASDCArtifactAction<A extends ASDCArtifactAction<A>> extends ASDCAction<A, JSONObject> {
+
+ protected AssetType assetType;
+ protected UUID assetId;
+ protected String assetInstance;
+
+ protected ASDCArtifactAction(JSONObject theInfo) {
+ super(theInfo);
+ }
+
+ protected A ofAsset(AssetType theAssetType, UUID theAssetId) {
+ this.assetType = theAssetType;
+ this.assetId = theAssetId;
+ return self();
+ }
+
+ protected A ofAssetInstance(AssetType theAssetType, UUID theAssetId, String theInstance) {
+ this.assetType = theAssetType;
+ this.assetId = theAssetId;
+ this.assetInstance = theInstance;
+ return self();
+ }
+
+ protected String normalizeInstanceName(String theName) {
+ return StringUtils.removePattern(theName, "[ \\.\\-]+").toLowerCase();
+ }
+
+ protected String[] mandatoryInfoEntries() {
+ return ASDC.this.artifactMandatoryEntries;
+ }
+
+ protected String ref(UUID theArtifactId) {
+ return (this.assetInstance == null) ?
+ refAssetArtifact(this.assetType, this.assetId, theArtifactId) :
+ refAssetInstanceArtifact(this.assetType, this.assetId, normalizeInstanceName(this.assetInstance), theArtifactId);
+ }
+ }
+
+ protected static final String[] uploadMandatoryEntries = new String[] { "artifactName",
+ "artifactType",
+ "artifactGroupType",
+ "artifactLabel",
+ "description",
+ "payloadData" };
+
+ public class ArtifactUploadAction extends ASDCArtifactAction<ArtifactUploadAction> {
+
+ protected ArtifactUploadAction() {
+ super(new JSONObject());
+ }
+
+ protected ArtifactUploadAction self() {
+ return this;
+ }
+
+ public ArtifactUploadAction withContent(byte[] theContent) {
+ return with("payloadData", Base64Utils.encodeToString(theContent));
+ }
+
+ public ArtifactUploadAction withContent(File theFile) throws IOException {
+ return withContent(FileUtils.readFileToByteArray(theFile));
+ }
+
+ public ArtifactUploadAction withLabel(String theLabel) {
+ return with("artifactLabel", theLabel);
+ }
+
+ public ArtifactUploadAction withName(String theName) {
+ return with("artifactName", theName);
+ }
+
+ public ArtifactUploadAction withDisplayName(String theName) {
+ return with("artifactDisplayName", theName);
+ }
+
+ public ArtifactUploadAction withType(ArtifactType theType) {
+ return with("artifactType", theType.toString());
+ }
+
+ public ArtifactUploadAction withGroupType(ArtifactGroupType theGroupType) {
+ return with("artifactGroupType", theGroupType.toString());
+ }
+
+ public ArtifactUploadAction withDescription(String theDescription) {
+ return with("description", theDescription);
+ }
+
+ protected String[] mandatoryInfoEntries() {
+ return ASDC.this.uploadMandatoryEntries;
+ }
+
+ public Future<JSONObject> execute() {
+ checkMandatory();
+ return ASDC.this.post(ref(null),
+ (headers) -> prepareHeaders(headers)
+ .header("USER_ID", this.operatorId),
+ this.info);
+ }
+ }
+
+ protected static final String[] updateMandatoryEntries = new String[] { "artifactName",
+ "artifactType",
+ "artifactGroupType",
+ "artifactLabel",
+ "description",
+ "payloadData" };
+
+ /**
+ * In its current form the update relies on a previous artifact retrieval. One cannot build an update from scratch.
+ * The label, tye and group type must be submitted but cannot be updated
+ */
+ public class ArtifactUpdateAction extends ASDCArtifactAction<ArtifactUpdateAction> {
+
+
+ protected ArtifactUpdateAction(JSONObject theInfo) {
+ super(theInfo);
+ }
+
+ protected ArtifactUpdateAction self() {
+ return this;
+ }
+
+ public ArtifactUpdateAction withContent(byte[] theContent) {
+ return with("payloadData", Base64Utils.encodeToString(theContent));
+ }
+
+ public ArtifactUpdateAction withContent(File theFile) throws IOException {
+ return withContent(FileUtils.readFileToByteArray(theFile));
+ }
+
+ public ArtifactUpdateAction withDescription(String theDescription) {
+ return with("description", theDescription);
+ }
+
+ public ArtifactUpdateAction withName(String theName) {
+ return with("artifactName", theName);
+ }
+
+ protected String[] mandatoryInfoEntries() {
+ return ASDC.this.updateMandatoryEntries;
+ }
+
+ /* The json object originates (normally) from a get so it will have entries we need to cleanup */
+ protected void cleanupInfoEntries() {
+ this.info.remove("artifactChecksum");
+ this.info.remove("artifactUUID");
+ this.info.remove("artifactVersion");
+ this.info.remove("artifactURL");
+ this.info.remove("artifactDescription");
+ }
+
+ public Future<JSONObject> execute() {
+ UUID artifactUUID = UUID.fromString(this.info.getString("artifactUUID"));
+ checkMandatory();
+ cleanupInfoEntries();
+ return ASDC.this.post(ref(artifactUUID),
+ (headers) -> prepareHeaders(headers)
+ .header("USER_ID", this.operatorId),
+ this.info);
+ }
+ }
+
	/**
	 * Deletes an artifact by UUID from an asset or asset instance.
	 * NOTE(review): this action is constructed with a null info body. That is
	 * only safe because artifactMandatoryEntries is empty, so
	 * checkMandatoryInfo() never dereferences info — fragile; confirm before
	 * ever adding mandatory entries for artifact actions.
	 */
	public class ArtifactDeleteAction extends ASDCArtifactAction<ArtifactDeleteAction> {

		private UUID artifactId;

		protected ArtifactDeleteAction(UUID theArtifactId) {
			super(null);
			this.artifactId = theArtifactId;
		}

		protected ArtifactDeleteAction self() {
			return this;
		}

		/** Validates the operator, then issues the DELETE for the artifact. */
		public Future<JSONObject> execute() {
			checkMandatory();
			return ASDC.this.delete(ref(this.artifactId),
					(headers) -> prepareHeaders(headers)
							.header("USER_ID", this.operatorId));
		}
	}
+
+
+
+
+ public VFCMTCreateAction createVFCMT() {
+ return new VFCMTCreateAction();
+ }
+
+ protected static final String[] vfcmtMandatoryEntries = new String[] { "name",
+ "vendorName",
+ "vendorRelease",
+ "contactId" };
+
+
+ public class VFCMTCreateAction extends ASDCAction<VFCMTCreateAction, JSONObject> {
+
+ protected VFCMTCreateAction() {
+
+ super(new JSONObject());
+ this
+ .with("resourceType", "VFCMT")
+ .with("category", "Template")
+ .with("subcategory", "Monitoring Template")
+ .with("icon", "defaulticon");
+ }
+
+ protected VFCMTCreateAction self() {
+ return this;
+ }
+
+ public VFCMTCreateAction withName(String theName) {
+ return with("name", theName);
+ }
+
+ public VFCMTCreateAction withDescription(String theDescription) {
+ return with("description", theDescription);
+ }
+
+ public VFCMTCreateAction withVendorName(String theVendorName) {
+ return with("vendorName", theVendorName);
+ }
+
+ public VFCMTCreateAction withVendorRelease(String theVendorRelease) {
+ return with("vendorRelease", theVendorRelease);
+ }
+
+ public VFCMTCreateAction withTags(String... theTags) {
+ for (String tag: theTags)
+ this.info.append("tags", tag);
+ return this;
+ }
+
+ public VFCMTCreateAction withIcon(String theIcon) {
+ return with("icon", theIcon);
+ }
+
+ protected String[] mandatoryInfoEntries() {
+ return ASDC.this.vfcmtMandatoryEntries;
+ }
+
+ public VFCMTCreateAction withContact(String theContact) {
+ return with("contactId", theContact);
+ }
+
+ public Future<JSONObject> execute() {
+
+ this.info.putOnce("contactId", this.operatorId);
+ this.info.append("tags", info.optString("name"));
+ checkMandatory();
+ return ASDC.this.post(refAssets(AssetType.resource),
+ (headers) -> prepareHeaders(headers)
+ .header("USER_ID", this.operatorId),
+ this.info);
+ }
+
+ }
+
+ public static JSONObject merge(JSONObject theOriginal, JSONObject thePatch) {
+ for (String key: (Set<String>)thePatch.keySet()) {
+ if (!theOriginal.has(key))
+ theOriginal.put(key, thePatch.get(key));
+ }
+ return theOriginal;
+ }
+
	/**
	 * Resolves a catalog-relative reference against the configured root URI.
	 * A malformed result is reported as an UncheckedIOException so callers in
	 * lambda pipelines need not declare checked exceptions.
	 */
	protected URI refUri(String theRef) {
		try {
			return new URI(this.rootUri + theRef);
		}
		catch(URISyntaxException urisx) {
			throw new UncheckedIOException(new IOException(urisx));
		}
	}
+
+ private HttpHeaders prepareHeaders() {
+ HttpHeaders headers = new HttpHeaders();
+ headers.add(HttpHeaders.AUTHORIZATION, "Basic " + Base64Utils.encodeToString((this.user + ":" + this.passwd).getBytes()));
+ headers.add(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON_VALUE);
+ headers.add(HttpHeaders.ACCEPT, MediaType.APPLICATION_OCTET_STREAM_VALUE);
+ headers.add(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_UTF8_VALUE);
+ headers.add("X-ECOMP-InstanceID", this.instanceId);
+
+ return headers;
+ }
+
+ private RequestEntity.HeadersBuilder prepareHeaders(RequestEntity.HeadersBuilder theBuilder) {
+ return theBuilder
+ .header(HttpHeaders.AUTHORIZATION, "Basic " + Base64Utils.encodeToString((this.user + ":" + this.passwd).getBytes()))
+ .header(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON_VALUE)
+ .header(HttpHeaders.ACCEPT, MediaType.APPLICATION_OCTET_STREAM_VALUE)
+ .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_UTF8_VALUE)
+ .header("X-ECOMP-InstanceID", this.instanceId);
+ }
+
/** GETs the given reference and converts the response body to the requested type. */
public <T> Future<T> fetch(String theRef, Class<T> theContentType) {
    return exchange(theRef, HttpMethod.GET, new HttpEntity(prepareHeaders()), theContentType);
}
+
/** POSTs a JSON payload with the standard SDC headers. */
public Future<JSONObject> post(String theRef, JSONObject thePost) {
    return exchange(theRef, HttpMethod.POST, new HttpEntity<JSONObject>(thePost, prepareHeaders()), JSONObject.class);
}
+
/**
 * POSTs a JSON payload, letting the caller decorate the request headers.
 * The operator's return value is ignored, so it must mutate the supplied
 * builder in place — NOTE(review): Spring's default builders return 'this',
 * which makes this work; confirm before passing a non-mutating operator.
 */
public Future<JSONObject> post(String theRef, UnaryOperator<RequestEntity.HeadersBuilder> theHeadersBuilder, JSONObject thePost) {
    RequestEntity.BodyBuilder builder = RequestEntity.post(refUri(theRef));
    theHeadersBuilder.apply(builder);

    return exchange(theRef, HttpMethod.POST, builder.body(thePost), JSONObject.class);
}
+
/**
 * Issues a DELETE against the given reference, letting the caller decorate the
 * headers. As with post(), the operator's return value is ignored and the
 * builder is expected to be mutated in place.
 */
public Future<JSONObject> delete(String theRef, UnaryOperator<RequestEntity.HeadersBuilder> theHeadersBuilder) {

    RequestEntity.HeadersBuilder builder = RequestEntity.delete(refUri(theRef));
    theHeadersBuilder.apply(builder);

    return exchange(theRef, HttpMethod.DELETE, builder.build(), JSONObject.class);
}
+
+ public <T> Future<T> exchange(String theRef, HttpMethod theMethod, HttpEntity theRequest, Class<T> theResponseType) {
+
+ AsyncRestTemplate restTemplate = new AsyncRestTemplate();
+
+ List<HttpMessageConverter<?>> converters = restTemplate.getMessageConverters();
+ converters.add(0, new JSONHttpMessageConverter());
+ restTemplate.setMessageConverters(converters);
+
+ restTemplate.setInterceptors(Collections.singletonList(new ContentMD5Interceptor()));
+ ASDCFuture<T> result = new ASDCFuture<T>();
+ String uri = this.rootUri + theRef;
+ try {
+ restTemplate
+ .exchange(uri, theMethod, theRequest, theResponseType)
+ .addCallback(result.callback);
+ }
+ catch (RestClientException rcx) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "Failed to fetch {} {}", uri, rcx);
+ return Futures.failedFuture(rcx);
+ }
+ catch (Exception x) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "Failed to fetch {} {}", uri, x);
+ return Futures.failedFuture(x);
+ }
+
+ return result;
+ }
+
+
+
/**
 * Future implementation bridging Spring's listenable callbacks onto the
 * catalog Future API. HTTP 4xx client errors are wrapped as ASDCException;
 * any other failure is passed through unchanged.
 */
public class ASDCFuture<T>
    extends Futures.BasicFuture<T> {

    // When true, a 404 was meant to complete the future with an empty result
    // instead of an error. The callback below never consults it (that handling
    // was never enabled); the setter is kept so existing callers keep compiling.
    private boolean http404toEmpty = false;

    ASDCFuture() {
    }

    public ASDCFuture setHttp404ToEmpty(boolean doEmpty) {
        this.http404toEmpty = doEmpty;
        return this;
    }

    // adapter registered with AsyncRestTemplate#exchange
    ListenableFutureCallback<ResponseEntity<T>> callback = new ListenableFutureCallback<ResponseEntity<T>>() {

        public void onSuccess(ResponseEntity<T> theResult) {
            ASDCFuture.this.result(theResult.getBody());
        }

        public void onFailure(Throwable theError) {
            if (theError instanceof HttpClientErrorException) {
                // 4xx from SDC: surface as a typed ASDCException
                ASDCFuture.this.cause(new ASDCException((HttpClientErrorException)theError));
            }
            else {
                ASDCFuture.this.cause(theError);
            }
        }
    };

}
+
/**
 * Adds a Content-MD5 header to every POST request.
 * The value is the base64 encoding of the HEX digest string (not of the raw
 * digest bytes) — NOTE(review): presumably what the SDC side expects; confirm
 * before "fixing" this to base64(rawDigest).
 */
public class ContentMD5Interceptor implements AsyncClientHttpRequestInterceptor {

    @Override
    public ListenableFuture<ClientHttpResponse> intercept(
            HttpRequest theRequest, byte[] theBody, AsyncClientHttpRequestExecution theExecution)
            throws IOException {
        if (HttpMethod.POST == theRequest.getMethod()) {
            HttpHeaders headers = theRequest.getHeaders();
            headers.add("Content-MD5", Base64Utils.encodeToString(
                    DigestUtils.md5Hex(theBody).getBytes()));

        }
        return theExecution.executeAsync(theRequest, theBody);
    }
}
+
+ public static void main(String[] theArgs) throws Exception {
+
+ CommandLineParser parser = new BasicParser();
+
+ String user_id = "jh0003";
+
+ Options options = new Options();
+ options.addOption(OptionBuilder
+ .withArgName("target")
+ .withLongOpt("target")
+ .withDescription("target asdc system")
+ .hasArg()
+ .isRequired()
+ .create('t') );
+
+ options.addOption(OptionBuilder
+ .withArgName("action")
+ .withLongOpt("action")
+ .withDescription("one of: list, get, getartifact, checkin, checkout")
+ .hasArg()
+ .isRequired()
+ .create('a') );
+
+ options.addOption(OptionBuilder
+ .withArgName("assetType")
+ .withLongOpt("assetType")
+ .withDescription("one of resource, service, product")
+ .hasArg()
+ .isRequired()
+ .create('k') ); //k for 'kind' ..
+
+ options.addOption(OptionBuilder
+ .withArgName("assetId")
+ .withLongOpt("assetId")
+ .withDescription("asset uuid")
+ .hasArg()
+ .create('u') ); //u for 'uuid'
+
+ options.addOption(OptionBuilder
+ .withArgName("artifactId")
+ .withLongOpt("artifactId")
+ .withDescription("artifact uuid")
+ .hasArg()
+ .create('s') ); //s for 'stuff'
+
+ options.addOption(OptionBuilder
+ .withArgName("listFilter")
+ .withLongOpt("listFilter")
+ .withDescription("filter for list operations")
+ .hasArg()
+ .create('f') ); //u for 'uuid'
+
+ CommandLine line = null;
+ try {
+ line = parser.parse(options, theArgs);
+ }
+ catch(ParseException exp) {
+ errLogger.log(LogLevel.ERROR, ASDC.class.getName(), exp.getMessage());
+ new HelpFormatter().printHelp("asdc", options);
+ return;
+ }
+
+ ASDC asdc = new ASDC();
+ asdc.setUri(new URI(line.getOptionValue("target")));
+
+ String action = line.getOptionValue("action");
+ if (action.equals("list")) {
+ JSONObject filterInfo = new JSONObject(
+ line.hasOption("listFilter") ?
+ line.getOptionValue("listFilter") : "{}");
+ JSONArray assets =
+ asdc.getAssets(ASDC.AssetType.valueOf(line.getOptionValue("assetType")), JSONArray.class,
+ filterInfo.optString("category", null), filterInfo.optString("subCategory", null))
+ .waitForResult();
+ for (int i = 0; i < assets.length(); i++) {
+ debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),"> {}", assets.getJSONObject(i).toString(2));
+ }
+ }
+ else if (action.equals("get")) {
+ debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),
+ asdc.getAsset(ASDC.AssetType.valueOf(line.getOptionValue("assetType")),
+ UUID.fromString(line.getOptionValue("assetId")),
+ JSONObject.class)
+ .waitForResult()
+ .toString(2)
+ );
+ }
+ else if (action.equals("getartifact")) {
+ debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),
+ asdc.getAssetArtifact(ASDC.AssetType.valueOf(line.getOptionValue("assetType")),
+ UUID.fromString(line.getOptionValue("assetId")),
+ UUID.fromString(line.getOptionValue("artifactId")),
+ String.class)
+ .waitForResult()
+ );
+ }
+ else if (action.equals("checkin")) {
+ debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),
+ asdc.cycleAsset(ASDC.AssetType.valueOf(line.getOptionValue("assetType")),
+ UUID.fromString(line.getOptionValue("assetId")),
+ ASDC.LifecycleState.Checkin,
+ user_id,
+ "cli op")
+ .waitForResult()
+ .toString()
+ );
+ }
+ else if (action.equals("checkout")) {
+ debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),
+ asdc.cycleAsset(ASDC.AssetType.valueOf(line.getOptionValue("assetType")),
+ UUID.fromString(line.getOptionValue("assetId")),
+ ASDC.LifecycleState.Checkout,
+ user_id,
+ "cli op")
+ .waitForResult()
+ .toString()
+ );
+ }
+ else if (action.equals("cleanup")) {
+ JSONArray resources = asdc.getResources()
+ .waitForResult();
+ debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),"Got {} resources", resources.length());
+
+ // vfcmt cleanup
+ for (int i = 0; i < resources.length(); i++) {
+
+ JSONObject resource = resources.getJSONObject(i);
+
+ if (resource.getString("resourceType").equals("VFCMT") &&
+ resource.getString("name").contains("test")) {
+
+ debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),"undocheckout for {}", resource.getString("uuid"));
+
+ try {
+ asdc.cycleAsset(AssetType.resource, UUID.fromString(resource.getString("uuid")), LifecycleState.undocheckout, user_id, null)
+ .waitForResult();
+ }
+ catch (Exception x) {
+ debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),"** {}", x);
+ }
+ }
+ }
+
+ }
+ else {
+ try {
+ debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),
+ asdc.createVFCMT()
+ .withName("Clonator")
+ .withDescription("Clone operation target 06192017")
+ .withVendorName("CloneInc")
+ .withVendorRelease("1.0")
+ .withTags("clone")
+ .withOperator(user_id)
+ .execute()
+ .waitForResult()
+ .toString()
+ );
+ }
+ catch(Exception x) {
+ debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),"Failed to create VFCMT: {}", x);
+ }
+ }
+ }
+}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCController.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCController.java
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCController.java
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCEngine.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCEngine.java
new file mode 100644
index 0000000..73c7601
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCEngine.java
@@ -0,0 +1,25 @@
+package org.onap.sdc.dcae.catalog.asdc;
+
+import org.onap.sdc.dcae.composition.util.SystemProperties;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.context.annotation.Bean;
+
/**
 * Spring Boot entry point for the standalone ASDC catalog service.
 */
@SpringBootApplication
public class ASDCEngine {

    /**
     * Creates and returns a new instance of a {@link SystemProperties} class.
     *
     * @return New instance of {@link SystemProperties}.
     */
    @Bean
    public SystemProperties systemProperties() {
        return new SystemProperties();
    }

    /** Boots the Spring application context. */
    public static void main(String[] args) {
        SpringApplication.run(ASDCEngine.class, args);
    }

}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCException.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCException.java
new file mode 100644
index 0000000..659653d
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCException.java
@@ -0,0 +1,18 @@
+package org.onap.sdc.dcae.catalog.asdc;
+
+import org.onap.sdc.dcae.errormng.BaseException;
+import org.onap.sdc.dcae.errormng.RequestError;
+import org.springframework.http.HttpStatus;
+import org.springframework.web.client.HttpClientErrorException;
+
/**
 * Catalog-level exception for SDC client errors; wraps either an HTTP 4xx
 * client error or an explicit status plus structured error payload.
 */
public class ASDCException extends BaseException {

    /** Wraps an HTTP client (4xx) error returned by SDC; package-private, used by the client layer. */
    ASDCException(HttpClientErrorException error) {
        super(error);
    }

    /** Builds an exception from an explicit status and structured error payload. */
    public ASDCException(HttpStatus status, RequestError re){
        super(status, re);
    }

}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCUtils.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCUtils.java
new file mode 100644
index 0000000..1d70627
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCUtils.java
@@ -0,0 +1,448 @@
+package org.onap.sdc.dcae.catalog.asdc;
+
+import org.apache.commons.jxpath.JXPathContext;
+import org.apache.commons.lang3.StringUtils;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.catalog.commons.Actions;
+import org.onap.sdc.dcae.catalog.commons.Future;
+import org.onap.sdc.dcae.catalog.commons.Futures;
+import org.onap.sdc.dcae.catalog.commons.Recycler;
+import org.onap.sdc.dcae.checker.*;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Scope;
+import org.springframework.stereotype.Component;
+import org.springframework.util.Base64Utils;
+
+import java.io.*;
+import java.net.URI;
+import java.util.*;
+import java.util.function.BiFunction;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+
+@Component("asdcutils")
+@Scope("singleton")
+@ConfigurationProperties(prefix="asdcutils")
+public class ASDCUtils {
+
+ private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+ private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+ @Autowired
+ private ASDC asdc;
+
+ @Autowired
+ private Blueprinter blueprint;
+
/** Default constructor for Spring instantiation; asdc/blueprint are injected via @Autowired. */
public ASDCUtils() {
    // Making sonar happy
}
+
/** Builds utils against an SDC instance only (no blueprint service). */
public ASDCUtils(URI theASDCURI) {
    this(theASDCURI, null);
}
+
/** Builds utils against an SDC instance and an optional blueprint service endpoint. */
public ASDCUtils(URI theASDCURI, URI theBlueprinterURI) {
    this.asdc = new ASDC();
    this.asdc.setUri(theASDCURI);
    if (theBlueprinterURI != null) {
        this.blueprint = new Blueprinter();
        this.blueprint.setUri(theBlueprinterURI);
    }
}
+
/** Builds utils around an existing SDC client (no blueprint service). */
public ASDCUtils(ASDC theASDC) {
    this(theASDC, null);
}
+
/** Builds utils around existing SDC client and blueprinter instances. */
public ASDCUtils(ASDC theASDC, Blueprinter theBlueprinter) {
    this.asdc = theASDC;
    this.blueprint = theBlueprinter;
}
+
/** Starts a builder-style action that copies all artifacts of one asset onto another. */
public CloneAssetArtifactsAction cloneAssetArtifacts(ASDC.AssetType theAssetType, UUID theSourceId, UUID theTargetId) {
    return new CloneAssetArtifactsAction(this.asdc, theAssetType, theSourceId, theTargetId);
}
+
/**
 * Action that copies every artifact of a source asset onto a target asset.
 * The source archive and metadata are fetched first; each artifact is then
 * re-created on the target, one at a time, through an Actions.Sequence.
 */
public static class CloneAssetArtifactsAction extends ASDC.ASDCAction<CloneAssetArtifactsAction, List<JSONObject>> {

    private ASDC.AssetType assetType;
    private UUID sourceId, targetId;

    protected CloneAssetArtifactsAction(ASDC theASDC, ASDC.AssetType theAssetType, UUID theSourceId, UUID theTargetId) {
        // inner-class qualified super: binds this action to the given ASDC client
        theASDC.super(new JSONObject());
        this.assetType = theAssetType;
        this.sourceId = theSourceId;
        this.targetId = theTargetId;
    }

    protected CloneAssetArtifactsAction self() {
        return this;
    }

    /** Sets the artifact label applied to every cloned artifact. */
    public CloneAssetArtifactsAction withLabel(String theLabel) {
        return with("artifactLabel", theLabel);
    }

    /** No request entries are mandatory for a clone. */
    protected String[] mandatoryInfoEntries() {
        return new String[] {};
    }

    /**
     * Fetches the source archive + asset info, strips per-instance fields
     * (checksum, uuid, version, url) from each artifact's metadata, re-encodes
     * the payload and queues a create-artifact call per artifact on the target.
     *
     * @return future completed with the create responses, in upload order
     */
    public Future<List<JSONObject>> execute() {
        checkMandatory();

        final Actions.Sequence<JSONObject> sequencer = new Actions.Sequence<JSONObject>();

        new Actions.Sequence().add(super.asdc().getAssetArchiveAction(this.assetType, this.sourceId)).add(super.asdc().getAssetAction(this.assetType, this.sourceId, JSONObject.class)).execute().setHandler(assetFuture -> {
            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "*** {}", assetFuture.result());
            processArtifacts((List) assetFuture.result(), (JSONObject theInfo, byte[] theData) -> {
                theInfo.remove("artifactChecksum");
                theInfo.remove("artifactUUID");
                theInfo.remove("artifactVersion");
                theInfo.remove("artifactURL");
                theInfo.put("description", theInfo.remove("artifactDescription"));
                theInfo.put("payloadData", Base64Utils.encodeToString(theData));
                return theInfo;
            }, null).forEach(artifactInfo -> sequencer.add(super.asdc().createAssetArtifact(this.assetType, this.targetId).withInfo(ASDC.merge(artifactInfo, this.info)).withOperator(this.operatorId)));
            sequencer.execute();
        });

        return sequencer.future();
    }
} //the Action class
+
+ /* */
+ private static JSONObject lookupArtifactInfo(JSONArray theArtifacts, String theName) {
+
+ for (int i = 0; theArtifacts != null && i < theArtifacts.length(); i++) {
+ JSONObject artifactInfo = theArtifacts.getJSONObject(i);
+ if (theName.equals(artifactInfo.getString("artifactName"))) {
+ debugLogger.log(LogLevel.DEBUG, ASDCUtils.class.getName(), "Found artifact info {}", artifactInfo);
+ return artifactInfo;
+ }
+ }
+
+ return null;
+ }
+
+ private static byte[] extractArtifactData(InputStream theEntryStream) throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ try {
+ byte[] buff = new byte[4096];
+ int cnt = 0;
+ while ((cnt = theEntryStream.read(buff)) != -1) {
+ baos.write(buff, 0, cnt);
+ }
+ } finally {
+ baos.close();
+ }
+ return baos.toByteArray();
+ }
+
/**
 * Recycle a cdump, fetch all relevant ASDC artifacts, and interact with Shu's toscalib service in order to generate
 * a blueprint. No 'Action' object here as there is nothing to set up.
 */
public Future<Future<String>> buildBlueprint(Reader theCdump) {

    final Recycler recycler = new Recycler();
    Object template = null;

    try {
        // normalize the composition (cdump) back into a TOSCA template
        template = recycler.recycle(theCdump);

    } catch (Exception x) {
        return Futures.failedFuture(x);
    }

    JXPathContext jxroot = JXPathContext.newContext(template);
    jxroot.setLenient(true);

    //based on the output of ASDCCatalog the node description will contain the UUID of the resource declaring it
    List uuids = (List) StreamSupport.stream(Spliterators.spliteratorUnknownSize(jxroot.iterate("topology_template/node_templates/*/description"), 16), false).distinct().filter(desc -> desc != null)
            //the desc contains the full URI and the resource uuid is the 5th path element
            .map(desc -> desc.toString().split("/")[5]).collect(Collectors.toList());

    //prepare fetching all archives/resource details (archive + detail futures per uuid)
    final Futures.Accumulator accumulator = new Futures.Accumulator();
    uuids.stream().forEach(uuid -> {
        UUID rid = UUID.fromString((String) uuid);
        accumulator.add(this.asdc.getAssetArchive(ASDC.AssetType.resource, rid));
        accumulator.add(this.asdc.getAsset(ASDC.AssetType.resource, rid, JSONObject.class));
    });

    final byte[] templateData = recycler.toString(template).getBytes(/*"UTF-8"*/);
    //retrieve all resource archive+details, prepare blueprint service request and send its request
    return Futures.advance(accumulator.accumulate(), (List theArchives) -> {
        Blueprinter.BlueprintAction action = blueprint.generateBlueprint();
        // pack each resource's artifacts as base64 models keyed by artifact base name
        processArtifacts(theArchives, (JSONObject theInfo, byte[] theData) -> new JSONObject().put(theInfo.getString("artifactName").split("\\.")[0], Base64Utils.encodeToString(theData)),
                (Stream<JSONObject> theAssetArtifacts) -> theAssetArtifacts.reduce(new JSONObject(), ASDC::merge)).forEach(artifactInfo -> action.withModelInfo(artifactInfo));

        return action.withTemplateData(templateData).execute();
    });
}
+
/**
 * Blueprint generation built on the common processCdump pipeline: each
 * referenced resource's artifacts are base64-packed into per-name models and
 * sent with the re-serialized template to the blueprint (tosca lab) service.
 */
public Future<Future<String>> buildBlueprintViaToscaLab(Reader theCdump) {
    return processCdump(theCdump, (theTemplate, theArchives) -> {
        Blueprinter.BlueprintAction action = blueprint.generateBlueprint();
        processArtifacts(theArchives, (JSONObject theInfo, byte[] theData) -> new JSONObject().put(theInfo.getString("artifactName").split("\\.")[0], Base64Utils.encodeToString(theData)),
                (Stream<JSONObject> theAssetArtifacts) -> theAssetArtifacts.reduce(new JSONObject(), ASDC::merge)).forEach(artifactInfo -> action.withModelInfo(artifactInfo));

        return action.withTemplateData(Recycler.toString(theTemplate).getBytes()).execute();

    });
}
+
/**
 * TargetLocator that tracks the three checker-relevant artifacts of an asset —
 * schema, template and translation — in that fixed processing order.
 * Artifacts are matched by the keywords "schema"/"template"/"translate" found
 * in their description or label, and exposed as in-memory checker Targets.
 */
private static class Tracker implements TargetLocator {

    // fixed slots; the ordinal doubles as the index into tgts
    private static enum Position {
        SCHEMA, TEMPLATE, TRANSLATE;
    }

    private static final int Positions = Position.values().length;

    // one slot per Position; null while that artifact has not been seen
    private List<Target> tgts = new ArrayList<Target>(3);

    public Tracker() {
        clear();
    }

    /** Search paths are not supported; resolution is purely in-memory. */
    public boolean addSearchPath(URI theURI) {
        return false;
    }

    public boolean addSearchPath(String thePath) {
        return false;
    }

    public Iterable<URI> searchPaths() {
        return Collections.emptyList();
    }

    /**
     * Maps artifact keywords to a slot index; the first key that matches wins.
     *
     * @return the slot ordinal, or -1 when none of the keys matches
     */
    protected int position(String... theKeys) {
        for (String key : theKeys) {
            if ("schema".equals(key)) {
                return Position.SCHEMA.ordinal();
            }
            if ("template".equals(key)) {
                return Position.TEMPLATE.ordinal();
            }
            if ("translate".equals(key)) {
                return Position.TRANSLATE.ordinal();
            }
        }
        return -1;
    }

    /** Resolves an import by artifact name against the tracked targets. */
    public Target resolve(String theName) {
        for (Target tgt : tgts) {
            if (tgt != null && tgt.getName().equals(theName)) {
                return tgt;
            }
        }
        return null;
    }

    /**
     * Registers an artifact in its slot when its description or label marks it
     * as schema/template/translation; others are only logged.
     * The target URI embeds the 6th path element of artifactURL — presumably
     * the owning asset id; TODO confirm against the SDC artifact URL layout.
     */
    public void track(JSONObject theInfo, final byte[] theData) {
        String uri = theInfo.getString("artifactURL").split("/")[5];
        String name = theInfo.getString("artifactName"), desc = theInfo.getString("artifactDescription"), label = theInfo.getString("artifactLabel");
        int pos = position(desc, label);

        debugLogger.log(LogLevel.DEBUG, ASDCUtils.class.getName(), "Tracking {} at {}, {}", name, pos, theInfo.optString("artifactURL"));

        if (pos > -1) {
            // anonymous Target serving the already-fetched bytes from memory
            tgts.set(pos, new Target(name, URI.create("asdc:" + uri + "/" + name)) {
                @Override
                public Reader open(){
                    return new BufferedReader(new InputStreamReader(new ByteArrayInputStream(theData)));
                }
            });
        }
    }

    public boolean hasSchema() {
        return tgts.get(Position.SCHEMA.ordinal()) != null;
    }

    public Target schema() {
        return tgts.get(Position.SCHEMA.ordinal());
    }

    public boolean hasTemplate() {
        return tgts.get(Position.TEMPLATE.ordinal()) != null;
    }

    public Target template() {
        return tgts.get(Position.TEMPLATE.ordinal());
    }

    public boolean hasTranslation() {
        return tgts.get(Position.TRANSLATE.ordinal()) != null;
    }

    public Target translation() {
        return tgts.get(Position.TRANSLATE.ordinal());
    }

    /** Resets all slots to empty (grows the backing list on first use). */
    public void clear() {
        if (tgts.isEmpty()) {
            for (int i = 0; i < Positions; i++) {
                tgts.add(null);
            }
        } else {
            Collections.fill(tgts, null);
        }
    }
}
+
/** Creates a TOSCA Checker, or returns null (after logging) when construction fails. */
private Checker buildChecker() {
    try {
        return new Checker();
    } catch (CheckerException cx) {
        errLogger.log(LogLevel.ERROR, this.getClass().getName(), "CheckerException while creating Checker {}", cx);
        return null;
    }
}
+
/**
 * Recycles a cdump and type-checks it together with its resources'
 * schema/template/translation artifacts, producing a checker Catalog.
 * Returns null (inside the future) when a Checker could not be created.
 */
public Future<Catalog> buildCatalog(Reader theCdump) {

    //
    //the purpose of the tracking is to be able to resolve import references within the 'space' of an
    //asset's artifacts
    //processing order is important too so we 'order the targets: schema, template, translation
    //
    final Tracker tracker = new Tracker();
    final Catalog catalog = Checker.buildCatalog();

    return processCdump(theCdump, (theTemplate, theArchives) -> {

        final Checker checker = buildChecker();
        if (checker == null) {
            // construction failure already logged; surfaced to caller as null
            return null;
        }
        checker.setTargetLocator(tracker);

        processArtifacts(theArchives, (JSONObject theInfo, byte[] theData) -> {
            tracker.track(theInfo, theData);
            return (Catalog) null;
        },
        // aggregation: this is where the actual processing takes place now that
        // we have all the targets
        (Stream<Catalog> theAssetArtifacts) -> {
            //the stream is full of nulls, ignore it, work with the tracker

            try {
                if (tracker.hasSchema()) {
                    checker.check(tracker.schema(), catalog);
                }
                if (tracker.hasTemplate()) {
                    checker.check(tracker.template(), catalog);
                }
                if (tracker.hasTranslation()) {
                    checker.check(tracker.translation(), catalog);
                }
            } catch (CheckerException cx) {
                //got to do better than this
                errLogger.log(LogLevel.ERROR, ASDC.class.getName(),"CheckerException while checking catalog:{}", cx);
            } finally {
                // reset slots so the next asset's artifacts start clean
                tracker.clear();
            }
            return checker.catalog();
        });

        // finally validate the cdump itself against everything checked above
        Target cdump = new Target("cdump", URI.create("asdc:cdump"));
        cdump.setTarget(theTemplate);

        validateCatalog(catalog, checker, cdump);

        return catalog;
    });
}
+
/** Validates the cdump target against the catalog; errors are logged, never thrown. */
private void validateCatalog(Catalog catalog, Checker checker, Target cdump) {
    try {
        checker.validate(cdump, catalog);
    } catch (CheckerException cx) {
        errLogger.log(LogLevel.ERROR, ASDC.class.getName(),"CheckerException while building catalog:{}", cx);
    }
}
+
/**
 * The common processing skeleton: recycles the cdump into a template, collects
 * the resource UUIDs referenced by its node descriptions, fetches each
 * resource's archive and details serially, then hands (template, archives) to
 * theProcessor.
 */
private <T> Future<T> processCdump(Reader theCdump, BiFunction<Object, List, T> theProcessor) {

    final Recycler recycler = new Recycler();
    Object template = null;
    try {
        template = recycler.recycle(theCdump);

    } catch (Exception x) {
        return Futures.failedFuture(x);
    }

    JXPathContext jxroot = JXPathContext.newContext(template);
    jxroot.setLenient(true);

    //based on the output of ASDCCatalog the node description will contain the UUID of the resource declaring it
    //the desc contains the full URI and the resource uuid is the 5th path element
    List uuids = (List) StreamSupport.stream(Spliterators.spliteratorUnknownSize(jxroot.iterate("topology_template/node_templates/*/description"), 16), false).distinct().filter(desc -> desc != null)
            .map(desc -> desc.toString().split("/")[5]).collect(Collectors.toList());

    //serialized fetch version: archive then details, per uuid, in order
    final Actions.Sequence sequencer = new Actions.Sequence();
    uuids.stream().forEach(uuid -> {
        UUID rid = UUID.fromString((String) uuid);
        sequencer.add(this.asdc.getAssetArchiveAction(ASDC.AssetType.resource, rid));
        sequencer.add(this.asdc.getAssetAction(ASDC.AssetType.resource, rid, JSONObject.class));
    });

    // effectively-final copy for use inside the lambda
    final Object tmpl = template;
    return Futures.advance(sequencer.execute(), (List theArchives) -> theProcessor.apply(tmpl, theArchives));
}
+
/**
 * Walks (archive bytes, asset info) pairs as produced by the fetch sequences,
 * unzips each archive and applies theProcessor to every entry that appears in
 * the asset's artifact list. When theAggregator is given, each asset's
 * per-artifact results are reduced to one element of the returned stream;
 * otherwise all artifact results are emitted individually.
 * Returns null when an archive cannot be read (after logging).
 */
private static <T> Stream<T> processArtifacts(List theArtifactData, BiFunction<JSONObject, byte[], T> theProcessor, Function<Stream<T>, T> theAggregator) {

    Stream.Builder<T> assetBuilder = Stream.builder();

    for (int i = 0; i < theArtifactData.size(); i = i + 2) { //cute old style loop

        // entries come in pairs: [i] = archive bytes, [i+1] = asset metadata
        JSONObject assetInfo = (JSONObject) theArtifactData.get(i + 1);
        byte[] assetData = (byte[]) theArtifactData.get(i + 0);

        JSONArray artifacts = assetInfo.optJSONArray("artifacts");

        Stream.Builder<T> artifactBuilder = Stream.builder();

        try (ZipInputStream zipper = new ZipInputStream(new ByteArrayInputStream(assetData))){
            //we process the artifacts in the order they are stored in the archive .. fugly
            for (ZipEntry zipped = zipper.getNextEntry(); zipped != null; zipped = zipper.getNextEntry()) {
                // match zip entries to artifact metadata by file name (path stripped)
                JSONObject artifactInfo = lookupArtifactInfo(artifacts, StringUtils.substringAfterLast(zipped.getName(), "/"));
                if (artifactInfo != null) {
                    artifactBuilder.add(theProcessor.apply(artifactInfo, extractArtifactData(zipper)));
                }
                zipper.closeEntry();
            }
        } catch (IOException iox) {
            errLogger.log(LogLevel.ERROR, ASDC.class.getName(), "IOException: {}", iox);
            return null;
        }

        if (theAggregator != null) {
            assetBuilder.add(theAggregator.apply(artifactBuilder.build()));
        } else {
            artifactBuilder.build().forEach(entry -> assetBuilder.add(entry));
        }
    }

    return assetBuilder.build();
}
+}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCUtilsController.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCUtilsController.java
new file mode 100644
index 0000000..4432712
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCUtilsController.java
@@ -0,0 +1,76 @@
+package org.onap.sdc.dcae.catalog.asdc;
+
+import java.io.StringReader;
+
+import java.util.UUID;
+import java.util.Map;
+import java.util.List;
+import java.util.concurrent.Callable;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.springframework.beans.BeansException;
+
+import org.springframework.web.bind.annotation.RestController;
+
+import org.onap.sdc.dcae.catalog.asdc.ASDC;
+import org.onap.sdc.dcae.catalog.asdc.ASDCUtils;
+import org.onap.sdc.dcae.catalog.asdc.ASDCUtilsController;
+
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestHeader;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.ResponseEntity;
+
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+import org.json.JSONObject;
+
+
+@RestController
+@ConfigurationProperties(prefix="asdcUtilsController")
+public class ASDCUtilsController implements ApplicationContextAware {
+
+ private ApplicationContext appCtx;
+ private OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+ //Constants//
+ private static String NOT_CERTIFIED_CHECKOUT = "NOT_CERTIFIED_CHECKOUT";
+ private static String NOT_CERTIFIED_CHECKIN = "NOT_CERTIFIED_CHECKIN";
+ private static String CERTIFICATION_IN_PROGRESS = "CERTIFICATION_IN_PROGRESS";
+ private static String CERTIFIED = "CERTIFIED";
+
+
+ public void setApplicationContext(ApplicationContext theCtx) throws BeansException {
+ this.appCtx = theCtx;
+ }
+
+ @PostConstruct
+ public void initController() {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(),"initASDCUtilsController");
+
+ //Done
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(),"ASDCUtilsController started");
+ }
+
+ @PreDestroy
+ public void cleanupController() {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(),"cleanupASDCUtilsController");
+ }
+
+}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/Blueprinter.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/Blueprinter.java
new file mode 100644
index 0000000..3e78d38
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/Blueprinter.java
@@ -0,0 +1,76 @@
+package org.onap.sdc.dcae.catalog.asdc;
+
+import java.net.URI;
+
+import java.util.Collections;
+
+import org.json.JSONObject;
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.catalog.commons.Action;
+import org.onap.sdc.dcae.catalog.commons.Future;
+import org.onap.sdc.dcae.catalog.commons.Http;
+import org.json.JSONArray;
+
+import org.springframework.util.Base64Utils;
+
+import org.springframework.http.MediaType;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.HttpEntity;
+import org.springframework.stereotype.Component;
+import org.springframework.context.annotation.Scope;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
/**
 * Client for the blueprint generation (tosca lab) service: accumulates model
 * artifacts and a template into a JSON request and POSTs it to the configured
 * service endpoint.
 */
@Component("blueprinter")
@Scope("singleton")
@ConfigurationProperties(prefix="blueprinter")
public class Blueprinter {


    // endpoint of the blueprint generation service; set via configuration or setUri
    private URI serviceUri;
    private OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();


    public Blueprinter() {
    }

    /** Sets the blueprint service endpoint. */
    public void setUri(URI theUri) {
        this.serviceUri = theUri;
    }

    /** Starts a new blueprint-generation request. */
    public BlueprintAction generateBlueprint() {
        return new BlueprintAction();
    }

    /**
     * Builder for a single blueprint-generation call. The request body collects
     * base64-encoded model artifacts under "models" and the template under
     * "template".
     */
    public class BlueprintAction implements Action<String> {

        private JSONObject body = new JSONObject();


        protected BlueprintAction() {
        }

        /** Not implemented: this is a no-op stub; supply models via withModelInfo instead. */
        public BlueprintAction withModelData(byte[] theSchema, byte[] theTemplate, byte[] theTranslation) {
            return this;
        }

        /** Appends one model descriptor to the "models" array of the request. */
        public BlueprintAction withModelInfo(JSONObject theModelInfo) {
            body.append("models", theModelInfo);
            return this;
        }

        /** Sets the base64-encoded template payload. */
        public BlueprintAction withTemplateData(byte[] theData) {
            body.put("template", Base64Utils.encodeToString(theData));
            return this;
        }

        /** POSTs the accumulated body to the service as JSON and returns the raw response. */
        public Future<String> execute() {
            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Blueprinter::execute() | PAYLOAD to TOSCA_LAB={}", body.toString());
            HttpHeaders headers = new HttpHeaders();
            headers.setContentType(MediaType.APPLICATION_JSON);
            headers.setAccept(Collections.singletonList(MediaType.APPLICATION_JSON));
            return Http.exchange(serviceUri.toString(), HttpMethod.POST, new HttpEntity<String>(body.toString(), headers), String.class);
        }
    }
}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/Cloudify.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/Cloudify.java
new file mode 100644
index 0000000..3208bd2
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/Cloudify.java
@@ -0,0 +1,249 @@
+package org.onap.sdc.dcae.catalog.asdc;
+
+import java.util.AbstractMap;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.stream.Stream;
+
+import org.apache.commons.jxpath.JXPathContext;
+import org.apache.commons.jxpath.Pointer;
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.catalog.commons.ListBuilder;
+import org.onap.sdc.dcae.catalog.commons.MapBuilder;
+import org.onap.sdc.dcae.checker.Catalog;
+import org.onap.sdc.dcae.checker.Construct;
+import org.onap.sdc.dcae.checker.Target;
+
+import com.google.common.collect.Lists;
+import org.yaml.snakeyaml.DumperOptions;
+import org.yaml.snakeyaml.Yaml;
+
+
/**
 * Generates a Cloudify DSL blueprint from a set of TOSCA targets held in a
 * checker {@link Catalog}. Node templates from the "cdump" target are matched
 * against "templat*" (model) and "translat*" (translation) targets and merged
 * into a single Cloudify document.
 */
public class Cloudify {

    private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
    private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();

    // Source catalog containing the cdump, template and translate targets.
    Catalog catalog;

    public Cloudify(Catalog c)
    {
        catalog = c;
    }
    /**
     * Wraps one model template target: the raw parsed YAML map, a JXPath
     * context rooted at it, and the name of the node it models.
     */
    public class ModelTemplate {
        public Map<String, Map> template;
        public JXPathContext jx;
        public String node;
        public ModelTemplate(Map<String, Map> t, JXPathContext j, String node_name)
        {
            template = t;
            jx = j;
            node = node_name;
        }

        /**
         * Resolves a property value from the source context. If the property is
         * declared via get_input, the referenced input's default from this
         * template is returned; otherwise the literal property value is used.
         * Returns null when the property cannot be resolved.
         */
        public Object getPropValue(JXPathContext jx_src, String name)
        {
            try{
                Object ret = jx_src.getValue("properties/"+name+"/get_input");
                if (ret==null)
                    return jx_src.getValue("properties/"+name);
                return getDefaultPropValue((String)ret);
            }
            catch (RuntimeException e) {
                // NOTE(review): exception swallowed intentionally(?) — falls through
                // to the plain property lookup below; consider logging at debug level.
            }
            try{
                return jx_src.getValue("properties/"+name+"");
            }
            catch (RuntimeException e) {
                // Property not present at all — treated as "no value".
                return null;
            }
        }

        /**
         * Looks up the default value of the named input anywhere in this
         * template; null when no such input or default exists.
         */
        public Object getDefaultPropValue(String name) {
            try {
                return jx.getValue("//"+name+"/default");
            }
            catch (RuntimeException e) {
                return null;
            }

        }
    }

    /**
     * Wraps one translation template target (a TOSCA substitution mapping):
     * the raw parsed map, a JXPath context rooted at it, and the name of the
     * node being translated.
     */
    public class ModelTranslate {
        public Map<String, Map> template;
        public JXPathContext jx;
        public String node;

        public ModelTranslate(Map<String, Map> t, JXPathContext j, String node_name)
        {
            template = t;
            jx = j;
            node = node_name;
        }

        /**
         * Returns "&lt;node&gt;_&lt;firstTemplateName&gt;" using the first entry of this
         * template's node_templates, or null when there are none.
         */
        public String getTranslateName()
        {
            Map<String, Object> node_temp = (Map<String, Object>)jx.getValue("//node_templates");
            Iterator it = node_temp.keySet().iterator();
            if (it.hasNext())
                return node + "_"+ it.next();
            else
                return null;
        }

        /**
         * Produces the translated node_templates for one cdump node: every
         * get_input reference inside this translation template is replaced
         * in-place with the value resolved from the source node (jx_src).
         * Returns a map keyed by the translated name, or the raw
         * node_templates map when no name can be derived.
         */
        public Map<String, Object> translate(JXPathContext jx_src, Map<String, Map> model_lib, String node_name)
        {
            // Substitute every get_input occurrence with the concrete value
            // taken from the source node's properties / model defaults.
            for (Iterator prop_iter = jx.iteratePointers("//*[@get_input]"); prop_iter.hasNext();) {

                Pointer p = (Pointer)prop_iter.next();
                JXPathContext prop_path = jx.getRelativeContext(p);

                ModelTemplate src_model =(ModelTemplate) model_lib.get(node_name).get("model");

                Object temp_o = src_model.getPropValue(jx_src, (String) prop_path.getValue("get_input"));
                //prop_path.setValue(".", temp_o);
                jx.setValue(p.asPath(), temp_o);
            }

//		JXPathContext jx_src = JXPathContext.newContext(src);
            // NOTE(review): this loop computes req_node_name from the source's
            // requirement nodes but never writes the result anywhere — it looks
            // like unfinished requirement-rewiring logic; confirm intent.
            for (Iterator req_iter = jx_src.iteratePointers("//*/node"); req_iter.hasNext();) {
                Pointer p = (Pointer)req_iter.next();
                String req_node_name = (String)jx_src.getValue(p.asPath());

                for (Iterator it = model_lib.keySet().iterator(); it.hasNext();) {
                    String key = (String) it.next();
                    if (key.indexOf(req_node_name) <0 )
                        continue;
                    ModelTranslate tt = (ModelTranslate) model_lib.get(key).get("translate");
                    if (tt == null)
                        req_node_name = null;
                    else
                    {
                        req_node_name = tt.getTranslateName();
                    }
                    break;
                }

            }

            String tn_name = getTranslateName();

            if (tn_name == null)
                return (Map<String, Object>)jx.getValue("//node_templates");
            else
                return (new MapBuilder<String, Object>().put(tn_name, jx.getValue("//node_templates/*")).build());
        }

    }

    /**
     * Finds the translation template ("translat*" target) whose substitution
     * mapping node_type equals the given type; null when none matches.
     */
    public ModelTranslate findTranslateTemplate(String ty, String node) {
        for (Target t: catalog.targets()) {

            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTranslateTemplate: target {}", t.getName());
            if (t.getName().startsWith("translat") == false) {
                continue;
            }

            Map<String, Map>temp = (Map<String, Map>)t.getTarget();

            JXPathContext jxroot = JXPathContext.newContext(temp);
            try{
                String sub_type = (String)jxroot.getValue("topology_template/substitution_mappings/node_type");
                if (sub_type != null && sub_type.equals(ty)) {
                    return new ModelTranslate(temp, jxroot, node);
                }
            }
            catch (RuntimeException e) {
                // Target without a substitution mapping section — skip it.
                errLogger.log(LogLevel.ERROR, this.getClass().getName(), "translate template {} does not have substitution mapping section", t.getName());
            }
        }
        return null;
    }

    /**
     * Finds the model template ("templat*" target) that declares a node
     * template of the given type; null when none matches.
     */
    public ModelTemplate findModelTemplate(String ty, String node) {
        for (Target t: catalog.targets()) {

            if (t.getName().startsWith("templat") == false)
                continue;
            Map<String, Map>temp = (Map<String, Map>)t.getTarget();

            JXPathContext jxroot = JXPathContext.newContext(temp);
            for (Iterator it = jxroot.iterate("topology_template/node_templates/*/type"); it.hasNext();) {
                String node_type = (String)it.next();
                if (node_type != null && node_type.equals(ty)) {
                    return new ModelTemplate(temp, jxroot, node);
                }
            }
        }
        return null;
    }

    /**
     * Builds the Cloudify blueprint as a map: translates every node of the
     * "cdump" target via its matching model/translate templates and wraps the
     * result with the cloudify_dsl_1_3 header and imports.
     * NOTE(review): throws NPE if no "cdump" target exists (target_temp stays
     * null) — confirm whether callers guarantee its presence.
     */
    public Map<String, Object> createBlueprint() {

        Map<String, Map> target_temp = null;
        for (Target t: catalog.targets()) {

            if (t.getName().equals("cdump")) {
                target_temp = catalog.getTargetTemplates(t, Construct.Node);
            }
        }

        JXPathContext jxroot = JXPathContext.newContext(target_temp);

        Map<String, Object> output_temp = new HashMap<String, Object>();
        // Per cdump node: {"model": ModelTemplate, "translate": ModelTranslate}.
        Map<String, Map> model_lib = new HashMap<String, Map>();

        for (Iterator iter = target_temp.keySet().iterator(); iter.hasNext();)
        {
            String node_key = (String)iter.next();
            //jxroot.getVariables().declareVariable("name", target_temp.get(node_key));
            //String node_type = (String)jxroot.getValue("$name/type");
            String node_type = (String)jxroot.getValue(node_key+"/type");

            ModelTranslate t_temp = findTranslateTemplate(node_type, node_key);
            ModelTemplate t_model = findModelTemplate(node_type, node_key);

            model_lib.put(node_key, new MapBuilder()
                .put("model", t_model)
                .put("translate", t_temp)
                .build());
        }

        // Translate each node and collect the resulting node_templates.
        for (Iterator iter = model_lib.keySet().iterator(); iter.hasNext();) {
            String node_key = (String) iter.next();
            ModelTranslate t = (ModelTranslate) model_lib.get(node_key).get("translate");
            JXPathContext jxnode = jxroot.getRelativeContext(jxroot.getPointer(node_key));
            if (t != null) {
                Map<String, Object> t_output =t.translate(jxnode, model_lib, node_key);
                if (t_output != null)
                    output_temp.putAll(t_output);
            }

        }

        return new MapBuilder<String, Object>()
            .put("tosca_definitions_version", new String("cloudify_dsl_1_3"))
            .put("imports", new ListBuilder()
                .add(new MapBuilder()
                    .put("cloudify",
                        "http://www.getcloudify.org/spec/cloudify/3.4/types.yaml")
                    .build())
                .build())
            .put("node_templates", output_temp)
            .build();

    }

    /** Serializes {@link #createBlueprint()} to a single-line-friendly YAML document. */
    public String createBlueprintDocument() {
        DumperOptions options = new DumperOptions();
        // Very wide output so values are not wrapped across lines.
        options.setWidth(1000000);
        Yaml yaml = new Yaml(options);
        return yaml.dump(createBlueprint());
    }
}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/client/ISdcClient.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/client/ISdcClient.java
new file mode 100644
index 0000000..554991a
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/client/ISdcClient.java
@@ -0,0 +1,47 @@
+package org.onap.sdc.dcae.client;
+
+import org.onap.sdc.dcae.composition.restmodels.CreateVFCMTRequest;
+import org.onap.sdc.dcae.composition.restmodels.sdc.*;
+import org.onap.sdc.dcae.composition.restmodels.ReferenceUUID;
+import org.onap.sdc.dcae.enums.AssetType;
+
+import java.util.List;
+
/**
 * Client abstraction over the SDC catalog REST API: retrieval of resources and
 * services, lifecycle state changes, artifact CRUD on resources and VF
 * instances, and external monitoring-reference management. Every operation
 * carries a request id for end-to-end tracing.
 */
public interface ISdcClient {

    /** Fetches full metadata of a resource by UUID. */
    ResourceDetailed getResource(String uuid, String requestId) throws Exception;

    /** Fetches full metadata of a service by UUID. */
    ServiceDetailed getService(String uuid, String requestId) throws Exception;

    /** Lists resources filtered by type/category/subcategory (any may be null). */
    List<Resource> getResources(String resourceType, String category, String subcategory, String requestId) throws Exception;

    /** Lists all services visible to the consumer. */
    List<Service> getServices(String requestId) throws Exception;

    /** Attaches an external monitoring reference (VFCMT) to a VF instance. */
    String addExternalMonitoringReference(String userId, CreateVFCMTRequest resource, ReferenceUUID vfiUuid, String requestId);

    /** Removes an external monitoring reference from a VF instance. */
    void deleteExternalMonitoringReference(String userId, String context, String uuid, String vfiName, String vfcmtUuid, String requestId);

    /** Creates a new VFCMT resource. */
    ResourceDetailed createResource(String userId, CreateVFCMTRequest resource, String requestId) throws Exception;

    /** Performs a lifecycle transition (checkin/checkout/...) on a resource. */
    ResourceDetailed changeResourceLifecycleState(String userId, String uuid, String lifecycleOperation, String userRemarks, String requestId) throws Exception;

    /** Performs a lifecycle transition on a service. */
    ServiceDetailed changeServiceLifecycleState(String userId, String uuid, String lifecycleOperation, String userRemarks, String requestId) throws Exception;

    /** Dispatches a lifecycle transition to resource or service by asset type. */
    Asset changeAssetLifecycleState(String userId, String uuid, String lifecycleOperation, String userRemarks, AssetType assetType, String requestId) throws Exception;

    /** Downloads a resource artifact's payload. */
    String getResourceArtifact(String resourceUuid, String artifactUuid, String requestId) throws Exception;

    /** Creates an artifact on a resource. */
    Artifact createResourceArtifact(String userId, String resourceUuid, Artifact artifact, String requestId) throws Exception;

    /** Updates an existing resource artifact. */
    Artifact updateResourceArtifact(String userId, String resourceUuid, Artifact artifact, String requestId) throws Exception;

    /** Deletes a resource artifact. */
    void deleteResourceArtifact(String userId, String resourceUuid, String artifactId, String requestId) throws Exception;

    /** Creates an artifact on a VF instance within a service. */
    Artifact createVfInstanceArtifact(String userId, String serviceUuid, String normalizedInstanceName, Artifact artifact, String requestId) throws Exception;

    /** Updates an artifact on a VF instance within a service. */
    Artifact updateVfInstanceArtifact(String userId, String serviceUuid, String normalizedInstanceName, Artifact artifact, String requestId) throws Exception;

    /** Retrieves monitoring references for a service version. */
    ExternalReferencesMap getMonitoringReferences(String context, String uuid, String version, String requestId);

    /** Deletes an artifact from a VF instance. */
    void deleteInstanceResourceArtifact(String userId, String context, String serviceUuid, String normalizedVfiName, String artifactUuid, String requestId);
}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/client/SdcRestClient.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/client/SdcRestClient.java
new file mode 100644
index 0000000..058d9c7
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/client/SdcRestClient.java
@@ -0,0 +1,221 @@
+package org.onap.sdc.dcae.client;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.message.BasicHeader;
+import org.onap.sdc.dcae.composition.restmodels.CreateVFCMTRequest;
+import org.onap.sdc.dcae.composition.restmodels.ReferenceUUID;
+import org.onap.sdc.dcae.composition.restmodels.sdc.*;
+import org.onap.sdc.dcae.composition.util.DcaeBeConstants;
+import org.onap.sdc.dcae.composition.util.SystemProperties;
+import org.onap.sdc.dcae.enums.AssetType;
+import org.onap.sdc.dcae.enums.SdcConsumerInfo;
+import org.onap.sdc.dcae.utils.Normalizers;
+import org.onap.sdc.dcae.utils.SDCResponseErrorHandler;
+import org.onap.sdc.dcae.utils.SdcRestClientUtils;
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.*;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.stereotype.Component;
+import org.springframework.util.Base64Utils;
+import org.springframework.web.client.*;
+
+import javax.annotation.PostConstruct;
+import java.net.URI;
+import java.util.*;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
/**
 * {@link ISdcClient} implementation backed by a Spring {@link RestTemplate}.
 * Connection details (base URI, auth, instance id) are read once from system
 * properties at startup; every request carries an X-ECOMP-RequestID header.
 */
@Component("sdcrestclient")
public class SdcRestClient implements ISdcClient {

    @Autowired
    private SystemProperties systemProperties;

    private static final String SLASH = "/";
    private static final String ECOMP_INSTANCE_ID_HEADER = "X-ECOMP-InstanceID";
    private static final String ECOMP_REQUEST_ID_HEADER = "X-ECOMP-RequestID";
    private static final String USER_ID_HEADER = "USER_ID";
    private static final String RESOURCES_PATH = "resources";
    private static final String SERVICES_PATH = "services";
    private static final String ARTIFACTS_PATH = "artifacts";
    private static final String CONTENT_MD5_HEADER = "Content-MD5";
    private static final String RESOURCE_INSTANCES_PATH = "resourceInstances";
    private static final String LIFECYCLE_STATE_PATH = "lifecycleState/{lifecycleOperation}";
    private static final String METADATA_PATH = "metadata";
    private static final String VERSION_PATH = "version";
    private static final String MONITORING_REFERENCES_PATH = "externalReferences/monitoring";

    private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();

    // SDC catalog base URL; all request paths are appended to it.
    private String uri;

    private RestTemplate client;

    /**
     * Builds the HTTP client after injection: extracts consumer info from the
     * configured URI, installs default headers (auth, accept, instance id) and
     * a custom SDC error handler.
     */
    @PostConstruct
    private void init() {
        URI configUri = URI.create(systemProperties.getProperties().getProperty(DcaeBeConstants.Config.URI));
        EnumMap<SdcConsumerInfo, String> userInfo = SdcRestClientUtils.extractConsumerInfoFromUri(configUri);
        CloseableHttpClient httpClient = HttpClientBuilder.create().setDefaultHeaders(defaultHeaders(userInfo)).build();
        HttpComponentsClientHttpRequestFactory requestFactory = new HttpComponentsClientHttpRequestFactory();
        requestFactory.setHttpClient(httpClient);
        client = new RestTemplate(requestFactory);
        client.setErrorHandler(new SDCResponseErrorHandler());
        uri = userInfo.get(SdcConsumerInfo.CATALOG_URL);
    }

    /** Headers attached to every outgoing request (auth, accept types, instance id). */
    private List<BasicHeader> defaultHeaders(EnumMap<SdcConsumerInfo, String> userInfo) {
        List<BasicHeader> headers = new ArrayList<>();
        headers.add(new BasicHeader(HttpHeaders.AUTHORIZATION, userInfo.get(SdcConsumerInfo.AUTH)));
        headers.add(new BasicHeader(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON_VALUE));
        headers.add(new BasicHeader(HttpHeaders.ACCEPT, MediaType.APPLICATION_OCTET_STREAM_VALUE));
        headers.add(new BasicHeader(ECOMP_INSTANCE_ID_HEADER, userInfo.get(SdcConsumerInfo.INSTANCE_ID)));
        return headers;
    }

    /** GET resources/{uuid}/metadata. */
    public ResourceDetailed getResource(String uuid, String requestId) {
        String url = buildRequestPath(RESOURCES_PATH, uuid, METADATA_PATH);
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Get resource from SDC. URL={}", url);
        return getObject(url, requestId, ResourceDetailed.class);
    }

    /** GET services/{uuid}/metadata. */
    public ServiceDetailed getService(String uuid, String requestId) {
        String url = buildRequestPath(SERVICES_PATH, uuid, METADATA_PATH);
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Get service from SDC. URL={}", url);
        return getObject(url, requestId, ServiceDetailed.class);
    }

    /** GET resources with a type/category/subcategory filter query string. */
    public List<Resource> getResources(String resourceType, String category, String subcategory, String requestId) {
        String url = buildRequestPath(RESOURCES_PATH, SdcRestClientUtils.buildResourceFilterQuery(resourceType, category, subcategory));
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Get resources from SDC. URL={}", url);
        return Arrays.asList(getObject(url, requestId, Resource[].class));
    }

    /** GET services. */
    public List<Service> getServices(String requestId) {
        String url = buildRequestPath(SERVICES_PATH);
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Get services from SDC. URL={}", url);
        return Arrays.asList(getObject(url, requestId, Service[].class));
    }

    /** POST a monitoring reference linking a VFCMT to a VF instance. */
    public String addExternalMonitoringReference(String userId, CreateVFCMTRequest resource, ReferenceUUID vfcmtUuid, String requestId) {
        String url = buildRequestPath(resource.getContextType(), resource.getServiceUuid(), RESOURCE_INSTANCES_PATH,
            Normalizers.normalizeComponentInstanceName(resource.getVfiName()), MONITORING_REFERENCES_PATH);

        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Connecting service id {} name {} to vfcmt {} URL={}",
            resource.getServiceUuid(), resource.getVfiName(), vfcmtUuid.getReferenceUUID(), url);

        return client.postForObject(url, new HttpEntity<>(vfcmtUuid, postResourceHeaders(userId, requestId)),
            String.class);
    }

    /** DELETE a monitoring reference from a VF instance. Expects an already-normalized VFI name. */
    public void deleteExternalMonitoringReference(String userId, String context, String uuid, String normalizeVfiName, String vfcmtUuid, String requestId) {
        String url = buildRequestPath(context, uuid, RESOURCE_INSTANCES_PATH,
            normalizeVfiName, MONITORING_REFERENCES_PATH, vfcmtUuid);
        client.exchange(url, HttpMethod.DELETE, new HttpEntity(postResourceHeaders(userId, requestId)), String.class);
    }

    /** POST a new VFCMT resource. */
    public ResourceDetailed createResource(String userId, CreateVFCMTRequest resource, String requestId) {
        String url = buildRequestPath(RESOURCES_PATH);
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Create SDC resource with name {} URL={}", resource.getName(), url);
        return client.postForObject(url, new HttpEntity<>(resource, postResourceHeaders(userId, requestId)), ResourceDetailed.class);
    }

    /** POST a lifecycle transition on a resource; {lifecycleOperation} is expanded into the URL template. */
    public ResourceDetailed changeResourceLifecycleState(String userId, String uuid, String lifecycleOperation, String userRemarks, String requestId) {
        String url = buildRequestPath(RESOURCES_PATH, uuid, LIFECYCLE_STATE_PATH);
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Change SDC resource lifecycle state ({}). URL={}", lifecycleOperation, url);
        return client.postForObject(url, new HttpEntity<>(SdcRestClientUtils.buildUserRemarksObject(userRemarks), postResourceHeaders(userId, requestId)), ResourceDetailed.class, lifecycleOperation);
    }

    /** POST a lifecycle transition on a service. */
    public ServiceDetailed changeServiceLifecycleState(String userId, String uuid, String lifecycleOperation, String userRemarks, String requestId) {
        String url = buildRequestPath(SERVICES_PATH, uuid, LIFECYCLE_STATE_PATH);
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Change SDC service lifecycle state ({}). URL={}", lifecycleOperation, url);
        return client.postForObject(url, new HttpEntity<>(SdcRestClientUtils.buildUserRemarksObject(userRemarks), postResourceHeaders(userId, requestId)), ServiceDetailed.class, lifecycleOperation);
    }

    /** Routes the lifecycle change to the resource or service variant by asset type. */
    public Asset changeAssetLifecycleState(String userId, String uuid, String lifecycleOperation, String userRemarks, AssetType assetType, String requestId) {
        return AssetType.RESOURCE == assetType ? changeResourceLifecycleState(userId, uuid, lifecycleOperation, userRemarks, requestId) : changeServiceLifecycleState(userId, uuid, lifecycleOperation, userRemarks, requestId);
    }

    /** GET the raw payload of a resource artifact. */
    public String getResourceArtifact(String resourceUuid, String artifactUuid, String requestId) {
        String url = buildRequestPath(RESOURCES_PATH, resourceUuid, ARTIFACTS_PATH, artifactUuid);
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Get resource artifact from SDC. URL={}", url);
        return getObject(url, requestId, String.class);
    }

    /** POST a new artifact to a resource; body is MD5-signed via Content-MD5. */
    public Artifact createResourceArtifact(String userId, String resourceUuid, Artifact artifact, String requestId) throws Exception {
        String url = buildRequestPath(RESOURCES_PATH, resourceUuid, ARTIFACTS_PATH);
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Create SDC resource artifact. URL={}", url);
        String artifactData = SdcRestClientUtils.artifactToString(artifact);
        return client.postForObject(url, new HttpEntity<>(artifactData, postArtifactHeaders(userId, artifactData, requestId)), Artifact.class);
    }

    /** POST an update to an existing resource artifact (addressed by its UUID). */
    public Artifact updateResourceArtifact(String userId, String resourceUuid, Artifact artifact, String requestId) throws Exception {
        String url = buildRequestPath(RESOURCES_PATH, resourceUuid, ARTIFACTS_PATH, artifact.getArtifactUUID());
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Update SDC resource artifact. URL={}", url);
        String artifactData = SdcRestClientUtils.artifactToString(artifact);
        return client.postForObject(url, new HttpEntity<>(artifactData, postArtifactHeaders(userId, artifactData, requestId)), Artifact.class);
    }

    /** DELETE a resource artifact. */
    public void deleteResourceArtifact(String userId, String resourceUuid, String artifactId, String requestId) {
        String url = buildRequestPath(RESOURCES_PATH, resourceUuid, ARTIFACTS_PATH, artifactId);
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Delete SDC resource artifact. URL={}", url);
        client.exchange(url, HttpMethod.DELETE, new HttpEntity(postResourceHeaders(userId, requestId)), Artifact.class);
    }

    /** POST a new artifact onto a VF instance within a service. */
    public Artifact createVfInstanceArtifact(String userId, String serviceUuid, String normalizedInstanceName, Artifact artifact, String requestId) throws Exception {
        String url = buildRequestPath(SERVICES_PATH, serviceUuid, RESOURCE_INSTANCES_PATH, normalizedInstanceName, ARTIFACTS_PATH);
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Create SDC resource instance artifact. URL={}", url);
        String artifactData = SdcRestClientUtils.artifactToString(artifact);
        return client.postForObject(url, new HttpEntity<>(artifactData, postArtifactHeaders(userId, artifactData, requestId)), Artifact.class);
    }

    /** POST an update to an existing VF-instance artifact. */
    public Artifact updateVfInstanceArtifact(String userId, String serviceUuid, String normalizedInstanceName, Artifact artifact, String requestId) throws Exception {
        String url = buildRequestPath(SERVICES_PATH, serviceUuid, RESOURCE_INSTANCES_PATH, normalizedInstanceName, ARTIFACTS_PATH, artifact.getArtifactUUID());
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Update SDC resource instance artifact. URL={}", url);
        String artifactData = SdcRestClientUtils.artifactToString(artifact);
        return client.postForObject(url, new HttpEntity<>(artifactData, postArtifactHeaders(userId, artifactData, requestId)), Artifact.class);
    }

    /** GET monitoring references for a specific service version. */
    public ExternalReferencesMap getMonitoringReferences(String context, String uuid, String version, String requestId) {
        String url = buildRequestPath(context, uuid, VERSION_PATH, version, MONITORING_REFERENCES_PATH);
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Get SDC service monitoring references. URL={}", url);
        return getObject(url, requestId, ExternalReferencesMap.class);
    }

    /** DELETE an artifact from a VF instance. */
    public void deleteInstanceResourceArtifact(String userId, String context, String serviceUuid, String normalizedVfiName, String artifactUuid, String requestId) {
        String url = buildRequestPath(context, serviceUuid, RESOURCE_INSTANCES_PATH, normalizedVfiName, ARTIFACTS_PATH, artifactUuid);
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Delete SDC instance resource artifact. URL={}", url);
        client.exchange(url, HttpMethod.DELETE, new HttpEntity(postResourceHeaders(userId, requestId)), Artifact.class);
    }

    /** Headers for mutating calls: JSON content type plus USER_ID on top of the request id. */
    private HttpHeaders postResourceHeaders(String userId, String requestId) {
        HttpHeaders headers = requestHeader(requestId);
        headers.setContentType(MediaType.APPLICATION_JSON_UTF8);
        headers.add(USER_ID_HEADER, userId);
        return headers;
    }

    /** Artifact-upload headers: adds Content-MD5 = base64(hex(md5(body))) as SDC requires. */
    private HttpHeaders postArtifactHeaders(String userId, String artifact, String requestId) {
        HttpHeaders headers = postResourceHeaders(userId, requestId);
        String md5 = Base64Utils.encodeToString(DigestUtils.md5Hex(artifact).getBytes());
        headers.add(CONTENT_MD5_HEADER, md5);
        return headers;
    }

    /** Minimal header set: the ECOMP request id for tracing. */
    private HttpHeaders requestHeader(String requestId){
        HttpHeaders headers = new HttpHeaders();
        headers.add(ECOMP_REQUEST_ID_HEADER, requestId);
        return headers;
    }

    /** GET helper: issues the request with tracing headers and unmarshals the body. */
    private <T> T getObject(String url, String requestId, Class<T> clazz) {
        return client.exchange(url, HttpMethod.GET, new HttpEntity<>(requestHeader(requestId)), clazz).getBody();
    }

    // Joins path segments with '/' onto the catalog base URL.
    // NOTE(review): assumes 'uri' ends with a slash (or the first segment supplies one) — confirm.
    private String buildRequestPath(String... args){
        return uri + Stream.of(args).collect(Collectors.joining(SLASH));
    }
}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/ArtifactGroupType.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/ArtifactGroupType.java
new file mode 100644
index 0000000..98e78c6
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/ArtifactGroupType.java
@@ -0,0 +1,5 @@
+package org.onap.sdc.dcae.enums;
+
/** SDC artifact group types used by this client; only DEPLOYMENT artifacts are handled. */
public enum ArtifactGroupType {
    DEPLOYMENT
}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/ArtifactType.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/ArtifactType.java
new file mode 100644
index 0000000..2da4cc7
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/ArtifactType.java
@@ -0,0 +1,16 @@
+package org.onap.sdc.dcae.enums;
+
/**
 * DCAE artifact type labels recognized by SDC; OTHER is the catch-all for
 * anything not in the DCAE_* families.
 */
public enum ArtifactType {
    DCAE_TOSCA,
    DCAE_JSON,
    DCAE_POLICY,
    DCAE_DOC,
    DCAE_EVENT,
    DCAE_INVENTORY_TOSCA,
    DCAE_INVENTORY_JSON,
    DCAE_INVENTORY_POLICY,
    DCAE_INVENTORY_DOC,
    DCAE_INVENTORY_BLUEPRINT,
    DCAE_INVENTORY_EVENT,
    OTHER
}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/AssetType.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/AssetType.java
new file mode 100644
index 0000000..576643f
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/AssetType.java
@@ -0,0 +1,5 @@
+package org.onap.sdc.dcae.enums;
+
/** Kind of SDC asset a lifecycle operation targets: a resource or a service. */
public enum AssetType {
    RESOURCE, SERVICE
}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/LifecycleOperationType.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/LifecycleOperationType.java
new file mode 100644
index 0000000..80e01df
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/LifecycleOperationType.java
@@ -0,0 +1,16 @@
+package org.onap.sdc.dcae.enums;
+
+
/**
 * SDC lifecycle state-change operations. {@link #getValue()} returns the
 * exact path segment the SDC REST API expects (e.g. "checkin").
 */
public enum LifecycleOperationType {
    CHECKIN("checkin"), CHECKOUT("checkout"), CERTIFY("certify"), UNDO_CHECKOUT("undoCheckout");

    // REST path segment for this operation; final since enum state must be immutable.
    private final String value;

    LifecycleOperationType(String value){
        this.value = value;
    }

    /** Returns the REST path segment for this lifecycle operation. */
    public String getValue(){
        return value;
    }
}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/SdcConsumerInfo.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/SdcConsumerInfo.java
new file mode 100644
index 0000000..aecb61d
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/SdcConsumerInfo.java
@@ -0,0 +1,5 @@
+package org.onap.sdc.dcae.enums;
+
/** Keys of the consumer connection details extracted from the configured SDC URI. */
public enum SdcConsumerInfo {
    AUTH, INSTANCE_ID, CATALOG_URL
}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/AbstractSdncException.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/AbstractSdncException.java
new file mode 100644
index 0000000..360e28b
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/AbstractSdncException.java
@@ -0,0 +1,97 @@
+package org.onap.sdc.dcae.errormng;
+
+
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+
+import java.util.Arrays;
+import java.util.Formatter;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class AbstractSdncException {
+ private String messageId;
+
+ private String text;
+
+ private String[] variables;
+
+ private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+ private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+ private final static Pattern ERROR_PARAM_PATTERN = Pattern.compile("%\\d");
+
+ public AbstractSdncException() {
+ }
+
+ public AbstractSdncException(String messageId, String text, String[] variables) {
+ super();
+ this.messageId = messageId;
+ this.text = text;
+ this.variables = validateParameters(messageId, text, variables);
+ }
+
+ private String[] validateParameters(String messageId, String text, String[] variables) {
+ String[] res = null;
+ Matcher m = ERROR_PARAM_PATTERN.matcher(text);
+ int expectedParamsNum = 0;
+ while (m.find()) {
+ expectedParamsNum += 1;
+ }
+ int actualParamsNum = (variables != null) ? variables.length : 0;
+ if (actualParamsNum < expectedParamsNum) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(),
+ "Received less parameters than expected for error with messageId {}, expected: {}, actual: {}. Missing parameters are padded with null values.",
+ messageId, expectedParamsNum, actualParamsNum);
+ } else if (actualParamsNum > expectedParamsNum) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(),
+ "Received more parameters than expected for error with messageId {}, expected: {}, actual: {}. Extra parameters are ignored.",
+ messageId, expectedParamsNum, actualParamsNum);
+ }
+ if (variables != null) {
+ res = Arrays.copyOf(variables, expectedParamsNum);
+ }
+
+ return res;
+ }
+
+ public String getMessageId() {
+ return this.messageId;
+ }
+
+ public String getText() {
+ return text;
+ }
+
+ public String[] getVariables() {
+ return variables;
+ }
+
+ public void setMessageId(String messageId) {
+ this.messageId = messageId;
+ }
+
+ public void setText(String text) {
+ this.text = text;
+ }
+
+ public void setVariables(String[] variables) {
+ this.variables = variables;
+ }
+
+ public String getFormattedErrorMessage() {
+ String res;
+ if (variables != null && variables.length > 0) {
+ Formatter formatter = new Formatter();
+ try {
+ res = formatter.format(this.text.replaceAll("%\\d", "%s"), (Object[]) this.variables).toString();
+ } finally {
+ formatter.close();
+ }
+ } else {
+ res = this.text;
+ }
+ return res;
+ }
+}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/BaseException.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/BaseException.java
new file mode 100644
index 0000000..b559634
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/BaseException.java
@@ -0,0 +1,61 @@
+package org.onap.sdc.dcae.errormng;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.google.gson.Gson;
+import org.springframework.http.HttpStatus;
+import org.springframework.web.client.HttpClientErrorException;
+
+public class BaseException extends HttpClientErrorException {
+
+ private static Gson gson = new Gson();
+
+ protected RequestError requestError;
+
+ public RequestError getRequestError() {
+ return requestError;
+ }
+
+ public void setRequestError(RequestError requestError) {
+ this.requestError = requestError;
+ }
+
+ public BaseException(HttpClientErrorException theError) {
+ super(theError.getStatusCode());
+ String body = theError.getResponseBodyAsString();
+ if (body != null) {
+ requestError = extractRequestError(body);
+ }
+ }
+
+ public BaseException(HttpStatus status, RequestError re){
+ super(status);
+ requestError = re;
+ }
+
+ private RequestError extractRequestError(String error) {
+ ResponseFormat responseFormat = gson.fromJson(error, ResponseFormat.class);
+ return responseFormat.getRequestError();
+ }
+
+ @JsonIgnore
+ public String getMessageId() {
+ return requestError.getMessageId();
+ }
+
+ @JsonIgnore
+ public String[] getVariables() {
+ return requestError.getVariables();
+ }
+
+ @JsonIgnore
+ public String getText(){
+ return requestError.getText();
+ }
+
+ @Override
+ @JsonIgnore
+ public String getMessage() {
+ return requestError.getFormattedMessage();
+ }
+
+} \ No newline at end of file
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/OkResponseInfo.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/OkResponseInfo.java
new file mode 100644
index 0000000..53bdf3e
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/OkResponseInfo.java
@@ -0,0 +1,8 @@
+package org.onap.sdc.dcae.errormng;
+
/** Non-error response descriptor carried in the SDC response envelope. */
public class OkResponseInfo extends AbstractSdncException {

    public OkResponseInfo(String messageId, String text, String[] variables) {
        super(messageId, text, variables);
    }
}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/PolicyException.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/PolicyException.java
new file mode 100644
index 0000000..3fc2d71
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/PolicyException.java
@@ -0,0 +1,11 @@
+package org.onap.sdc.dcae.errormng;
+
/**
 * SDC "policyException" error element: a request-side failure reported with a
 * message id, text template and substitution variables.
 */
public class PolicyException extends AbstractSdncException {

	public PolicyException(String messageId, String text, String[] variables) {
		super(messageId, text, variables);
	}

	// No-arg constructor - presumably required by JSON (de)serialization; verify.
	public PolicyException() {
	}
}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/RequestError.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/RequestError.java
new file mode 100644
index 0000000..00fe3f2
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/RequestError.java
@@ -0,0 +1,65 @@
+package org.onap.sdc.dcae.errormng;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+
+import java.util.List;
+
+@JsonInclude(JsonInclude.Include.NON_NULL)
+public class RequestError {
+ private PolicyException policyException;
+ private ServiceException serviceException;
+ private OkResponseInfo okResponseInfo;
+ private List<ServiceException> serviceExceptions;
+
+ public PolicyException getPolicyException() {
+ return policyException;
+ }
+
+ public ServiceException getServiceException() {
+ return serviceException;
+ }
+
+ public OkResponseInfo getOkResponseInfo() {
+ return okResponseInfo;
+ }
+
+ public void setPolicyException(PolicyException policyException) {
+ this.policyException = policyException;
+ }
+
+ void setServiceException(ServiceException serviceException) {
+ this.serviceException = serviceException;
+ }
+
+ void setOkResponseInfo(OkResponseInfo okResponseInfo) {
+ this.okResponseInfo = okResponseInfo;
+ }
+
+ public List<ServiceException> getServiceExceptions() {
+ return serviceExceptions;
+ }
+
+ void setServiceExceptions(List<ServiceException> serviceExceptions) {
+ this.serviceExceptions = serviceExceptions;
+ }
+
+ String getFormattedMessage() {
+ return getError().getFormattedErrorMessage();
+ }
+
+ String getMessageId() {
+ return getError().getMessageId();
+ }
+
+ String[] getVariables() {
+ return getError().getVariables();
+ }
+
+ String getText() {
+ return getError().getText();
+ }
+
+ AbstractSdncException getError() {
+ return null != serviceException ? serviceException : null != policyException ? policyException : okResponseInfo;
+ }
+} \ No newline at end of file
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/ResponseFormat.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/ResponseFormat.java
new file mode 100644
index 0000000..ffdce70
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/ResponseFormat.java
@@ -0,0 +1,75 @@
+package org.onap.sdc.dcae.errormng;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonInclude;
+
+import java.util.List;
+
+@JsonInclude(JsonInclude.Include.NON_NULL)
+public class ResponseFormat {
+
+ @JsonIgnore
+ private int status;
+ private RequestError requestError;
+ private String notes = "";
+
+ public String getNotes() {
+ return notes;
+ }
+
+ void setNotes(String notes) {
+ this.notes = notes;
+ }
+
+ public ResponseFormat() {
+ super();
+ }
+
+ public ResponseFormat(int status) {
+ super();
+ this.status = status;
+ }
+
+
+ public void setStatus(int status) {
+ this.status = status;
+ }
+
+ public Integer getStatus() {
+ return status;
+ }
+
+ public RequestError getRequestError() {
+ return requestError;
+ }
+
+ public void setRequestError(RequestError requestError) {
+ this.requestError = requestError;
+ }
+
+ void setPolicyException(PolicyException policyException) {
+ this.requestError = new RequestError();
+ requestError.setPolicyException(policyException);
+ }
+
+ void setServiceException(ServiceException serviceException) {
+ this.requestError = new RequestError();
+ requestError.setServiceException(serviceException);
+ }
+
+ void setOkResponseInfo(OkResponseInfo okResponseInfo) {
+ this.requestError = new RequestError();
+ requestError.setOkResponseInfo(okResponseInfo);
+ }
+
+ void setServiceExceptions(List<ServiceException> serviceExceptions) {
+ this.requestError = new RequestError();
+ requestError.setServiceExceptions(serviceExceptions);
+ }
+
+ @Override
+ public String toString() {
+ return "ResponseFormat[" + "status=" + status + ", requestError=" + requestError + ']';
+ }
+
+}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/ServiceException.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/ServiceException.java
new file mode 100644
index 0000000..163a07f
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/ServiceException.java
@@ -0,0 +1,12 @@
+package org.onap.sdc.dcae.errormng;
+
/**
 * SDC "serviceException" error element: a server-side failure reported with a
 * message id, text template and substitution variables.
 */
public class ServiceException extends AbstractSdncException {

	public ServiceException(String messageId, String text, String[] variables) {
		super(messageId, text, variables);
	}

	// No-arg constructor - presumably required by JSON (de)serialization; verify.
	public ServiceException() {
	}

}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/Normalizers.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/Normalizers.java
new file mode 100644
index 0000000..4719607
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/Normalizers.java
@@ -0,0 +1,34 @@
+package org.onap.sdc.dcae.utils;
+
+import org.apache.commons.lang3.text.WordUtils;
+
+import java.util.regex.Pattern;
+
/**
 * String normalization helpers for SDC component names, artifact labels and
 * component instance names.
 */
public final class Normalizers {

	private static final Pattern COMPONENT_NAME_DELIMITER_PATTERN = Pattern.compile("[.\\-_]+");
	private static final Pattern ARTIFACT_LABEL_DELIMITER_PATTERN = Pattern.compile("[ \\-+._]+");
	private static final Pattern COMPONENT_INSTANCE_NAME_DELIMITER_PATTERN = Pattern.compile("[ \\-.]+");

	// Utility class - not meant to be instantiated.
	private Normalizers() {
	}

	/**
	 * Normalizes a component name to UpperCamelCase: lower-cases the input,
	 * treats '.', '-', '_' (and literal spaces) as word separators and
	 * concatenates the capitalized words, e.g.
	 * {@code "My_Monitoring_Template_example"} -> {@code "MyMonitoringTemplateExample"}.
	 * NOTE(review): toLowerCase() uses the default locale - confirm whether
	 * Locale.ROOT semantics are required for non-English locales.
	 */
	public static String normalizeComponentName(String name) {
		String spaceDelimited = COMPONENT_NAME_DELIMITER_PATTERN.matcher(name.toLowerCase()).replaceAll(" ");
		// StringBuilder (not the synchronized StringBuffer) - no shared mutation here
		StringBuilder normalized = new StringBuilder();
		for (String word : spaceDelimited.split(" ")) {
			normalized.append(capitalize(word));
		}
		return normalized.toString();
	}

	/** Strips spaces, '-', '+', '.', '_' from the label and lower-cases it. */
	public static String normalizeArtifactLabel(String label) {
		return ARTIFACT_LABEL_DELIMITER_PATTERN.matcher(label).replaceAll("").toLowerCase();
	}

	/** Strips spaces, '-' and '.' from the name and lower-cases it. */
	public static String normalizeComponentInstanceName(String name) {
		return COMPONENT_INSTANCE_NAME_DELIMITER_PATTERN.matcher(name).replaceAll("").toLowerCase();
	}

	// Upper-cases the first character of a single word; stdlib replacement for
	// the deprecated commons-lang WordUtils.capitalize (equivalent for the
	// single-word, already-lower-cased tokens produced above). Empty tokens
	// from leading/repeated separators pass through unchanged.
	private static String capitalize(String word) {
		if (word.isEmpty()) {
			return word;
		}
		return Character.toUpperCase(word.charAt(0)) + word.substring(1);
	}
}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/SDCResponseErrorHandler.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/SDCResponseErrorHandler.java
new file mode 100644
index 0000000..64da66a
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/SDCResponseErrorHandler.java
@@ -0,0 +1,43 @@
+package org.onap.sdc.dcae.utils;
+
+import com.google.gson.Gson;
+import org.onap.sdc.dcae.catalog.asdc.ASDCException;
+import org.onap.sdc.dcae.errormng.RequestError;
+import org.onap.sdc.dcae.errormng.ResponseFormat;
+import org.springframework.http.client.ClientHttpResponse;
+import org.springframework.web.client.DefaultResponseErrorHandler;
+import org.springframework.web.client.HttpClientErrorException;
+import org.springframework.web.client.ResponseErrorHandler;
+
+import java.io.IOException;
+
+public class SDCResponseErrorHandler implements ResponseErrorHandler {
+
+ private ResponseErrorHandler errorHandler = new DefaultResponseErrorHandler();
+
+ private static Gson gson = new Gson();
+
+ public void handleError(ClientHttpResponse response) throws IOException {
+ try{
+ errorHandler.handleError(response);
+ } catch (HttpClientErrorException e) {
+ RequestError re = extractRequestError(e);
+ throw null == re ? e : new ASDCException(e.getStatusCode(), re);
+ }
+ }
+
+ public boolean hasError(ClientHttpResponse response) throws IOException{
+ return errorHandler.hasError(response);
+ }
+
+ private RequestError extractRequestError(HttpClientErrorException error) {
+ try {
+ String body = error.getResponseBodyAsString();
+ ResponseFormat responseFormat = gson.fromJson(body, ResponseFormat.class);
+ return responseFormat.getRequestError();
+ } catch (Exception e) {
+ return null;
+ }
+ }
+
+}
diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/SdcRestClientUtils.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/SdcRestClientUtils.java
new file mode 100644
index 0000000..33c2f49
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/SdcRestClientUtils.java
@@ -0,0 +1,85 @@
+package org.onap.sdc.dcae.utils;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import org.onap.sdc.dcae.composition.restmodels.sdc.Artifact;
+import org.onap.sdc.dcae.enums.ArtifactGroupType;
+import org.onap.sdc.dcae.enums.SdcConsumerInfo;
+import org.springframework.util.Base64Utils;
+import org.springframework.util.StringUtils;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class SdcRestClientUtils {
+
+ private static final String SDC_CATALOG_PATH = "/sdc/v1/catalog/";
+
+ // TODO consider moving params elsewhere (user/password/instanceId can be constant)
+ public static EnumMap<SdcConsumerInfo, String> extractConsumerInfoFromUri(URI configUri) {
+ EnumMap<SdcConsumerInfo, String> userInfoMap = new EnumMap<>(SdcConsumerInfo.class);
+ String userInfo = configUri.getUserInfo();
+ if (userInfo != null) {
+ userInfoMap.put(SdcConsumerInfo.AUTH, "Basic "+ Base64Utils.encodeToString(userInfo.getBytes()));
+ }
+ String fragment = configUri.getFragment();
+ if (fragment == null)
+ throw new IllegalArgumentException("The URI must contain a fragment specification, to be used as SDC instance id");
+ userInfoMap.put(SdcConsumerInfo.INSTANCE_ID, fragment);
+ try {
+ userInfoMap.put(SdcConsumerInfo.CATALOG_URL, new URI(configUri.getScheme(), null, configUri.getHost(), configUri.getPort(), configUri.getPath()+SDC_CATALOG_PATH, null, null).toString());
+ }
+ catch (URISyntaxException se) {
+ throw new IllegalArgumentException("Invalid uri", se);
+ }
+ return userInfoMap;
+ }
+
+ public static String buildResourceFilterQuery(String resourceType, String category, String subcategory) {
+ List<String> filters = new ArrayList<>();
+ if(!StringUtils.isEmpty(resourceType))
+ filters.add("resourceType="+resourceType);
+ if(!StringUtils.isEmpty(category))
+ filters.add("category="+category);
+ if(!StringUtils.isEmpty(subcategory))
+ filters.add("subCategory="+subcategory);
+ return "?"+filters.stream().collect(Collectors.joining("&"));
+ }
+
+ public static UserRemarks buildUserRemarksObject(String userRemarks) {
+ return new UserRemarks(userRemarks);
+ }
+
+ private static class UserRemarks {
+ private String userRemarks;
+
+ private UserRemarks(String userRemarks) {
+ this.userRemarks = userRemarks;
+ }
+
+ public String getUserRemarks() {
+ return userRemarks;
+ }
+ }
+
+ public static String artifactToString(Artifact artifact) throws JsonProcessingException {
+ ObjectMapper mapper = new ObjectMapper();
+ return mapper.writeValueAsString(artifact);
+ }
+
+ public static Artifact generateDeploymentArtifact(String description, String name, String type, String label, byte[] payload){
+ Artifact artifact = new Artifact();
+ artifact.setDescription(description);
+ artifact.setArtifactName(name);
+ artifact.setArtifactGroupType(ArtifactGroupType.DEPLOYMENT.name());
+ artifact.setArtifactType(type);
+ artifact.setArtifactLabel(label);
+ artifact.setPayloadData(Base64Utils.encodeToString(payload));
+ return artifact;
+ }
+}
diff --git a/dcaedt_catalog/asdc/src/test/org/onap/sdc/dcae/utils/NormalizersTest.java b/dcaedt_catalog/asdc/src/test/org/onap/sdc/dcae/utils/NormalizersTest.java
new file mode 100644
index 0000000..bf06e22
--- /dev/null
+++ b/dcaedt_catalog/asdc/src/test/org/onap/sdc/dcae/utils/NormalizersTest.java
@@ -0,0 +1,51 @@
+package org.onap.sdc.dcae.utils;
+
+import static org.assertj.core.api.Assertions.*;
+
+import org.assertj.core.api.Assertions;
+import org.junit.Test;
+import org.onap.sdc.dcae.utils.Normalizers;
+
+
+public class NormalizersTest {
+
+ @Test
+ public void normalizeVFCMTName_withDot_withoutDot(){
+ Assertions.assertThat(Normalizers.normalizeComponentName("my.dot")).isEqualTo("MyDot");
+ }
+
+ @Test
+ public void normalizeVFCMTName_withUnderscore_withoutUnderscore(){
+ Assertions.assertThat(Normalizers.normalizeComponentName("My_Monitoring_Template_example")).isEqualTo("MyMonitoringTemplateExample");
+ }
+
+ @Test
+ public void normalizeVFCMTName_withWhiteSpace_withoutWhiteSpace(){
+ Assertions.assertThat(Normalizers.normalizeComponentName(" my dot ")).isEqualTo("MyDot");
+ }
+
+ @Test
+ public void normalizeVFCMTName_withDash_withoutDash(){
+ Assertions.assertThat(Normalizers.normalizeComponentName("My-Monitoring-Template-example")).isEqualTo("MyMonitoringTemplateExample");
+ }
+
+ @Test
+ public void normalizeVFCMTName_notCapitalized_capitalized(){
+ Assertions.assertThat(Normalizers.normalizeComponentName("my monitoring template eXAMPLE")).isEqualTo("MyMonitoringTemplateExample");
+ }
+
+ @Test
+ public void normalizeArtifactLabel_withDash_withoutDash(){
+ Assertions.assertThat(Normalizers.normalizeArtifactLabel("blueprint-other")).isEqualTo("blueprintother");
+ }
+
+ @Test
+ public void normalizeArtifactLabel_withWhiteSpace_withoutWhiteSpace(){
+ Assertions.assertThat(Normalizers.normalizeArtifactLabel(" blueprint other")).isEqualTo("blueprintother");
+ }
+
+ @Test
+ public void normalizeArtifactLabel_withPlus_withoutPlus(){
+ Assertions.assertThat(Normalizers.normalizeArtifactLabel("+blueprint+++other+")).isEqualTo("blueprintother");
+ }
+}
diff --git a/dcaedt_catalog/commons/pom.xml b/dcaedt_catalog/commons/pom.xml
new file mode 100644
index 0000000..d285e1b
--- /dev/null
+++ b/dcaedt_catalog/commons/pom.xml
@@ -0,0 +1,135 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.onap.sdc.dcae</groupId>
+ <artifactId>DCAE-DT-Catalog</artifactId>
+ <version>1806.0.1-SNAPSHOT</version>
+ </parent>
+ <artifactId>DCAE-DT-Catalog-Commons</artifactId>
+ <packaging>jar</packaging>
+ <name>DCAE DT Catalog Commons</name>
+
+ <build>
+ <sourceDirectory>src/main/java</sourceDirectory>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>3.1</version>
+ <configuration>
+ <source>1.8</source>
+ <target>1.8</target>
+ <encoding>${project.build.sourceEncoding}</encoding>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>2.10</version>
+ <executions>
+ <execution>
+ <id>copy-dependencies</id>
+ <phase>package</phase>
+ <goals>
+ <goal>copy-dependencies</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>${project.build.directory}/deps</outputDirectory>
+ <overWriteReleases>false</overWriteReleases>
+ <overWriteSnapshots>false</overWriteSnapshots>
+ <overWriteIfNewer>true</overWriteIfNewer>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>buildnumber-maven-plugin</artifactId>
+ <version>1.4</version>
+ <executions>
+ <execution>
+ <phase>validate</phase>
+ <goals>
+ <goal>create</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <doCheck>false</doCheck>
+ <doUpdate>false</doUpdate>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>2.1</version>
+ <configuration>
+ <archive>
+ <manifest>
+ <addDefaultImplementationEntries>true</addDefaultImplementationEntries>
+ </manifest>
+ <manifestEntries>
+ <Implementation-Build>${buildNumber}</Implementation-Build>
+ </manifestEntries>
+ </archive>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpasyncclient</artifactId>
+ <version>4.1</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>2.4</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ <version>1.3</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-beanutils</groupId>
+ <artifactId>commons-beanutils</artifactId>
+ <version>1.9.3</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-jxpath</groupId>
+ <artifactId>commons-jxpath</artifactId>
+ <version>1.3</version>
+ </dependency>
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>17.0</version>
+ </dependency>
+ <dependency>
+ <groupId>org.yaml</groupId>
+ <artifactId>snakeyaml</artifactId>
+ <version>1.17</version>
+ </dependency>
+ <dependency>
+ <groupId>org.json</groupId>
+ <artifactId>json</artifactId>
+ <version>20160212</version>
+ </dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ <version>2.7.8</version>
+ </dependency>
+ <dependency>
+ <groupId>com.github.wnameless</groupId>
+ <artifactId>json-flattener</artifactId>
+ <version>0.2.2</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-web</artifactId>
+ <version>4.3.5.RELEASE</version>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Action.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Action.java
new file mode 100644
index 0000000..fb36950
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Action.java
@@ -0,0 +1,11 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import org.onap.sdc.dcae.catalog.commons.Future;
+
/**
 * A deferred computation producing its result asynchronously via a
 * {@link Future} once executed.
 *
 * @param <T> type of the result the action yields
 */
public interface Action<T> {

	public Future<T> execute();

}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Actions.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Actions.java
new file mode 100644
index 0000000..132b0c0
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Actions.java
@@ -0,0 +1,201 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.util.List;
+import java.util.LinkedList;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.concurrent.CountDownLatch;
+
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.catalog.commons.Action;
+import org.onap.sdc.dcae.catalog.commons.Future;
+import org.onap.sdc.dcae.catalog.commons.FutureHandler;
+import org.onap.sdc.dcae.catalog.commons.Futures;
+
+/**
+ */
+public interface Actions {
+
+ /** */
+ public static interface CompoundAction<T> extends Action<List<T>> {
+
+ public CompoundAction<T> addAction(Action<T> theAction);
+
+ public List<Action<T>> actions();
+
+ public Future<List<T>> execute();
+ }
+
+
+ public static class BasicCompoundAction<T> implements CompoundAction<T> {
+
+ private LinkedList<Action<T>> actions = new LinkedList<Action<T>>();
+
+
+
+ public CompoundAction<T> addAction(Action<T> theAction) {
+ this.actions.add(theAction);
+ return this;
+ }
+
+ public List<Action<T>> actions() {
+ return this.actions;
+ }
+
+ public Future<List<T>> execute() {
+ CompoundFuture<T> cf = new CompoundFuture<T>(this.actions.size());
+ for (Action a: this.actions)
+ cf.addFuture(a.execute());
+ return cf;
+ }
+ }
+
+
+ public static class CompoundFuture<T> extends Futures.BasicFuture<List<T>> {
+
+ private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+ private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+ private LinkedList<Future<T>> futures = new LinkedList<Future<T>>();
+ private FutureHandler<T> hnd;
+
+ CompoundFuture(int theActionCount) {
+
+ hnd = new Futures.BasicHandler<T>(new CountDownLatch(theActionCount)) {
+
+ private List<T> results = new ArrayList<T>(Collections.nCopies(theActionCount, null));
+
+ protected void process(Future<T> theResult) {
+ synchronized(CompoundFuture.this) {
+ if (theResult.failed()) {
+ CompoundFuture.this.cause(theResult.cause());
+ //and stop processing of other results
+ this.results = null;
+ //??
+ }
+ else {
+ if (this.results != null)
+ this.results.set(futures.indexOf(theResult), theResult.result());
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Got result for action {}. Count at {}", futures.indexOf(theResult), this.latch.getCount());
+ }
+ if (this.latch.getCount() == 1) {//this was the last result
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Got all results: {}", this.results);
+ CompoundFuture.this.result(this.results);
+ }
+ }
+ }
+ };
+ }
+
+ CompoundFuture<T> addFuture(Future<T> theFuture) {
+ synchronized(this) {
+ futures.add(theFuture);
+ theFuture.setHandler(this.hnd);
+ }
+ return this;
+ }
+
+ }
+
+/*
+ public static class CompoundFutureHandler<T> implements FutureHandler<T> {
+
+ protected List<T> result = null;
+ protected List<Throwable> error = null;
+ protected CountDownLatch latch = null;
+
+ CompoundFutureHandler(int theResultCount) {
+ this(new CountDownLatch(theResultCount));
+ }
+
+ public void handle(Future<T> theResult) {
+ if (this.latch != null) {
+ this.latch.countDown();
+ }
+ }
+
+ public T result()
+ throws InterruptedException, RuntimeException {
+ return result(true);
+ }
+
+ public BasicHandler<T> waitForCompletion() throws InterruptedException {
+ this.latch.await();
+ return this;
+ }
+
+ }
+*/
+
+ public static class Sequence<T> implements Action<List<T>> {
+
+ private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+ private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+ private List<Action<T>> actions = new LinkedList<Action<T>>();
+ private int current = 0;
+ private SequenceFuture<T> future = new SequenceFuture<T>();
+
+ public Sequence<T> add(Action<T> theAction) {
+ if (this.current > 0)
+ throw new IllegalStateException("In execution");
+ this.actions.add(theAction);
+ return this;
+ }
+
+ /* we allow 'early' access to the future so that a client can pass its reference while
+ * it still builds the sequence, for example.
+ */
+ public Future<List<T>> future() {
+ return this.future;
+ }
+
+ //need to add protection when for the 'no action' case
+ public Future<List<T>> execute() {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Starting serialized execution of {}", actions);
+ if (hasNext())
+ next().execute().setHandler(future.hnd);
+ return this.future;
+ }
+
+ protected boolean hasNext() {
+ return this.current < actions.size();
+ }
+
+ protected Action next() {
+ return actions.get(this.current++);
+ }
+
+ private class SequenceFuture<T> extends Futures.BasicFuture<List<T>> {
+
+ private List<T> results = new LinkedList<T>();
+ private FutureHandler<T> hnd = new Futures.BasicHandler<T>() {
+
+ protected void process(Future<T> theResult) {
+
+ if (theResult.failed()) {
+ SequenceFuture.this.cause(theResult.cause());
+ //and stop processing of other results
+ }
+ else {
+ SequenceFuture.this.results.add(theResult.result());
+ if (Sequence.this.hasNext()) {
+ Sequence.this.next().execute().setHandler(this);
+ }
+ else {
+ SequenceFuture.this.result(SequenceFuture.this.results);
+ }
+ }
+ }
+ };
+
+
+ }
+
+
+
+ }
+
+}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Future.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Future.java
new file mode 100644
index 0000000..c50f467
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Future.java
@@ -0,0 +1,35 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import org.onap.sdc.dcae.catalog.commons.Future;
+import org.onap.sdc.dcae.catalog.commons.FutureHandler;
+
/**
 * Modeled after the vertx future: a handle on an asynchronous outcome that is
 * completed exactly once, either with a result or with a failure cause.
 *
 * @param <T> type of the carried result
 */
public interface Future<T> {

	/** @return the result when completed successfully, otherwise null */
	public T result();

	/** Completes this future successfully with the given result. */
	public Future<T> result(T theResult);

//rename 'cause' to 'failure'

	/** @return the failure cause when completed unsuccessfully, otherwise null */
	public Throwable cause();

	/** Completes this future as failed with the given error. */
	public Future<T> cause(Throwable theError);

	/** @return true once completed with a result */
	public boolean succeeded();

	/** @return true once completed with a failure */
	public boolean failed();

	/** @return true once the future completed, successfully or not */
	public boolean complete();

	/** Blocks until completion; returns the result or throws the failure cause. */
	public T waitForResult() throws Exception;

	//public T waitForResult(long theTimeout) throws Exception;

	/** Blocks until the future completes, without unwrapping the outcome. */
	public Future<T> waitForCompletion() throws InterruptedException;

	/** Registers the completion callback. */
	public Future<T> setHandler(FutureHandler<T> theHandler);

}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/FutureHandler.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/FutureHandler.java
new file mode 100644
index 0000000..b689412
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/FutureHandler.java
@@ -0,0 +1,13 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import org.onap.sdc.dcae.catalog.commons.Future;
+
/**
 * Modeled after the vertx future: callback invoked when a {@link Future}
 * completes, successfully or not.
 *
 * @param <T> result type of the observed future
 */
@FunctionalInterface
public interface FutureHandler<T> {

	public void handle(Future<T> theResult);

}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Futures.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Futures.java
new file mode 100644
index 0000000..ffaf42b
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Futures.java
@@ -0,0 +1,257 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.util.List;
+import java.util.LinkedList;
+import java.util.Collections;
+
+import java.util.concurrent.CountDownLatch;
+import java.util.function.Function;
+
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.dcae.catalog.commons.Future;
+import org.onap.sdc.dcae.catalog.commons.FutureHandler;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+
+
/**
 * Minimal future/promise toolkit (vertx-style) used by the catalog code for
 * asynchronous composition: factories, a basic single-completion future, a
 * latch-based blocking handler and a multi-future accumulator.
 * NOTE(review): the class-level type parameter {@code <T>} is never used (all
 * static factories declare their own) - candidate for removal.
 */
public class Futures<T> {

	private Futures() {
	}


	/** @return a future already completed with the given failure */
	public static <T> Future<T> failedFuture(Throwable theError) {
		return new BasicFuture<T>()
			.cause(theError);
	}

	/** @return a future already completed with the given result */
	public static <T> Future<T> succeededFuture(T theResult) {
		return new BasicFuture<T>()
			.result(theResult);
	}

	/** @return a fresh, incomplete future */
	public static <T> Future<T> future() {
		return new BasicFuture<T>();
	}

	/** Chains a result transformation onto a future, passing failure causes through unchanged. */
	public static <U,V> Future<V> advance(Future<U> theStep,
										final Function<U,V> theResultFunction) {
		return advance(theStep, theResultFunction, Function.identity());
	}

	/**
	 * Chains a future: when theStep completes, the returned future completes
	 * with the transformed result, or fails with the transformed cause.
	 */
	public static <U,V> Future<V> advance(Future<U> theStep,
										final Function<U,V> theResultFunction,
										final Function<Throwable, Throwable> theErrorFunction) {
		final Future<V> adv = new BasicFuture<V>();
		theStep.setHandler(new FutureHandler<U>() {
			public void handle(Future<U> theResult) {
				if (theResult.failed())
					adv.cause(theErrorFunction.apply(theResult.cause()));
				else
					adv.result(theResultFunction.apply(theResult.result()));
			}
		});
		return adv;
	}

	/**
	 * Plain single-completion future.
	 * NOTE(review): completion and setHandler are not synchronized against each
	 * other - confirm completion happens on a single thread, or accept the
	 * resulting race window.
	 */
	public static class BasicFuture<T> implements Future<T> {

		protected boolean succeeded,
											failed;

		protected FutureHandler<T> handler;
		protected Throwable cause;
		protected T result;


		protected BasicFuture() {
		}

		public T result() {
			return this.result;
		}

		/** Completes successfully: records the result, clears failure state, notifies the handler. */
		public Future<T> result(T theResult) {
			this.result = theResult;
			this.succeeded = true;
			this.cause = null;
			this.failed = false;
			callHandler();
			return this;
		}

		public Throwable cause() {
			return this.cause;
		}

		/** Completes as failed: records the cause, clears result state, notifies the handler. */
		public Future<T> cause(Throwable theCause) {
			this.cause = theCause;
			this.failed = true;
			this.result = null;
			this.succeeded = false;
			callHandler();
			return this;
		}

		public boolean succeeded() {
			return this.succeeded;
		}

		public boolean failed() {
			return this.failed;
		}

		public boolean complete() {
			return this.failed || this.succeeded;
		}

		/** Registers the handler; fires it immediately when the future already completed. */
		public Future<T> setHandler(FutureHandler<T> theHandler) {
			this.handler = theHandler;
			callHandler();
			return this;
		}

		/** Blocks until completion, then returns the result or throws the recorded cause. */
		public T waitForResult() throws Exception {
			BasicHandler<T> hnd = buildHandler();
			setHandler(hnd);
			hnd.waitForCompletion();
			if (failed())
				throw (Exception)cause();
			else
				return result();
		}

		/** Blocks until completion without unwrapping the outcome. */
		public Future<T> waitForCompletion() throws InterruptedException {
			BasicHandler<T> hnd = buildHandler();
			setHandler(hnd);
			hnd.waitForCompletion();
			return this;
		}

		// invoke the handler only once the future has an outcome
		protected void callHandler() {
			if (this.handler != null && complete()) {
				this.handler.handle(this);
			}
		}

		protected BasicHandler<T> buildHandler() {
			return new BasicHandler<T>();
		}
	}


	/** Latch-based handler: records the outcome and lets callers block until it arrives. */
	public static class BasicHandler<T>
								implements FutureHandler<T> {

		protected T result = null;
		protected Throwable error = null;
		protected CountDownLatch latch = null;

		BasicHandler() {
			this(new CountDownLatch(1));
		}

		// the latch count determines how many results are awaited before release
		BasicHandler(CountDownLatch theLatch) {
			this.latch = theLatch;
		}

		public void handle(Future<T> theResult) {
			process(theResult);
			if (this.latch != null) {
				this.latch.countDown();
			}
		}

		// subclasses override to customize result accumulation
		protected void process(Future<T> theResult) {
			if (theResult.failed()) {
				this.error = theResult.cause();
			}
			else {
				this.result = theResult.result();
			}
		}

		/** Returns the result, optionally blocking first; wraps a recorded failure in a RuntimeException. */
		public T result(boolean doWait)
			throws InterruptedException, RuntimeException {
			if (doWait) {
				waitForCompletion();
			}
			if (null == this.error)
				return this.result;

			throw new RuntimeException(this.error);
		}

		public T result()
			throws InterruptedException, RuntimeException {
			return result(true);
		}

		/** Blocks until all awaited results have been handled. */
		public BasicHandler<T> waitForCompletion() throws InterruptedException {
			this.latch.await();
			return this;
		}
	}

	/** Future over a fixed set of futures; each result lands at the index its future was added at. */
	public static class Accumulator<T> extends BasicFuture<List<T>>
																		implements Future<List<T>> {

		protected List<Future<T>> futures = new LinkedList<Future<T>>();
		//protected List<T> results = new LinkedList<T>();
		protected BasicHandler<T> handler = null;

		private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
		private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();

		public Accumulator() {
			this.result = new LinkedList<T>();
		}

		/** Registers a future to accumulate; must be called before accumulate(). */
		public Accumulator<T> add(Future<T> theFuture) {
			debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Intersection add");
			this.futures.add(theFuture);
			this.result.add(null);
			return this;
		}

		// NOTE(review): stub - logs but does not actually merge the given
		// accumulator's futures into this one.
		public Accumulator<T> addAll(Accumulator<T> theFutures) {

			debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Intersection addAll");

			return this;
		}

		/** Seals the future list and starts listening; completes once all registered futures completed. */
		public Future<List<T>> accumulate() {
			this.futures = Collections.unmodifiableList(this.futures);
			this.handler = new BasicHandler<T>(new CountDownLatch(this.futures.size())) {
				protected void process(Future<T> theResult) {
					if (theResult.failed()) {
						Accumulator.this.cause = theResult.cause();
					}
					else {
						Accumulator.this.result.set(
							Accumulator.this.futures.indexOf(theResult), theResult.result());
					}
					if (this.latch.getCount() == 1) {
						if (Accumulator.this.cause != null)
							Accumulator.this.cause(Accumulator.this.cause);
						else
							Accumulator.this.result(Accumulator.this.result);
					}
				}
			};
			futures.stream()
						.forEach(f -> f.setHandler(this.handler));

			return this;
		}

	}


}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Http.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Http.java
new file mode 100644
index 0000000..0f28495
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Http.java
@@ -0,0 +1,107 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.util.List;
+
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+import org.springframework.http.client.SimpleClientHttpRequestFactory;
+import org.springframework.http.converter.HttpMessageConverter;
+import org.springframework.util.concurrent.ListenableFutureCallback;
+import org.springframework.web.client.AsyncRestTemplate;
+import org.springframework.web.client.HttpClientErrorException;
+import org.springframework.web.client.RestClientException;
+import org.springframework.web.client.RestTemplate;
+
+public class Http {
+
+ protected Http() {
+ }
+
+
+ public static <T> Future<T> exchange(String theUri, HttpMethod theMethod, HttpEntity theRequest, Class<T> theResponseType) {
+
+ AsyncRestTemplate restTemplate = new AsyncRestTemplate();
+
+ List<HttpMessageConverter<?>> converters = restTemplate.getMessageConverters();
+ converters.add(0, new JSONHttpMessageConverter());
+ restTemplate.setMessageConverters(converters);
+
+ HttpFuture<T> result = new HttpFuture<T>();
+ try {
+ restTemplate
+ .exchange(theUri, theMethod, theRequest, theResponseType)
+ .addCallback(result.callback);
+ }
+ catch (RestClientException rcx) {
+ return Futures.failedFuture(rcx);
+ }
+ catch (Exception x) {
+ return Futures.failedFuture(x);
+ }
+
+ return result;
+ }
+
+ /**
+ *
+ * @param theUri
+ * @param theMethod
+ * @param theRequest
+ * @param theResponseType
+ * @param readTimeOut pass -1 if you dont need to customize the read time out interval
+ * @return
+ */
+ public static <T> ResponseEntity<T> exchangeSync(String theUri, HttpMethod theMethod, HttpEntity theRequest, Class<T> theResponseType, int readTimeOut) {
+
+ RestTemplate restTemplate = new RestTemplate();
+
+ if(readTimeOut!=-1){
+ SimpleClientHttpRequestFactory rf = (SimpleClientHttpRequestFactory) restTemplate.getRequestFactory();
+ rf.setReadTimeout(1 * readTimeOut);
+ }
+
+ List<HttpMessageConverter<?>> converters = restTemplate.getMessageConverters();
+ converters.add(0, new JSONHttpMessageConverter());
+ restTemplate.setMessageConverters(converters);
+ ResponseEntity<T> result = null;
+
+ try {
+ result = restTemplate.exchange(theUri, theMethod, theRequest, theResponseType);
+ }
+ catch (RestClientException rcx) {
+ return new ResponseEntity<T>((T) rcx.getMessage(), HttpStatus.INTERNAL_SERVER_ERROR);
+ }
+ catch (Exception x) {
+ return new ResponseEntity<T>((T) x.getMessage(), HttpStatus.INTERNAL_SERVER_ERROR);
+ }
+
+ return result;
+ }
+
+
+
+ public static class HttpFuture<T> extends Futures.BasicFuture<T> {
+
+ HttpFuture() {
+ }
+
+ ListenableFutureCallback<ResponseEntity<T>> callback = new ListenableFutureCallback<ResponseEntity<T>>() {
+
+ public void onSuccess(ResponseEntity<T> theResult) {
+ HttpFuture.this.result(theResult.getBody());
+ }
+
+ public void onFailure(Throwable theError) {
+ if (theError instanceof HttpClientErrorException) {
+ HttpFuture.this.cause(new Exception((HttpClientErrorException)theError));
+ }
+ else {
+ HttpFuture.this.cause(theError);
+ }
+ }
+ };
+
+ }
+}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/JSONHttpMessageConverter.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/JSONHttpMessageConverter.java
new file mode 100644
index 0000000..e711279
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/JSONHttpMessageConverter.java
@@ -0,0 +1,100 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.io.Reader;
+import java.io.Writer;
+import java.lang.reflect.Type;
+import java.nio.charset.Charset;
+
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpInputMessage;
+import org.springframework.http.HttpOutputMessage;
+import org.springframework.http.MediaType;
+import org.springframework.http.converter.AbstractHttpMessageConverter;
+import org.springframework.http.converter.HttpMessageNotReadableException;
+import org.springframework.http.converter.HttpMessageNotWritableException;
+
+import org.json.JSONObject;
+import org.json.JSONArray;
+import org.json.JSONTokener;
+import org.json.JSONException;
+
+/**
+ */
+public class JSONHttpMessageConverter extends AbstractHttpMessageConverter<Object> {
+
+ public static final Charset DEFAULT_CHARSET = Charset.forName("UTF-8");
+
+ /** */
+ public JSONHttpMessageConverter() {
+ super(new MediaType("application", "json", DEFAULT_CHARSET));
+ }
+ /*
+ @Override
+ public boolean canRead(Class<?> theClazz, MediaType theMediaType) {
+ return canRead(theMediaType);
+ }
+
+ @Override
+ public boolean canWrite(Class<?> theClazz, MediaType theMediaType) {
+ return canWrite(theMediaType);
+ }
+ */
+ @Override
+ protected boolean supports(Class<?> theClazz) {
+ return theClazz.equals(JSONObject.class) ||
+ theClazz.equals(JSONArray.class);
+ }
+
+ @Override
+ protected Object readInternal(Class<?> theClazz, HttpInputMessage theInputMessage)
+ throws IOException, HttpMessageNotReadableException {
+
+ Reader json = new InputStreamReader(theInputMessage.getBody(), getCharset(theInputMessage.getHeaders()));
+
+ try {
+ if (theClazz.equals(JSONObject.class))
+ return new JSONObject(new JSONTokener(json));
+ if (theClazz.equals(JSONArray.class))
+ return new JSONArray(new JSONTokener(json));
+
+ throw new HttpMessageNotReadableException("Could not process input, cannot handle " + theClazz);
+ }
+ catch (JSONException jsonx) {
+ throw new HttpMessageNotReadableException("Could not read JSON: " + jsonx.getMessage(), jsonx);
+ }
+ }
+
+ @Override
+ protected void writeInternal(Object theObject, HttpOutputMessage theOutputMessage)
+ throws IOException, HttpMessageNotWritableException {
+
+ Writer writer = new OutputStreamWriter(theOutputMessage.getBody(), getCharset(theOutputMessage.getHeaders()));
+
+ try {
+ if (theObject instanceof JSONObject) {
+ ((JSONObject)theObject).write(writer);
+ }
+ else if (theObject instanceof JSONArray) {
+ ((JSONArray)theObject).write(writer);
+ }
+
+ writer.close();
+ }
+ catch(JSONException jsonx) {
+ throw new HttpMessageNotWritableException("Could not write JSON: " + jsonx.getMessage(), jsonx);
+ }
+ }
+
+ private Charset getCharset(HttpHeaders theHeaders) {
+ if (theHeaders != null &&
+ theHeaders.getContentType() != null &&
+ theHeaders.getContentType().getCharSet() != null) {
+ return theHeaders.getContentType().getCharSet();
+ }
+ return DEFAULT_CHARSET;
+ }
+
+}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/ListBuilder.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/ListBuilder.java
new file mode 100644
index 0000000..2538893
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/ListBuilder.java
@@ -0,0 +1,59 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.onap.sdc.dcae.catalog.commons.ListBuilder;
+
+import java.util.LinkedList;
+
/**
 * Fluent builder for {@link List} instances, backed by a {@link LinkedList}.
 * Return types are now properly generic ({@code ListBuilder<T>}/{@code List<T>}
 * instead of raw types); raw-typed callers remain source-compatible.
 */
public class ListBuilder<T> {

    private final List<T> list;

    public ListBuilder() {
        this.list = new LinkedList<T>();
    }

    /** @return true while the builder holds no elements */
    public boolean isEmpty() {
        return this.list.isEmpty();
    }

    /** Appends a single value. */
    public ListBuilder<T> add(T theValue) {
        this.list.add(theValue);
        return this;
    }

    /** Appends every value produced by the iterable. */
    public ListBuilder<T> addAll(final Iterable<? extends T> theValues) {
        for (final T val : theValues) {
            this.list.add(val);
        }
        return this;
    }

    /** Appends every element of the given list. */
    public ListBuilder<T> addAll(final List<? extends T> theList) {
        this.list.addAll(theList);
        return this;
    }

    /** Appends every element of the given array. */
    public ListBuilder<T> addAll(final T[] theArray) {
        for (T t : theArray) {
            this.list.add(t);
        }
        return this;
    }

    /** @return the accumulated list (live, not a copy) */
    public List<T> build() {
        return this.list;
    }

    /** @return the accumulated list, or null when nothing was added */
    public List<T> buildOpt() {
        return this.list.isEmpty() ? null : this.list;
    }

    /** @return a fixed-size list view of the array (see {@link Arrays#asList}) */
    public static <V> List<V> asList(V[] theArray) {
        return Arrays.asList(theArray);
    }

    /** @return a list view of the array, or null when the array is null or empty */
    public static <V> List<V> asListOpt(V[] theArray) {
        return (theArray != null && theArray.length > 0) ? Arrays.asList(theArray) : null;
    }
}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/MapBuilder.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/MapBuilder.java
new file mode 100644
index 0000000..3aa2a56
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/MapBuilder.java
@@ -0,0 +1,80 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.util.function.Function;
+
+import org.onap.sdc.dcae.catalog.commons.MapBuilder;
+
+import java.util.function.BiFunction;
+
/**
 * Fluent builder for {@link Map} instances, backed by a {@link HashMap}.
 * The "Opt" variants skip null values/entries, and buildOpt() yields null
 * for an empty result so callers can feed it straight into optional slots.
 */
public class MapBuilder<K,V> {

    private final Map<K,V> entries;

    public MapBuilder() {
        this.entries = new HashMap<K,V>();
    }

    /** @return true while no entries have been recorded */
    public boolean isEmpty() {
        return entries.isEmpty();
    }

    /** Records the given key/value pair unconditionally. */
    public MapBuilder<K,V> put(K theKey, V theValue) {
        entries.put(theKey, theValue);
        return this;
    }

    /** Records the pair only when the value is non-null. */
    public MapBuilder<K,V> putOpt(K theKey, V theValue) {
        if (theValue != null) {
            entries.put(theKey, theValue);
        }
        return this;
    }

    /** Records a single map entry. */
    public MapBuilder<K,V> put(final Map.Entry<? extends K, ? extends V> theEntry) {
        entries.put(theEntry.getKey(), theEntry.getValue());
        return this;
    }

    /** Records the entry only when the entry itself is non-null. */
    public MapBuilder<K,V> putOpt(final Map.Entry<? extends K, ? extends V> theEntry) {
        if (theEntry != null) {
            entries.put(theEntry.getKey(), theEntry.getValue());
        }
        return this;
    }

    /** Records every entry produced by the iterable. */
    public MapBuilder<K,V> putAll(final Iterable<? extends Map.Entry<? extends K, ? extends V>> theEntries) {
        theEntries.forEach(e -> entries.put(e.getKey(), e.getValue()));
        return this;
    }

    /**
     * Records every entry; when a key is already present the rekey function
     * supplies a replacement key, so duplicates are kept instead of being
     * overwritten.
     */
    public MapBuilder<K,V> forceAll(final Iterable<? extends Map.Entry<K, V>> theEntries,
                                    Function<Map.Entry<K, V>, K> rekeyFunction) {
        for (final Map.Entry<K, V> entry : theEntries) {
            K key = entries.containsKey(entry.getKey()) ? rekeyFunction.apply(entry) : entry.getKey();
            entries.put(key, entry.getValue());
        }
        return this;
    }

    /** Records every entry of the given map. */
    public MapBuilder<K,V> putAll(final Map<? extends K, ? extends V> theMap) {
        entries.putAll(theMap);
        return this;
    }

    /** @return the accumulated map (live, not a copy) */
    public Map<K,V> build() {
        return entries;
    }

    /** @return the accumulated map, or null when nothing was recorded */
    public Map<K,V> buildOpt() {
        return entries.isEmpty() ? null : entries;
    }
}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Neo.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Neo.java
new file mode 100644
index 0000000..f818163
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Neo.java
@@ -0,0 +1,54 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.util.Iterator;
+
+import com.google.common.base.Predicate;
+import com.google.common.collect.Iterators;
+
+import org.json.JSONObject;
+
+
+public class Neo {
+
+ /*
+ */
+ public static String literalMap(JSONObject theProps,
+ String theNameAlias,
+ String theValueAlias,
+ String theAssignmentOp,
+ String theRelationOp,
+ Predicate theFieldFilter) {
+ if(theProps.length() == 0)
+ return "";
+ StringBuilder sb = new StringBuilder("");
+ for (Iterator i = Iterators.filter(theProps.keys(),
+ theFieldFilter);
+ i.hasNext();) {
+ String propName = (String)i.next();
+
+ if (theNameAlias != null) {
+ sb.append(theNameAlias)
+ .append('.');
+ }
+ sb.append('`')
+ .append(propName)
+ .append('`')
+ .append(theAssignmentOp)
+ .append(" {")
+ .append(theValueAlias)
+ .append("}.")
+ .append('`')
+ .append(propName)
+ .append('`')
+ .append(theRelationOp);
+ }
+ return sb.substring(0, sb.length() - theRelationOp.length());
+ }
+
+ public static String literalMap(JSONObject theProps,
+ String theAlias) {
+ return literalMap(theProps, null, theAlias, ":", ",", f -> true);
+ }
+
+}
+
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxies.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxies.java
new file mode 100644
index 0000000..8983599
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxies.java
@@ -0,0 +1,37 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.util.Map;
+import java.lang.reflect.ParameterizedType;
+
+import org.json.JSONObject;
+
+import org.onap.sdc.dcae.catalog.commons.ProxyBuilder;
+
+
+public class Proxies {
+
+ private Proxies() {
+ }
+
+
+ private static ProxyBuilder builder = new ProxyBuilder();
+
+ public static <T> T build(Map theData, Class<T> theType) {
+ return builder.build(new JSONObject(theData), theType);
+ }
+
+ public static <T> T build(Map theData, Map theContextData, Class<T> theType) {
+ return builder.build(new JSONObject(theData), theContextData, theType);
+ }
+
+ public static <T> T build(JSONObject theData, Class<T> theType) {
+ return builder.build(theData, theType);
+ }
+
+ public static <T> Class<T> typeArgument(Class theType) {
+ return (Class<T>)
+ ((ParameterizedType)theType.getGenericSuperclass()).
+ getActualTypeArguments()[0];
+ }
+
+}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxy.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxy.java
new file mode 100644
index 0000000..d368886
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxy.java
@@ -0,0 +1,144 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.util.List;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.Collections;
+
+import java.util.stream.Collectors;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import java.lang.reflect.Type;
+import java.lang.reflect.Method;
+import java.lang.reflect.Array;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+
+import java.lang.invoke.MethodHandles;
+
+import com.google.common.reflect.Invokable;
+import org.onap.sdc.dcae.catalog.commons.Proxy;
+import org.onap.sdc.dcae.catalog.commons.ProxyBuilder;
+import com.google.common.reflect.AbstractInvocationHandler;
+
+import org.apache.commons.beanutils.ConvertUtils;
+
+import org.json.JSONObject;
+import org.json.JSONArray;
+
/**
 * Invocation handler backing dynamic proxies over JSONObject data: interface
 * getter calls are resolved against the underlying JSON (or the builder's
 * context), with optional per-method key mapping and nested proxying via the
 * {@link DataMap} annotation.
 */
public class Proxy extends AbstractInvocationHandler {

    /**
     * Maps a proxy method onto its backing data entry.
     * map: overrides the data key (defaults to the method name);
     * proxy: when true, JSON object/array values are wrapped in proxies;
     * elementType: element interface used when proxying list members.
     */
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.METHOD)

    public static @interface DataMap {

        public String map() default "";

        public boolean proxy() default false;

        public Class elementType() default Void.class;
    }


    // accessible MethodHandles.Lookup constructor, used below to invoke
    // interface default methods on the proxy instance
    public static final Constructor<MethodHandles.Lookup> lookupHandleConstructor;

    static {
        try {
            lookupHandleConstructor =
                MethodHandles.Lookup.class.getDeclaredConstructor(Class.class,
                    int.class);

            if (!lookupHandleConstructor.isAccessible()) {
                lookupHandleConstructor.setAccessible(true);
            }
        }
        catch (Exception x) {
            throw new RuntimeException(x);
        }
    }


    private JSONObject data;       // backing JSON data for this proxy
    private ProxyBuilder builder;  // supplies context, extensions and nested proxy construction

    protected Proxy(JSONObject theData, ProxyBuilder theBuilder) {
        this.data = theData;
        this.builder = theBuilder;
    }

    /** @return the backing JSON data */
    public JSONObject data() {
        return this.data;
    }

    /** @return the builder this proxy was created by */
    public ProxyBuilder getBuilder() {
        return this.builder;
    }

    /**
     * Resolves a proxy method call: default methods are invoked as written;
     * otherwise the method name (or its DataMap-mapped key) is served first by
     * a builder extension, then from the builder context, then from the JSON
     * data, converting the value to the method's return type as needed.
     */
    protected Object handleInvocation(
                                    Object theProxy,Method theMethod,Object[] theArgs)
                                                                                throws Throwable {
        if (theMethod.isDefault()) {
            final Class<?> declaringClass = theMethod.getDeclaringClass();

            // invoke the interface default implementation via a private lookup
            return lookupHandleConstructor
                .newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
                .unreflectSpecial(theMethod, declaringClass)
                .bindTo(theProxy)
                .invokeWithArguments(theArgs);
        }

        String key = theMethod.getName();

        Proxy.DataMap dataMap = (Proxy.DataMap)theMethod.getAnnotation(Proxy.DataMap.class);
        if (dataMap != null) {
            String dataKey = dataMap.map();
            if (dataKey != null && !"".equals(dataKey))
                key = dataKey;
        }

        //this is ugly, can this be done through an extension mechanism such as plugging in functions?
        if ( builder.hasExtension(key) )
            return this.builder.extension(key).apply(this, theArgs);

        //we give priority to the context (because of the 'catalog' property issue in catalog service) but
        //how natural is this?
        Object val = this.builder.context(key);
        if (val == null)
            val = this.data.opt(key);

        if (val == null)
            return null;

        //as we create proxies here we should store them back in the 'data' so that we do not do it again
        //can we always 'recognize' them?
        if (val instanceof String &&
                String.class != theMethod.getReturnType()) {
            //??This will yield a POJO .. (conversion goes through the ConvertUtils registry)
            return ConvertUtils.convert((String)val, theMethod.getReturnType());
        }
        else if (val instanceof JSONObject) {
            if (dataMap != null && dataMap.proxy()) {
                // nested object: wrap it in a proxy of the declared return type
                return builder.build((JSONObject)val, theMethod.getReturnType());
            }
        }
        else if (val instanceof JSONArray&& dataMap != null &&
                dataMap.proxy() &&
                List.class.isAssignableFrom(theMethod.getReturnType())) {

            // proxied list: the return type must be a concrete, default-constructible List
            List res = (List) theMethod.getReturnType().newInstance();
            for (int i = 0; i < ((JSONArray) val).length(); i++) {
                res.add(builder.build(((JSONArray) val).getJSONObject(i), dataMap.elementType()));
            }
            return res;

        }
        return val;
    }
}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxy.pojo b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxy.pojo
new file mode 100644
index 0000000..b3b5cb9
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxy.pojo
@@ -0,0 +1,145 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Collections;
+
+import java.util.stream.Collectors;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import java.lang.reflect.Type;
+import java.lang.reflect.Method;
+import java.lang.reflect.Array;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+
+import java.lang.invoke.MethodHandles;
+
+import com.google.common.reflect.Invokable;
+import com.google.common.reflect.AbstractInvocationHandler;
+
+import org.apache.commons.beanutils.ConvertUtils;
+
+
+/**
+ */
+public class Proxy
+ extends AbstractInvocationHandler {
+
+ @Retention(RetentionPolicy.RUNTIME)
+ @Target(ElementType.METHOD)
+
+ public static @interface DataMap {
+
+ public String map() default "";
+
+ public boolean proxy() default false;
+
+ public Class elementType() default Void.class;
+ }
+
+
+ public static Constructor<MethodHandles.Lookup> lookupHandleConstructor;
+
+ static {
+ try {
+ lookupHandleConstructor =
+ MethodHandles.Lookup.class.getDeclaredConstructor(Class.class,
+ int.class);
+
+ if (!lookupHandleConstructor.isAccessible()) {
+ lookupHandleConstructor.setAccessible(true);
+ }
+ }
+ catch (Exception x) {
+ throw new RuntimeException(x);
+ }
+ }
+
+
+ private Map data;
+ private ProxyBuilder builder;
+
+ protected Proxy(Map theData, ProxyBuilder theBuilder) {
+ this.data = theData;
+ this.builder = theBuilder;
+ }
+
+ public Map data() {
+ return this.data;
+ }
+
+ public ProxyBuilder getBuilder() {
+ return this.builder;
+ }
+
+ protected Object handleInvocation(
+ Object theProxy,Method theMethod,Object[] theArgs)
+ throws Throwable {
+ if (theMethod.isDefault()) {
+ final Class<?> declaringClass = theMethod.getDeclaringClass();
+ /*
+ return MethodHandles.lookup()
+ .in(declaringClass)
+ .unreflectSpecial(theMethod, declaringClass)
+ .bindTo(theProxy)
+ .invokeWithArguments(theArgs);
+ */
+ return lookupHandleConstructor
+ .newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
+ .unreflectSpecial(theMethod, declaringClass)
+ .bindTo(theProxy)
+ .invokeWithArguments(theArgs);
+ }
+
+ String key = theMethod.getName();
+
+ Proxy.DataMap dataMap = (Proxy.DataMap)theMethod.getAnnotation(Proxy.DataMap.class);
+ if (dataMap != null) {
+ String dataKey = dataMap.map();
+ if (dataKey != null && !"".equals(dataKey))
+ key = dataKey;
+ }
+
+ //this is ugly, can this be done through an extension mechanism such as plugging in functions?
+ if ( builder.hasExtension(key) )
+ return this.builder.extension(key).apply(this, theArgs);
+
+ Object val = this.data.getOrDefault(key, this.builder.context(key));
+
+System.out.println("!! " + key + " : " + val);
+
+//as we create proxies here we should store them back in the 'data' so that we do not do it again
+//can we always 'recognize' them?
+ if (val instanceof String &&
+ String.class != theMethod.getReturnType()) {
+ return ConvertUtils.convert((String)val, theMethod.getReturnType());
+ }
+ else if (val instanceof Map) {
+ if (dataMap != null && dataMap.proxy()) {
+ return builder.build((Map)val, theMethod.getReturnType());
+ }
+ }
+ else if (val instanceof List) {
+ if (dataMap != null && dataMap.proxy()) {
+ return ((List)val)
+ .stream()
+ .map(e -> this.builder.build((Map)e, dataMap.elementType()))
+ .collect(Collectors.toList());
+ }
+ }
+/*
+ else if (val.getClass().isArray()) {
+ if (dataMap != null && dataMap.proxy()) {
+ }
+ }
+*/
+ return val;
+ }
+}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/ProxyBuilder.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/ProxyBuilder.java
new file mode 100644
index 0000000..e3a422a
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/ProxyBuilder.java
@@ -0,0 +1,92 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.util.Map;
+
+import java.util.function.Function;
+import java.util.function.BiFunction;
+
+import org.apache.commons.beanutils.ConvertUtils;
+import org.apache.commons.beanutils.Converter;
+
+import org.json.JSONObject;
+
+import org.onap.sdc.dcae.catalog.commons.Proxy;
+import org.onap.sdc.dcae.catalog.commons.ProxyBuilder;
+
+
+public class ProxyBuilder {
+
+ private Map<String, ?> context;
+ private Map<String, BiFunction<Proxy, Object[], Object>> extensions;
+
+ public ProxyBuilder() {
+ }
+/*
+ public <T> T build(Map theData, Class<T> theType) {
+ return build(theData, this.context, theType);
+ }
+
+ public <T> T build(Map theData, Map theContextData, Class<T> theType) {
+ return (T)java.lang.reflect.Proxy.newProxyInstance(
+ ProxyBuilder.class.getClassLoader(),
+ new Class[] { theType },
+ new Proxy(theData, this));
+ }
+*/
+ public <T> T build(Map theData, Class<T> theType) {
+ return build(new JSONObject(theData), theType);
+ }
+
+ public <T> T build(Map theData, Map theContextData, Class<T> theType) {
+ return build(new JSONObject(theData), theContextData, theType);
+ }
+
+ public <T> T build(JSONObject theData, Class<T> theType) {
+ return build(theData, this.context, theType);
+ }
+
+ public <T> T build(JSONObject theData, Map theContextData, Class<T> theType) {
+ return (T)java.lang.reflect.Proxy.newProxyInstance(
+ ProxyBuilder.class.getClassLoader(),
+ new Class[] { theType },
+ new Proxy(theData, this));
+ }
+
+
+
+
+ public ProxyBuilder withConverter(final Function<Object, ?> theConverter, Class theType) {
+ ConvertUtils.register(new Converter() {
+ public Object convert(Class theToType, Object theValue) {
+ return theConverter.apply(theValue);
+ }
+ },
+ theType);
+ return this;
+ }
+
+ /*
+ plug in an extension to the proxy default behaviour.
+ */
+ public ProxyBuilder withExtensions(Map<String, BiFunction<Proxy, Object[], Object>> theExtensions) {
+ this.extensions = theExtensions;
+ return this;
+ }
+
+ public ProxyBuilder withContext(Map<String, ?> theContext) {
+ this.context = theContext;
+ return this;
+ }
+
+ protected Object context(String theName) {
+ return this.context == null ? null : this.context.get(theName);
+ }
+
+ protected BiFunction<Proxy, Object[], Object> extension(String theName) {
+ return this.extensions == null ? null : this.extensions.get(theName);
+ }
+
+ protected boolean hasExtension(String theName) {
+ return this.extensions == null ? false : this.extensions.containsKey(theName);
+ }
+}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Recycler.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Recycler.java
new file mode 100644
index 0000000..3493cb1
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Recycler.java
@@ -0,0 +1,329 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.io.Reader;
+import java.io.IOException;
+
+import java.util.List;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.AbstractMap;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.Collections;
+import java.util.Spliterators;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+
+import org.apache.commons.jxpath.Pointer;
+import org.apache.commons.jxpath.JXPathContext;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.core.type.TypeReference;
+
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.yaml.snakeyaml.Yaml;
+
+
+/**
+ * Practically a copy of the Validator's service Recycler, minus the Spring framework aspects + picking up the
+ * description of every node
+ */
+public class Recycler {
+
+ private static final String PROPERTIES = "properties";
+ private static final String VALUE = "value";
+ private static final String ASSIGNMENT = "assignment";
+ private static final String CAPABILITY = "capability";
+ private static final String RELATIONSHIP = "relationship";
+ private static final String NAME = "name";
+ private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+ private List<Map> imports;
+ private List<String> metas;
+
    /** Starts with no imports and an empty metas list. */
    public Recycler() {
        withImports();
        withMetas(null);
    }
+
+ public Recycler withImports(String... theImports) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Setting imports to {}", theImports);
+ ListBuilder importsBuilder = new ListBuilder();
+ for (int i = 0; i < theImports.length; i++) {
+ importsBuilder.add(new MapBuilder()
+ .put("i" + i, theImports[i])
+ .build());
+ }
+ this.imports = importsBuilder.build();
+ return this;
+ }
+
+ private List imports() {
+ ListBuilder importsBuilder = new ListBuilder();
+ for (Map e: this.imports) {
+ importsBuilder.add(new MapBuilder()
+ .putAll(e)
+ .build());
+ }
+ return importsBuilder.build();
+ }
+
+ public Recycler withMetas(String... theMetas) {
+ this.metas = (theMetas == null) ? Collections.emptyList() : Arrays.asList(theMetas);
+ return this;
+ }
+
+ public Object recycle(final Reader theSource) throws Exception {
+ return this.recycle(new ObjectMapper().readValue(theSource, (Class)HashMap.class));
+ }
+
    /**
     * Converts the internal model dump into a TOSCA-style structure: a map
     * with tosca_definitions_version, the configured imports, and a
     * topology_template holding one node_template per /nodes entry, keyed
     * "&lt;name&gt;_&lt;nid&gt;".
     */
    public Object recycle(final Object theDump) {

        final JXPathContext jxroot = JXPathContext.newContext(theDump);
        jxroot.setLenient(true); // missing paths yield null instead of throwing

        // one node_template per /nodes entry; null when there are no nodes
        final Map<String, Object> nodeTemplates =
            (Map<String, Object>)new MapBuilder()
                .putAll(
                    StreamSupport
                        .stream(
                            Spliterators.spliteratorUnknownSize((Iterator<Pointer>)jxroot.iteratePointers("/nodes"), 16), false)
                        .map(p -> {
                            JXPathContext jxnode = jxroot.getRelativeContext(p);
                            return new AbstractMap.SimpleEntry<String,Object>(
                                (String)jxnode.getValue(NAME) + "_" + (String)jxnode.getValue("nid"),
                                new MapBuilder()
                                    .put("type", jxnode.getValue("type/name"))
                                    .put("description", jxnode.getValue("description"))
                                    .putOpt("metadata", nodeMetadata(jxnode))
                                    .putOpt(PROPERTIES, nodeProperties(jxnode))
                                    .putOpt("requirements", nodeRequirements(jxnode))
                                    .putOpt("capabilities", nodeCapabilities(jxnode))
                                    .build());
                        })::iterator)
                .buildOpt();

        return new MapBuilder()
            .put("tosca_definitions_version", "tosca_simple_yaml_1_0_0")
            .put("imports", imports())
            .put("topology_template", new MapBuilder()
                .putOpt("node_templates", nodeTemplates)
                .build())
            .build();
    }
+
    /**
     * Collects the node's properties as a name-to-value map; properties that
     * resolve to null are omitted. Returns null when no property survives,
     * so the caller's putOpt drops the key entirely.
     */
    private Object nodeProperties(JXPathContext theNodeContext) {
        return
            new MapBuilder()
                .putAll(
                    StreamSupport.stream(
                        Spliterators.spliteratorUnknownSize((Iterator<Map>)theNodeContext.iterate(PROPERTIES), 16), false)
                    .map(m -> new AbstractMap.SimpleEntry(m.get(NAME), this.nodeProperty(m)))
                    .filter(e -> e.getValue() != null)
                    ::iterator)
                .buildOpt();
    }
+
+ /* */
+ private Object nodeProperty(final Map theSpec) {
+ Object value = theSpec.get(VALUE);
+ if (value == null) {
+ value = theSpec.get("default");
+ if (value == null) {
+ /*final*/ Map assign = (Map)theSpec.get(ASSIGNMENT);
+ if (assign != null) {
+ value = assign.get(VALUE);
+ }
+ }
+ }
+ String type = (String)theSpec.get("type");
+ if (value != null && type != null) {
+ value = getValueByType(value, type);
+ }
+ return value;
+ }
+
+ private Object getValueByType(Object value, String type) {
+ Object returnValue = null;
+ try {
+ if ("map".equals(type) && !(value instanceof Map)) {
+ returnValue = new ObjectMapper().readValue(value.toString(), new TypeReference<Map>(){});
+ }
+ else if ("list".equals(type) && !(value instanceof List)) {
+ returnValue = new ObjectMapper().readValue(value.toString(), new TypeReference<List>(){});
+ }
+ else if ("integer".equals(type) && (value instanceof String)) {
+ returnValue = Integer.valueOf((String)value);
+ }
+ else if ("float".equals(type) && (value instanceof String)) {
+ returnValue = Double.valueOf((String)value); //double because that's how the yaml parser would encode it
+ }
+ }
+ catch (NumberFormatException nfx) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Failed to process String representation {} of numeric data: {}", value, nfx);
+ }
+ catch (IOException iox) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Failed to process {} representation of a collection: {}", value.getClass().getName(), iox);
+ }
+ return returnValue;
+ }
+
    /**
     * Collects the node's requirement assignments; a single requirement
     * definition can expand to several entries (see nodeRequirement).
     * Returns null when the node has no requirements, so the caller's putOpt
     * omits the key.
     */
    private List nodeRequirements(JXPathContext theNodeContext) {
        return
            new ListBuilder()
                .addAll(
                    StreamSupport.stream(
                        Spliterators.spliteratorUnknownSize((Iterator<Map>)theNodeContext.iterate("requirements"), 16), false)
                    .flatMap(m -> this.nodeRequirement(m, theNodeContext).stream())
                    //nicer than the ListBuilder but cannot handle the empty lists, i.e. it will generate empty requirement lists
                    // .collect(Collectors.toList())
                    .toArray())
                .buildOpt();
    }
+
    /**
     * Expands one requirement definition into its assignment entries, using
     * the relations recorded in the dump.
     *
     * @param theSpec the requirement entry that appears within the node specification
     * @param theNodeContext the node's JXPath context; its parent context is
     *        used to look up relations and target nodes. Should I pass the
     *        root context instead of assuming that the node's context has it
     *        as parent?
     * @return a List, as one requirement (definition) could end up being
     *         instantiated multiple times
     */
    private List nodeRequirement(final Map theSpec, JXPathContext theNodeContext/*Iterator theTargets*/) {

        final ListBuilder value = new ListBuilder();

        final Map target = (Map)theSpec.get("target");
        final Map capability = (Map)theSpec.get(CAPABILITY);
        final Map relationship = (Map)theSpec.get(RELATIONSHIP);

        //these are actual assignments: relations pointing into this node's requirement
        for (Iterator i = theNodeContext.getParentContext().iterate("/relations[@n2='" + theNodeContext.getValue("nid") + "']/meta[@p2='" + theSpec.get(NAME) +"']"); i.hasNext(); ) {

            String targetNodeName = (String)((Map)i.next()).get("n1");

            //make sure target exists
            Map targetNode = (Map)theNodeContext.getParentContext().getValue("/nodes[@nid='" + targetNodeName + "']");
            if (null == targetNode) {
                debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Relation points to non-existing node {}", targetNodeName);
                continue; //this risks producing a partial template ..
            }

            value.add(new MapBuilder().put(theSpec.get(NAME), new MapBuilder()
                .putOpt("node", targetNode.get(NAME) + "_" + targetNode.get("nid"))
                .putOpt(CAPABILITY, capability == null ? null : capability.get(NAME))
                .putOpt(RELATIONSHIP, relationship == null ? null : relationship.get("type"))
                .build()).build());
        }
        addTemporary(theSpec, theNodeContext, value, capability, relationship);

        // no assignment found: fall back to the declared target (fields may be null)
        if (value.isEmpty()) {
            value.add(new MapBuilder().put(theSpec.get(NAME), new MapBuilder()
                .putOpt("node", target == null ? null : target.get(NAME) + "_" + target.get("nid"))
                .putOpt(CAPABILITY, capability == null ? null : capability.get(NAME))
                .putOpt(RELATIONSHIP, relationship == null ? null : relationship.get("type"))
                .build()).build());
        }

        return value.build();
    }
+
+ /**
+  * Temporary (see inline markers): also collects relations where this node is
+  * the n1 endpoint (meta p1 names the requirement), mirroring the n2-side scan
+  * in nodeRequirement. Matches are appended to the supplied builder.
+  */
+ private void addTemporary(Map theSpec, JXPathContext theNodeContext, ListBuilder value, Map capability, Map relationship) {
+  //temporary
+  for (Iterator i = theNodeContext.getParentContext().iterate("/relations[@n1='" + theNodeContext.getValue("nid") + "']/meta[@p1='" + theSpec.get(NAME) +"']"); i.hasNext(); ) {
+
+   String targetNodeName = (String)((Map)i.next()).get("n2");
+
+   Map targetNode = (Map)theNodeContext.getParentContext().getValue("/nodes[@nid='" + targetNodeName + "']");
+   //make sure target exists
+   if (null == targetNode) {
+    // BUGFIX: log the looked-up name, not targetNode (which is null here)
+    debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Relation points to non-existing node {}", targetNodeName);
+    continue; //this risks of producing a partial template ..
+   }
+
+   value.add(new MapBuilder().put(theSpec.get(NAME), new MapBuilder()
+    .putOpt("node", targetNode.get(NAME) + "_" + targetNode.get("nid"))
+    .putOpt(CAPABILITY, capability == null ? null : capability.get(NAME))
+    .putOpt(RELATIONSHIP, relationship == null ? null : relationship.get("type"))
+    .build()).build());
+  }
+  //end temporary
+ }
+
+ /* Builds the 'capabilities' assignment map for a node; capabilities without
+  * any property/attribute assignments are filtered out (nodeCapability returns
+  * null for those), and buildOpt suppresses the entry entirely when nothing
+  * remains. */
+ private Map nodeCapabilities(JXPathContext theNodeContext) {
+  return
+   new MapBuilder()
+    .putAll(
+     StreamSupport.stream(
+      Spliterators.spliteratorUnknownSize((Iterator<Map>)theNodeContext.iterate("capabilities"), 16), false)
+      .map(m -> this.nodeCapability(m))
+      .filter(c -> c != null)
+      ::iterator)
+    .buildOpt();
+ }
+
+ /**
+ * this handles a capability assignment which only includes properties and attributes so unless there
+ * are any properties/attributes assignments we might not generate anything
+ */
+ private Map.Entry nodeCapability(final Map theSpec) {
+ List<Map> properties = (List<Map>) theSpec.get(PROPERTIES);
+ if (properties == null || properties.isEmpty()) {
+ return null;
+ }
+
+ return new AbstractMap.SimpleEntry(theSpec.get(NAME),
+ new MapBuilder()
+ .put(PROPERTIES,
+ new MapBuilder().putAll(properties.stream()
+ .filter(p -> p.containsKey(ASSIGNMENT) ||
+ p.containsKey(VALUE))
+ .map(p -> new AbstractMap.SimpleEntry(
+ p.get(NAME),
+ p.containsKey(ASSIGNMENT) ?
+ ((Map) p.get(ASSIGNMENT)).get(VALUE)
+ : p.get(VALUE))
+ )
+ ::iterator)
+ .build())
+ .build());
+ }
+
+
+ /* Collects node metadata for every key listed in this.metas: a Map-valued
+  * entry is flattened into its (stringified) key/value pairs, a scalar becomes
+  * a single stringified entry, and absent keys contribute nothing. Returns the
+  * accumulated map via buildOpt (presumably null when empty — confirm against
+  * MapBuilder). */
+ private Object nodeMetadata(JXPathContext theNodeContext) {
+  return
+   new MapBuilder()
+    .putAll(
+     this.metas
+      .stream()
+      .flatMap(m -> {
+       Object v = theNodeContext.getValue(m);
+       if (v == null) {
+        return Stream.empty();
+       }
+       if (v instanceof Map) {
+        return ((Map) v).entrySet()
+         .stream()
+         .map(e -> new AbstractMap.SimpleEntry<String, Object>
+          (((Map.Entry) e).getKey().toString(),
+           ((Map.Entry) e).getValue().toString()));
+       }
+       return Stream.of(new AbstractMap.SimpleEntry<String,Object>(m, v.toString()));
+      })
+      ::iterator)
+    .buildOpt();
+ }
+
+
+ /** Serializes the given value to its YAML text representation. */
+ public static String toString(Object theVal) {
+  final Yaml dumper = new Yaml();
+  return dumper.dump(theVal);
+ }
+
+
+ /**
+  * CLI entry point: recycles the template file named by theArgs[0] and logs
+  * the YAML rendering of the result.
+  * BUGFIX: the FileReader is now closed via try-with-resources (it previously
+  * leaked). NOTE(review): FileReader uses the platform default charset —
+  * confirm whether templates are guaranteed UTF-8.
+  */
+ public static void main(String[] theArgs) throws Exception {
+  try (java.io.Reader source = new java.io.FileReader(theArgs[0])) {
+   debugLogger.log(LogLevel.DEBUG, Recycler.class.getName(),
+    Recycler.toString(new Recycler().recycle(source)));
+  }
+ }
+}
diff --git a/dcaedt_catalog/db/pom.xml b/dcaedt_catalog/db/pom.xml
new file mode 100644
index 0000000..8a0e1f9
--- /dev/null
+++ b/dcaedt_catalog/db/pom.xml
@@ -0,0 +1,149 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <!-- DCAE-DT Catalog database module: Neo4j-backed model import/export tooling -->
+ <parent>
+  <groupId>org.onap.sdc.dcae</groupId>
+  <artifactId>DCAE-DT-Catalog</artifactId>
+  <version>1806.0.1-SNAPSHOT</version>
+ </parent>
+ <artifactId>DCAE-DT-Catalog-DB</artifactId>
+ <packaging>jar</packaging>
+ <name>DCAE DT Catalog database</name>
+
+ <build>
+  <sourceDirectory>src/main/java</sourceDirectory>
+  <plugins>
+   <plugin>
+    <artifactId>maven-compiler-plugin</artifactId>
+    <version>3.1</version>
+    <configuration>
+     <source>1.8</source>
+     <target>1.8</target>
+     <encoding>${project.build.sourceEncoding}</encoding>
+    </configuration>
+   </plugin>
+   <!-- copies runtime dependencies under target/deps for packaging/inspection -->
+   <plugin>
+    <groupId>org.apache.maven.plugins</groupId>
+    <artifactId>maven-dependency-plugin</artifactId>
+    <version>2.10</version>
+    <executions>
+     <execution>
+      <id>copy-dependencies</id>
+      <phase>package</phase>
+      <goals>
+       <goal>copy-dependencies</goal>
+      </goals>
+      <configuration>
+       <outputDirectory>${project.build.directory}/deps</outputDirectory>
+       <overWriteReleases>false</overWriteReleases>
+       <overWriteSnapshots>false</overWriteSnapshots>
+       <overWriteIfNewer>true</overWriteIfNewer>
+      </configuration>
+     </execution>
+    </executions>
+   </plugin>
+   <!-- generates ${buildNumber} consumed by the jar/assembly manifests below -->
+   <plugin>
+    <groupId>org.codehaus.mojo</groupId>
+    <artifactId>buildnumber-maven-plugin</artifactId>
+    <version>1.4</version>
+    <executions>
+     <execution>
+      <phase>validate</phase>
+      <goals>
+       <goal>create</goal>
+      </goals>
+     </execution>
+    </executions>
+    <configuration>
+     <doCheck>false</doCheck>
+     <doUpdate>false</doUpdate>
+    </configuration>
+   </plugin>
+   <plugin>
+    <groupId>org.apache.maven.plugins</groupId>
+    <artifactId>maven-jar-plugin</artifactId>
+    <version>2.1</version>
+    <configuration>
+     <archive>
+      <manifest>
+       <addDefaultImplementationEntries>true</addDefaultImplementationEntries>
+      </manifest>
+      <manifestEntries>
+       <Implementation-Build>${buildNumber}</Implementation-Build>
+      </manifestEntries>
+     </archive>
+    </configuration>
+   </plugin>
+
+   <!-- fat jar with Modeled as entry point (the CLI importer) -->
+   <plugin>
+    <groupId>org.apache.maven.plugins</groupId>
+    <artifactId>maven-assembly-plugin</artifactId>
+    <version>2.6</version>
+    <configuration>
+     <descriptorRefs>
+      <descriptorRef>jar-with-dependencies</descriptorRef>
+     </descriptorRefs>
+     <archive>
+      <manifest>
+       <mainClass>org.onap.sdc.dcae.db.neo4j.Modeled</mainClass>
+      </manifest>
+      <manifestEntries>
+       <Implementation-Build>${buildNumber}</Implementation-Build>
+      </manifestEntries>
+     </archive>
+    </configuration>
+    <executions>
+     <execution>
+      <id>make-assembly</id> <!-- this is used for inheritance merges -->
+      <phase>package</phase> <!-- bind to the packaging phase -->
+      <goals>
+       <goal>single</goal>
+      </goals>
+     </execution>
+    </executions>
+   </plugin>
+
+  </plugins>
+ </build>
+ <dependencies>
+  <dependency>
+   <groupId>org.apache.httpcomponents</groupId>
+   <artifactId>httpasyncclient</artifactId>
+   <version>4.1</version>
+  </dependency>
+  <dependency>
+   <groupId>commons-io</groupId>
+   <artifactId>commons-io</artifactId>
+   <version>2.4</version>
+  </dependency>
+  <dependency>
+   <groupId>commons-cli</groupId>
+   <artifactId>commons-cli</artifactId>
+   <version>1.3</version>
+  </dependency>
+  <dependency>
+   <groupId>commons-jxpath</groupId>
+   <artifactId>commons-jxpath</artifactId>
+   <version>1.3</version>
+  </dependency>
+  <dependency>
+   <groupId>com.google.guava</groupId>
+   <artifactId>guava</artifactId>
+   <version>17.0</version>
+  </dependency>
+  <dependency>
+   <groupId>org.yaml</groupId>
+   <artifactId>snakeyaml</artifactId>
+   <version>1.17</version>
+  </dependency>
+  <dependency>
+   <groupId>org.json</groupId>
+   <artifactId>json</artifactId>
+   <version>20160212</version>
+  </dependency>
+  <dependency>
+   <groupId>com.github.wnameless</groupId>
+   <artifactId>json-flattener</artifactId>
+   <version>0.2.2</version>
+  </dependency>
+ </dependencies>
+</project>
diff --git a/dcaedt_catalog/db/src/main/java/org/onap/sdc/dcae/db/neo4j/Modeled.java b/dcaedt_catalog/db/src/main/java/org/onap/sdc/dcae/db/neo4j/Modeled.java
new file mode 100644
index 0000000..6b2f395
--- /dev/null
+++ b/dcaedt_catalog/db/src/main/java/org/onap/sdc/dcae/db/neo4j/Modeled.java
@@ -0,0 +1,1980 @@
+/*
+ * AT&T - PROPRIETARY
+ * THIS FILE CONTAINS PROPRIETARY INFORMATION OF
+ * AT&T AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN
+ * ACCORDANCE WITH APPLICABLE AGREEMENTS.
+ *
+ * Copyright (c) 2014 AT&T Knowledge Ventures
+ * Unpublished and Not for Publication
+ * All Rights Reserved
+ */
+package org.onap.sdc.dcae.db.neo4j;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.List;
+import java.util.LinkedList;
+import java.util.Collections;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.codec.binary.Base64;
+
+import org.apache.commons.jxpath.JXPathContext;
+import org.apache.commons.jxpath.JXPathException;
+
+import org.apache.http.Header;
+import org.apache.http.HttpHeaders;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.json.JSONArray;
+
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.yaml.snakeyaml.Yaml;
+
+import com.google.common.collect.Table;
+import com.google.common.collect.HashBasedTable;
+
+/* A few less obvious design choices:
+ * - representing properties across type hierarchies (same for requirements
+ * and capabilities, and will be for attributes and interfaces when we'll
+ * add them): we attach to each type only those properties it declares (such a
+ * declaration might be the re-definition of a property defined by a supertype).
+ * Calculating the set of properties for a type (i.e. the one it declares plus
+ * the ones it inherits, with respect to re-definitions) is a 2-step process:
+ * 1. run a query matching all properties across the type's hierarchy, from
+ * leaf to root type (neo's job)
+ * 2. collecting them in a set that accumulates them with respect to
+ * re-definition (model catalog client library job)
+ * A (viable) alternative would have been to calculate the entire property set
+ * at model import time and associate them it the type node. It would simplify
+ * the query and processing in the catalog API. It has the drawback of making
+ * the reverse process (exporting a yaml model from neo) tedious.
+ * As we get a better sense of were the optimizations are needed this might
+ * be a change to be made ..
+ *
+ *
+ * - representing requirements and capability as nodes. At first glance
+ * both can be represented as edges pointing from a Type Node or Template Node
+ * to another Type Node or Template Node. While this is true for capabilities
+ * it is not so for requirements: a requirement could point to a capability
+ * of a Type Node, i.e. it is a hyperedge between a Type Node (or Template Node),
+ * another Type Node (the target) and a capability of the target. As such, the
+ * requirement ends up being represented as a node and the capability will need
+ * to do the same in order to be able to be pointed at (and for the sake of
+ * uniformity ..).
+ *
+ *
+ */
+public class Modeled {
+
+ // shared ONAP loggers (error and debug channels)
+ private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+ private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+ // builder for the HTTP client used to talk to the Neo4j REST endpoint
+ private static HttpClientBuilder httpClientBuilder =
+  HttpClientBuilder.create();
+ // NOTE(review): this usage string does not match the -t/-a/-i/-l options
+ // parsed in main — looks stale; confirm and update
+ private static String USAGE = "oil oil_stylesheet_path | bigdata | aws | awsdata input_file customer";
+
+ // namespaces whose unresolved references are tolerated; pre-seeded with the
+ // standard TOSCA namespaces, extended via the --ignore CLI option in main
+ private static List<String> ignoreMissing = new LinkedList<String>();
+
+ static {
+  Collections.addAll(ignoreMissing,
+   "tosca.datatypes",
+   "tosca.capabilities",
+   "tosca.relationships",
+   "tosca.interfaces",
+   "tosca.nodes",
+   "tosca.artifacts",
+   "tosca.policies",
+   "tosca.groups");
+ }
+
+ /**
+  * CLI entry point: parses -t (neo4j target uri), -a (action), -i (input),
+  * -l (labels) and --ignore, then dispatches to the matching operation.
+  */
+ public static void main(String[] theArgs) {
+
+  CommandLineParser parser = new BasicParser();
+
+  // create the Options
+  Options options = new Options();
+  options.addOption(OptionBuilder.
+   withArgName("target")
+   .withLongOpt("target")
+   .withDescription("target ice4j database uri")
+   .hasArg()
+   .isRequired()
+   .create('t'));
+
+  options.addOption(OptionBuilder.
+   withArgName("action")
+   .withLongOpt("action")
+   .withDescription("one of import, annotate, list, remove")
+   .hasArg()
+   .isRequired()
+   .create('a'));
+
+  options.addOption(
+   OptionBuilder.withArgName("input")
+    .withLongOpt("input")
+    .withDescription(
+     "for import/annotate: the tosca template file, " +
+     "for list: an optional json filter, " +
+     "for remove: the template id")
+    .hasArgs()
+    .create('i')).addOption(
+   OptionBuilder.withArgName("labels")
+    .withLongOpt("labels")
+    .withDescription(
+     "for annotate: the ':' sepatated list of annotation labels")
+    .hasArgs()
+    .create('l'));
+
+  options.addOption(OptionBuilder.
+   withArgName("ignore")
+   .withLongOpt("ignore")
+   .isRequired(false)
+   .withDescription(
+    "for annotate: the ':' sepatated list of namespaces who's missing constructs can be ignored")
+   .hasArgs()
+   .create());
+
+
+  CommandLine line;
+  try {
+   line = parser.parse(options, theArgs);
+  } catch (ParseException exp) {
+   errLogger.log(LogLevel.ERROR, Modeled.class.getName(), exp.getMessage());
+   HelpFormatter formatter = new HelpFormatter();
+   formatter.printHelp("import", options);
+   return;
+  }
+
+  // extend the tolerated-namespace list with any user-supplied entries
+  String ignores = line.getOptionValue("ignore");
+  if (ignores != null) {
+   Collections.addAll(ignoreMissing, ignores.split(":"));
+  }
+
+  Modeled modeled = new Modeled();
+  try {
+   modeled.setNeoUri(new URI(line.getOptionValue("target")));
+  } catch (URISyntaxException urisx) {
+   errLogger.log(LogLevel.ERROR, Modeled.class.getName(), "Invalid target specification: {}", urisx);
+   return;
+  }
+
+  try {
+   loadStorageSpec();
+
+   String action = line.getOptionValue("action");
+   if ("import".equals(action)) {
+    modeled.importTemplate(line.getOptionValue("input"));
+   } else if ("annotate".equals(action)) {
+    modeled.annotateItem(line.getOptionValue("input"), line.getOptionValue("labels"));
+   } else if ("list".equals(action)) {
+    modeled.listTemplates(line.getOptionValue("input"));
+   } else if ("remove".equals(action)) {
+    modeled.removeTemplate(line.getOptionValue("input"));
+   } else {
+    HelpFormatter formatter = new HelpFormatter();
+    formatter.printHelp("import", options);
+   }
+  } catch (Exception x) {
+   // BUGFIX: log the exception object (keeps stack trace), not just its
+   // message — consistent with the URISyntaxException handler above
+   errLogger.log(LogLevel.ERROR, Modeled.class.getName(), "Action failed: {}", x);
+  }
+ }
+
+ // tracks construct/type/template name -> neo node id across import passes
+ private static Tracker<String> tracker = new Tracker<String>();
+ // tosca-schema.yaml overlaid with tosca-storage-schema.yaml; see loadStorageSpec
+ private static Map toscaStorageSpec;
+
+ /**
+  * Loads the base TOSCA schema from the classpath and overlays it, entry by
+  * entry, with the storage-specific additions from tosca-storage-schema.yaml.
+  * Each overlay key is a JXPath into the base schema; unknown paths are
+  * logged and skipped.
+  */
+ private static void loadStorageSpec() {
+  toscaStorageSpec = (Map) new Yaml().load(
+   Modeled.class.getClassLoader().getResourceAsStream("tosca-schema.yaml"));
+
+  Map storageSpec = (Map) new Yaml().load(
+   Modeled.class.getClassLoader().getResourceAsStream("tosca-storage-schema.yaml"));
+
+  JXPathContext schemaPath = JXPathContext.newContext(toscaStorageSpec);
+  for (Object e : storageSpec.entrySet()) {
+   Map.Entry<String, Object> overlay = (Map.Entry<String, Object>) e;
+   try {
+    Map schemaEntry = (Map) schemaPath.getValue(overlay.getKey());
+    if (schemaEntry == null) {
+     debugLogger.log(LogLevel.DEBUG, Modeled.class.getName(), "No schema entry '{}'", overlay.getKey());
+     continue;
+    }
+
+    schemaEntry.putAll((Map) overlay.getValue());
+   } catch (JXPathException jxpx) {
+    errLogger.log(LogLevel.WARN, Modeled.class.getName(), "Failed to apply storage info {}", jxpx);
+   }
+  }
+ }
+
+
+ // shared "no properties" sentinel passed to neoEdge/neoCreateNode.
+ // NOTE(review): JSONObject is mutable — safe only as long as no callee ever
+ // writes into it; confirm.
+ private static JSONObject EMPTY_JSON_OBJECT = new JSONObject();
+
+ // Neo4j endpoint, set once from the -t CLI option
+ private URI neoUri = null;
+
+ // instances are created only by main
+ private Modeled() {
+ }
+
+ private void setNeoUri(URI theUri) {
+  this.neoUri = theUri;
+ }
+
+ public URI getNeoUri() {
+  return this.neoUri;
+ }
+
+ /* Experimental in nature. I was reluctant creating another node to represent
+ * the set of constraints as they're integral part of the property (or other
+ * artifact) they're related to. I was also looking for a representation
+ * that would easily be processable into a TOSCA abstraction in the
+ * Catalog API. So ... we pack all the constraints as a JSON string and store
+ * them as a single property of the TOSCA artifact they belog to.
+ * Highs: easily un-winds in an object
+ * Lows: can't write query selectors based on constraints values ..
+ //the TOSCA/yaml spec exposes constraints as a List .. where each
+ //entry is a Map .. why??
+ */
+ private static String yamlEncodeConstraints(List theConstraints) {
+ Map allConstraints = new HashMap();
+ for (Object c : theConstraints) {
+ allConstraints.putAll((Map) c);
+ //this would be the place to add dedicate processing of those
+ //constraints with 'special' values, i.e. in_range: dual scalar,
+ //valid_values: list
+ }
+ return JSONObject.valueToString(allConstraints);
+ }
+
+ /* TODO: attributes handling to be added, similar to properties.
+  *
+  * Creates one TOSCA:Property node per property definition and links it to
+  * the owning node via a PROPERTY_OF edge. Entries whose value is not a Map
+  * are value assignments, not definitions, and are skipped here.
+  */
+ private void yamlNodeProperties(String theNodeId,
+                                 Map<String, Object> theProperties,
+                                 NeoTransaction theTrx)
+   throws IOException {
+
+  for (Map.Entry<String, Object> propertyEntry : theProperties.entrySet()) {
+   String propName = propertyEntry.getKey();
+   Object propObject = propertyEntry.getValue();
+
+   Map propValues;
+   if (propObject instanceof Map) {
+    propValues = (Map) propObject;
+   } else {
+    //valuation, not of interest here
+    debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoNode, unknown property representation {} for {}, node {}", propObject.getClass(), propObject, theNodeId);
+    continue;
+   }
+
+   // constraints are packed into a single JSON string (see yamlEncodeConstraints)
+   String constraintsValue = null;
+   if (propValues.containsKey("constraints")) {
+    constraintsValue = yamlEncodeConstraints(
+     (List) propValues.get("constraints"));
+   }
+
+   // TOSCA defaults: type 'string', required true
+   String neoPropId = neoCreateNode(
+    theTrx, false,
+    new JSONObject()
+     .put("name", propName)
+     .put("type", propValues.getOrDefault("type", "string"))
+     .put("required", propValues.getOrDefault("required", Boolean.TRUE))
+     .putOpt("default", propValues.get("default"))
+     .putOpt("description", propValues.get("description"))
+     .putOpt("status", propValues.get("status"))
+     .putOpt("constraints", constraintsValue),
+    "TOSCA", "Property");
+
+   neoEdge(theTrx, false,
+    neoPropId,
+    theNodeId,
+    EMPTY_JSON_OBJECT,
+    "PROPERTY_OF");
+  }
+
+ }
+
+ /**
+  * Provisions the capability declarations of a node type: one TOSCA:Capability
+  * node per entry (CAPABILITY_OF edge to the owner), plus a FEATURES edge to
+  * either an anonymous augmented type (when the capability re-declares
+  * properties) or the named capability type.
+  */
+ private void yamlNodeTypeCapabilities(String theNodeId,
+                                       Map<String, Object> theCapabilities,
+                                       NeoTransaction theTrx)
+   throws IOException {
+
+  for (Map.Entry<String, Object> capability : theCapabilities.entrySet()) {
+   String capabilityName = capability.getKey();
+   Object capabilityValue = capability.getValue();
+
+   String capabilityType = null,
+          capabilityDesc = null;
+   Map<String, Object> capabilitySpec = null;
+
+   if (capabilityValue instanceof String) {
+    //short notation was used, we get the name of a capability type
+    capabilityType = (String) capabilityValue;
+
+    capabilitySpec = Collections.singletonMap("type", capabilityType);
+   } else if (capabilityValue instanceof Map) {
+    //extended notation
+    capabilitySpec = (Map<String, Object>) capabilityValue;
+
+    capabilityType = (String) capabilitySpec.get("type");
+    //cannot be missing
+    if (capabilityType == null) {
+     //ERROR!!
+     errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoNode, missing capability type in {} for node {}", capabilitySpec, theNodeId);
+     continue; //rollback ..
+    }
+    capabilityDesc = (String) capabilitySpec.get("description");
+   } else {
+    // BUGFIX: previously an unrecognized representation left capabilitySpec
+    // null and the containsKey call below threw an NPE
+    errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoNode, unknown capability representation {} for node {}", capabilityValue, theNodeId);
+    continue;
+   }
+
+   //
+   String anonCapabilityTypeId = null;
+   if (capabilitySpec.containsKey("properties")) {
+    //we need an anonymous capability type (augmentation)
+    //or they could be added to the 'Capabillity' node but anonymous
+    //types make processing more uniform
+    anonCapabilityTypeId =
+     yamlAnonymousType(capabilitySpec,
+      capabilityType,
+//not a very nice owner string as theNodeId is cryptic (we should use
+//node name but do not have it here ..
+      theNodeId + "#" + capabilityName,
+      true,
+      false,
+      theTrx);
+   }
+
+   JSONObject capabilityDef = new JSONObject()
+    .put("name", capabilityName)
+    .putOpt("description", capabilityDesc);
+   // capabilitySpec is guaranteed non-null here (see guards above)
+   List occurrences = (List) capabilitySpec.get("occurrences");
+   if (occurrences != null) {
+    capabilityDef.put("occurrences", encodeRange(occurrences));
+   }
+   List valid_source_types = (List) capabilitySpec.get("valid_source_types");
+   if (valid_source_types != null) {
+    capabilityDef.put("validSourceTypes",
+     new JSONArray(valid_source_types));
+   }
+
+   String capabilityId = neoCreateNode(
+    theTrx, false,
+    capabilityDef,
+    "TOSCA", "Capability");
+   neoEdge(theTrx, false,
+    capabilityId,
+    theNodeId,
+    EMPTY_JSON_OBJECT,
+    "CAPABILITY_OF");
+
+   if (anonCapabilityTypeId != null) {
+    neoEdge(theTrx, false,
+     capabilityId,
+     anonCapabilityTypeId,
+     new JSONObject()
+      .put("name", capabilityName)
+      .putOpt("description", capabilityDesc),
+     "FEATURES"/* TARGETS */);
+    //no reason this one would point to a non-existing capability as we just created one
+   } else {
+    if (null == neoEdge(theTrx, false,
+     capabilityId,
+     "Type",
+     new JSONObject()
+      .put("name", capabilityType),
+     new JSONObject()
+      .put("name", capabilityName)
+      .putOpt("description", capabilityDesc),
+     "FEATURES"/* TARGETS */)) {
+     errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeCapabilities, Node {}, capability {} (id: {}) seems to point to invalid capability type: {}", theNodeId, capabilityName, capabilityId, capabilityType);
+     ignoreMissing(capabilityType);
+    }
+   }
+
+  }
+
+ }
+
+ /**
+  * Provisions the requirement declarations of a node type: one
+  * TOSCA:Requirement node per entry (REQUIREMENT_OF edge to the owner) with
+  * CAPABILITY / REQUIRES / RELATIONSHIP edges to the referenced types.
+  */
+ private void yamlNodeTypeRequirements(
+   String theNodeTypeId,
+   List<Map<String, Object>> theRequirements,
+   NeoTransaction theTrx)
+   throws IOException {
+
+  for (Map<String, Object> arequirement : theRequirements) {
+   //supposed to have only one entry
+   Map.Entry<String, Object> requirement =
+    arequirement.entrySet().iterator().next();
+
+   String requirementName = requirement.getKey();
+   Object requirementValue = requirement.getValue();
+
+   String targetNode = null,
+          targetCapability = null,
+          targetRelationship = null;
+   Map<String, Object> requirementSpec = null;
+
+   if (requirementValue instanceof String) {
+    //short form, points to a capability type
+    targetCapability = (String) requirementValue;
+   } else if (requirementValue instanceof Map) {
+    //extended notation
+    requirementSpec = (Map<String, Object>) requirementValue;
+
+    targetCapability = (String) requirementSpec.get("capability");
+    targetNode = (String) requirementSpec.get("node");
+    //this assumes a short form for the relationship specification
+    //it can actually be a map (indicating the relationship type and the
+    //additional interface definitions).
+    targetRelationship = (String) requirementSpec.get("relationship");
+   }
+
+   if (targetCapability == null) {
+    throw new IOException(theNodeTypeId + "missing capability type");
+   }
+
+   JSONObject requirementDef = new JSONObject()
+    .put("name", requirementName);
+   if (requirementSpec != null) {
+    List occurrences = (List) requirementSpec.get("occurrences");
+    if (occurrences != null) {
+     requirementDef.put("occurrences", encodeRange(occurrences));
+    }
+   }
+
+   // BUGFIX(consistency): create the requirement inside the shared
+   // transaction like every other node in this pass — the original call
+   // omitted (theTrx, false). Confirm against the neoCreateNode overloads.
+   String requirementId = neoCreateNode(
+    theTrx, false,
+    requirementDef,
+    "TOSCA", "Requirement");
+   neoEdge(theTrx, false,
+    requirementId,
+    theNodeTypeId,
+    EMPTY_JSON_OBJECT,
+    "REQUIREMENT_OF");
+
+   //we're not verifying here that this a capability type .. just a type
+   if (null == neoEdge(theTrx, false,
+    requirementId,
+    "Type",
+    new JSONObject()
+     .put("name", targetCapability),
+    EMPTY_JSON_OBJECT,
+    "CAPABILITY")) {
+    errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeRequirements, Node {}, requirement {} (id: {}) seems to point to invalid capability type: {}", theNodeTypeId, requirementName, requirementId, targetCapability);
+   }
+
+   if (targetNode != null) {
+    //points to a node type
+    if (null == neoEdge(theTrx, false,
+     requirementId,
+     "Type",
+     new JSONObject()
+      .put("name", targetNode),
+     EMPTY_JSON_OBJECT,
+     "REQUIRES")) {
+     // BUGFIX: report the node type that failed, not the capability type
+     errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeRequirements, Node {}, requirement {} (id: {}) seems to point to invalid node type: {}", theNodeTypeId, requirementName, requirementId, targetNode);
+    }
+   }
+
+   if (targetRelationship != null) {
+    //points to a relationship type
+    if (null == neoEdge(theTrx, false,
+     requirementId,
+     "Type",
+     new JSONObject()
+      .put("name", targetRelationship),
+     EMPTY_JSON_OBJECT,
+     "RELATIONSHIP")) {
+     errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeRequirements, Node {}, requirement {} (id: {}) seems to point to invalid relationship type: {}", theNodeTypeId, requirementName, requirementId, targetRelationship);
+    }
+   }
+  }
+ }
+
+ /*
+  * handles the requirement assignments (template level, as opposed to the
+  * type-level declarations handled by yamlNodeTypeRequirements). The target
+  * may be a local node template, a node type, a capability of either, and/or
+  * a relationship type.
+  */
+ private void toscaRequirementsAssignment(
+   String theNodeId,
+   List<Map<String, Object>> theRequirements,
+   NeoTransaction theTrx)
+   throws IOException {
+
+  for (Map<String, Object> arequirement : theRequirements) {
+   //supposed to have only one entry
+   Map.Entry<String, Object> requirement =
+    arequirement.entrySet().iterator().next();
+
+   String requirementName = requirement.getKey();
+   Object requirementValue = requirement.getValue();
+
+   String targetNode = null,
+          targetCapability = null,
+          targetRelationship = null;
+   //TODO: targetFilter
+
+   Map<String, Object> requirementSpec = null;
+
+   if (requirementValue instanceof String) {
+    //short notation was used, we get the name of a local node
+    targetNode = (String) requirementValue;
+   } else if (requirementValue instanceof Map) {
+    //extended notation
+    requirementSpec = (Map<String, Object>) requirementValue;
+
+    targetNode = (String) requirementSpec.get("node");
+    targetCapability = (String) requirementSpec.get("capability");
+    targetRelationship = (String) requirementSpec.get("relationship");
+   }
+
+   /* TODO: add targetFilter definition in here (most likely place)
+    */
+   String requirementId = neoCreateNode(
+    theTrx, false,
+    new JSONObject()
+     .put("name", requirementName),
+    "TOSCA", "Requirement");
+
+   neoEdge(theTrx, false,
+    requirementId,
+    theNodeId,
+    EMPTY_JSON_OBJECT,
+    "REQUIREMENT_OF");
+
+   String targetNodeTemplate = null;
+   if (targetNode != null) {
+    //check if the target is a node within the template (in which case the
+    //requirement is really defined by that node type. i.e. its type's
+    //capabilities
+    targetNodeTemplate = tracker.lookupTemplate("Node", targetNode);
+    if (targetNodeTemplate != null) {
+     neoEdge(theTrx, false,
+      requirementId,
+      targetNodeTemplate,
+      new JSONObject()
+       .put("name", requirementName),
+      "REQUIRES" /* TARGETS */);
+    } else {
+     //if not a local node template then it must be node type
+     if (null == neoEdge(theTrx, false,
+      requirementId,
+      "Type",
+      new JSONObject()
+       .put("name", targetNode),
+      EMPTY_JSON_OBJECT,
+      "REQUIRES")) {
+      errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeRequirements, Node {}, requirement {} (id: {}) seems to point to invalid node type: {}", theNodeId, requirementName, requirementId, targetNode);
+     }
+    }
+   }
+
+   if (targetCapability != null) {
+    /*
+     * Can point to a capability of the targetNode (template or type,
+     * whatever was specified) or to a capability type;
+     */
+    if (targetNode != null) {
+     // NOTE: Cypher statements are built by string concatenation from
+     // template-supplied names — acceptable only for trusted input
+     String stmt = null;
+     if (targetNodeTemplate != null) {
+      //a capability of a local node template
+      //TODO: could be a capability type of a local node (and is up to the
+      //orchestrator to pick) given that the target node has at least one
+      //capability of that type
+      stmt =
+       "MATCH (c:Capability)-[:CAPABILITY_OF]->(n:Node), (r:Requirement) " +
+       "WHERE id(n)=" + targetNodeTemplate + " " +
+       "AND c.name = \"" + targetCapability + "\" " +
+       "AND id(r)=" + requirementId + " " +
+       "MERGE (r)-[rq:REQUIRES_CAPABILITY]->(c) " +
+       "RETURN id(rq)";
+     } else {
+      //a capability of the node type
+      stmt =
+       "MATCH (c:Type:Capability)-[:CAPABILITY_OF]->(t:Type), (r:Requirement) " +
+       "WHERE t.name = \"" + targetNode + "\" " +
+       "AND c.name = \"" + targetCapability + "\" " +
+       "AND id(r)=" + requirementId + " " +
+       "MERGE (r)-[rq:REQUIRES_CAPABILITY]->(c) " +
+       "RETURN id(rq)";
+     }
+     if (null == neoId(theTrx
+       .statement(
+        new JSONObject()
+         .put("statement", stmt))
+       .execute()
+       .result())) {
+      errLogger.log(LogLevel.WARN, this.getClass().getName(), "toscaRequirementsAssignment, Node {}, requirement {} (id: {}) seems to point to invalid node capability: {}", theNodeId, requirementName, requirementId, targetCapability);
+     }
+    } else {
+     if (null == neoEdge(theTrx, false,
+      requirementId,
+      "Type",
+      new JSONObject()
+       .put("name", targetCapability),
+      EMPTY_JSON_OBJECT,
+      "REQUIRES_CAPABILITY")) {
+      errLogger.log(LogLevel.WARN, this.getClass().getName(), "toscaRequirementsAssignment, Node {}, requirement {} (id: {}) seems to point to invalid capability type: {}", theNodeId, requirementName, requirementId, targetCapability);
+     }
+    }
+   }
+
+   if (targetRelationship != null) {
+    if (null == neoEdge(theTrx, false,
+     requirementId,
+     "Type",
+     new JSONObject()
+      .put("name", targetRelationship),
+     EMPTY_JSON_OBJECT,
+     "RELATIONSHIP")) {
+     errLogger.log(LogLevel.WARN, this.getClass().getName(), "toscaRequirementsAssignment, Node {}, requirement {} (id: {}) seems to point to invalid relationship type: {}", theNodeId, requirementName, requirementId, targetRelationship);
+    }
+   } else {
+    //TODO: does the presence of properties/attributes/interfaces in the
+    //requirement definition trigger the defintion of an anonymous
+    //relationship type?? (maybe derived from the one under the
+    //'relationship_type' key, if present?)
+   }
+  }
+ }
+
+ /* an anonymous type is created from a node specification (type,template)
+  * when a construct augments its declared type (e.g. a capability that
+  * re-declares properties). The anonymous type is merged under the id
+  * "<owner>#<type>" and DERIVED_FROM its named base type, if any.
+  */
+ private String yamlAnonymousType(Map<String, Object> theInfo,
+                                  String theType,
+                                  String theOwner,
+                                  boolean doProperties,
+                                  boolean doCapabilities,
+                                  NeoTransaction theTrx)
+   throws IOException {
+
+  //is this naming scheme capable enough??NO!
+  String anonTypeId = theOwner + "#" + (theType == null ? "" : theType);
+
+  String neoAnonTypeId = neoMergeNode(
+   theTrx, false,
+   new JSONObject()
+    .put("name", anonTypeId)
+    .put("id", anonTypeId),
+   "TOSCA", "Type");
+
+  if (theType != null) {
+   neoEdge(theTrx, false,
+    neoAnonTypeId,
+    "Type",
+    new JSONObject()
+     .put("name", theType),
+    EMPTY_JSON_OBJECT,
+    "DERIVED_FROM");
+  }
+
+  //should the properties spec be passed explicitly??
+  //NOTE(review): doCapabilities is currently unused — confirm whether
+  //capability augmentation was meant to be handled here too
+  if (doProperties) {
+   Map<String, Object> props = (Map<String, Object>) theInfo.get("properties");
+   if (props != null) {
+    yamlNodeProperties(neoAnonTypeId, props, theTrx);
+   }
+  }
+
+  return neoAnonTypeId;
+ }
+
+ /*
+  * A first pass over a type spec provisions each type individually
+  * and its properties.
+  * We process here types for all constructs: data, capability, relationship,
+  * node, [interface, artifact]
+  */
+ private void toscaTypeSpec(String theConstruct,
+                            Map<String, Map> theTypes,
+                            NeoTransaction theTrx)
+   throws IOException {
+  //first pass, provision each type individually (and their properties)
+  //storage rule looked up as e.g. "_node_type_definition" in the merged spec
+  String rule = "_" + theConstruct.toLowerCase() + "_type_definition";
+  Map storageSpec = (Map) toscaStorageSpec.get(rule);
+
+  for (Map.Entry<String, Map> toscaType : theTypes.entrySet()) {
+   String typeName = toscaType.getKey();
+   Map<String, Map> typeValue = (Map<String, Map>) toscaType.getValue();
+
+   debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Type: {}", typeName);
+
+   // pack() presumably projects typeValue through the storage rule — confirm
+   JSONObject data = pack(storageSpec, typeValue)
+    .put("name", typeName)
+    .put("id", typeName);
+
+   String neoTypeId = neoMergeNode(theTrx, false, data, "TOSCA", "Type", theConstruct);
+
+   // remember the neo id so pass 2 (and templates) can resolve this type
+   tracker.trackType(theConstruct, typeName, neoTypeId);
+
+   Map<String, Object> toscaTypeProps = (Map<String, Object>) typeValue.get("properties");
+   if (toscaTypeProps != null) {
+    yamlNodeProperties(neoTypeId, toscaTypeProps, theTrx);
+   } //type props
+  } //types
+
+  // second pass: derived_from edges and capabilities
+  toscaTypePostProc(theConstruct, theTypes, theTrx);
+ }
+
/*
 * Second pass over a type spec: processes the derived_from relationship and
 * the capabilities/requirements of each type (now that all the types of the
 * first pass — including capability types — have been provisioned).
 *
 * theConstruct: TOSCA construct name ("Data", "Capability", "Node", ...)
 * theTypes: type name -> type specification, as parsed from yaml
 * theTrx: open transaction the statements are appended to (no commit here)
 */
private void toscaTypePostProc(String theConstruct,
                               Map<String, Map> theTypes,
                               NeoTransaction theTrx)
        throws IOException {
    for (Map.Entry<String, Map> typeEntry : theTypes.entrySet()) {
        Map typeValue = typeEntry.getValue();
        String typeName = typeEntry.getKey();

        //supertype and description: all types
        String superTypeName = (String) typeValue.get("derived_from");
        if (superTypeName != null) {
            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}-DERIVED_FROM->{}", typeName, superTypeName);

            if (tracker.tracksType(theConstruct, superTypeName)) {
                //supertype declared in this same document: link by its known neo4j id
                if (null == neoEdge(theTrx, false,
                        tracker.lookupType(theConstruct, typeName),
                        tracker.lookupType(theConstruct, superTypeName),
                        EMPTY_JSON_OBJECT,
                        "DERIVED_FROM")) {
                    errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, missing parent type {}, id {} for type {}, id {}", superTypeName, tracker.lookupType(theConstruct, superTypeName), typeName, tracker.lookupType(theConstruct, typeName));
                }
            } else {
                //supertype defined elsewhere: match the target Type node by name
                if (null == neoEdge(theTrx, false,
                        tracker.lookupType(theConstruct, typeName),
                        "Type",
                        new JSONObject()
                                .put("name", superTypeName),
                        new JSONObject(),
                        "DERIVED_FROM")) {
                    errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, missing parent type {} for type {}", superTypeName, typeName);
                }
            }
        }

        //requirements/capabilities: for node types
        Map<String, Object> capabilities =
                (Map<String, Object>) typeValue.get("capabilities");
        if (capabilities != null) {
            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Processing: {}", capabilities);
            yamlNodeTypeCapabilities(
                    tracker.lookupType(theConstruct, typeName), capabilities, theTrx);
        }

        List<Map<String, Object>> requirements =
                (List<Map<String, Object>>) typeValue.get("requirements");
        if (requirements != null) {
            yamlNodeTypeRequirements(
                    tracker.lookupType(theConstruct, typeName), requirements, theTrx);
        }

        //interfaces: for node types or relationship types — declared but not persisted
        Object interfaces = typeValue.get("interfaces");
        if (interfaces != null) {
            errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, Type {}: interfaces section declared but not handled", typeName);
            if (interfaces instanceof List) {
                //expect a list of interface types
            }
        }

        //valid targets: for relationship types — declared but not persisted
        List valid_targets = (List) typeValue.get("valid_targets");
        if (valid_targets != null) {
            //add as a property to the type node, can be used for validation
            //whereever this type is used
            //the list should contain node type names and we should check that we
            //have those types
            errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, Type {}: valid_targets section declared but not handled", typeName);

        }

        //artifacts — declared but not persisted
        List artifacts = (List) typeValue.get("artifacts");
        if (artifacts != null) {
            errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, Type {}: artifacts section declared but not handled", typeName);
        }

        /* Artifact types can have "mime_type" and "file_ext" sections
         */
    }
}
+
+ private void toscaTemplate(String theTopologyTemplateId,
+ String theConstruct,
+ Map<String, Object> theTemplates,
+ NeoTransaction theTrx)
+ throws IOException {
+
+ String rule = "_" + theConstruct.toLowerCase() + "_template_definition";
+ Map storageSpec = (Map) toscaStorageSpec.get(rule);
+ if (storageSpec == null) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No rule '{}', can't make up the storage specification for {}", rule, theConstruct);
+ }
+
+ for (Map.Entry<String, Object> template : theTemplates.entrySet()) {
+
+ String templateName = template.getKey();
+ Map<String, Object> templateSpec = (Map<String, Object>) template.getValue();
+
+ String templateType = (String) templateSpec.get("type");
+ if (templateType == null) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoNode, template {}'{}', does not have a type specification .. skipping", theConstruct, templateName);
+ continue;
+ }
+
+ try {
+ //we use create here as node names are not unique across templates
+ JSONObject neoTemplateNode =
+ pack(storageSpec, templateSpec)
+ .put("name", templateName);
+
+ String templateNodeId = neoCreateNode(
+ theTrx, false, neoTemplateNode, "TOSCA", theConstruct);
+
+ tracker.trackTemplate(theConstruct, templateName, templateNodeId);
+
+ neoEdge(theTrx, false,
+ templateNodeId,
+ theTopologyTemplateId,
+ new JSONObject(),
+ theConstruct.toUpperCase() + "_OF");
+
+ if (null == neoEdge(theTrx, false,
+ templateNodeId,
+ "Type",
+ new JSONObject()
+ .put("name", templateType),
+ new JSONObject(),
+ "OF_TYPE")) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlSpec, Template {}, {} {}: failed to identify type {}", theTopologyTemplateId, theConstruct, templateName, templateType);
+ }
+
+ //facets
+
+ //we handle properties for all constructs (as they all have them)
+ Map<String, Object> templateProps =
+ (Map<String, Object>) templateSpec.get("properties");
+ if (templateProps != null) {
+ for (Map.Entry<String, Object> templateProp :
+ templateProps.entrySet()) {
+ String templatePropName = templateProp.getKey();
+ Object templatePropObject = templateProp.getValue();
+
+ final Map templatePropValues;
+ if (templatePropObject instanceof Map) {
+ templatePropValues = (Map) templatePropObject;
+ } else {
+
+ //this is dealing with short form, if we ran the first 2 stages of the checker //we'd always be working on a canonical form ..
+ //
+ templatePropValues = new HashMap();
+ templatePropValues.put("value", templatePropObject);
+ }
+
+ //a node will contain the means for property valuation:
+ //straight value or a call to get_input/get_property/get_attribute
+
+ //find the property node (in the type) this valuation belongs to
+ if (templatePropValues != null) {
+
+ String propertyId =
+ neoId(
+ theTrx.statement(
+ new JSONObject()
+ .put("statement",
+ "MATCH (t:Type)-[:DERIVED_FROM*0..5]->(:Type)<-[:PROPERTY_OF]-(p:Property) " +
+ "WHERE t.name='" + templateType + "' " +
+ "AND p.name='" + templatePropName + "' " +
+ "RETURN id(p)"))
+ .execute()
+ .result()
+ );
+
+ if (propertyId == null) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlSpec, Template {}, {} template {}, property {} does not match the node type spec, skipping property", templateName, theConstruct, templateName, templatePropName);
+ continue;
+ }
+
+ //remove valuation by function: for now handle only get_input
+ String propInput = (String) templatePropValues.remove("get_input");
+
+ List constraints = (List) templatePropValues.remove("constraints");
+ if (constraints != null) {
+ //flattening
+ templatePropValues.put("constraints",
+ yamlEncodeConstraints(constraints));
+ }
+
+ Object val = templatePropValues.remove("value");
+ //check if the value is a collection or user defined data type, the cheap way
+ if (val instanceof List ||
+ val instanceof Map) {
+ /* An interesting option here:
+ * 1. store the whole flatten value under the 'value' property
+ templatePropValues.put("value", JsonFlattener.flatten(JsonObject.valueToString(val)));
+ Simpler but almost impossible to write queries based on property value
+ * 2. store each entry in the flatten map as a separate property (we prefix it with 'value' for
+ * clarity).
+ * see below
+ */
+ /*
+ JsonFlattener.flattenAsMap(JSONObject.valueToString(Collections.singletonMap("value",val)))
+ .entrySet()
+ .stream()
+ .forEach(e -> templatePropValues.put(e.getKey(), e.getValue()));
+ */
+ //simply stores a collection in its (json) string representation. Cannot be used if
+ //queries are necessary based on the value (on one of its elements).
+ templatePropValues.put("value", JSONObject.valueToString(val));
+ } else {
+ /* scalar, store as such */
+ templatePropValues.put("value", val);
+ }
+
+ String templatePropValueId =
+ neoCreateNode(
+ theTrx, false,
+ new JSONObject(templatePropValues),
+ "TOSCA", /*"Property",*/ "Assignment");
+
+ neoEdge(theTrx, false,
+ templatePropValueId,
+ templateNodeId,
+ new JSONObject(),
+ "OF_TEMPLATE");
+
+ neoEdge(theTrx, false,
+ templatePropValueId,
+ propertyId,
+ new JSONObject(),
+ "OF_" + theConstruct.toUpperCase() + "_PROPERTY");
+
+ if (propInput != null) {
+ String inputId = tracker.lookupTemplate("Input", propInput);
+ if (inputId == null) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoNode, Template {},node {}, property {} input {} not found", theTopologyTemplateId, templateName, templatePropName, propInput);
+ }
+
+ neoEdge(theTrx, false,
+ templatePropValueId,
+ inputId,
+ new JSONObject(),
+ "GET_INPUT");
+ }
+ }
+ }
+ }
+ tracker.trackTemplate(theConstruct, templateName, templateNodeId);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} template {} of type {}", theConstruct, templateName, templateType);
+ } catch (IOException iox) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "toscaTemplate, Failed to persist template {}", iox);
+ throw iox;
+ }
+ }
+ }
+
+ /* while we persist basic type values inline (in the assigment node) we store complex values
+ * in a graph of their own.
+ * We handle the neo4j 'limitation' stated below
+ * Neo4j can only store collections (map, list) of basic types.
+ *
+ * User defined data types can created undefinitely nested strctures of collections.
+ * We could store collections of basic types inline but it would make for a less uniform structure.
+ */
+ private void toscaPropertyAssignment(
+ String theAssignmentId,
+ Object theValue,
+ NeoTransaction theTrx)
+ throws IOException {
+ //look the grammar rules to see if we inline (stringify) or not
+
+ if (theValue instanceof Map) {
+ //a map type property or a user-defined datatype
+ Map<String, Object> elements = (Map<String, Object>) theValue;
+ for (Map.Entry element : elements.entrySet()) {
+
+ String elementId = neoCreateNode(theTrx, false,
+ new JSONObject().
+ put("name", element.getKey()),
+ "TOSCA", "Data", "Element");
+
+ neoEdge(theTrx, false,
+ elementId,
+ theAssignmentId,
+ EMPTY_JSON_OBJECT,
+ "ELEMENT_OF");
+
+ toscaPropertyAssignment(elementId, element.getValue(), theTrx);
+ }
+ } else if (theValue instanceof List) {
+ //a list type property
+ for (int i = 0; i < ((List) theValue).size(); i++) {
+
+ String elementId = neoCreateNode(theTrx, false,
+ new JSONObject().
+ put("pos", i),
+ "TOSCA", "Data", "Element");
+
+ neoEdge(theTrx, false,
+ elementId,
+ theAssignmentId,
+ EMPTY_JSON_OBJECT,
+ "ELEMENT_OF");
+
+ toscaPropertyAssignment(elementId, ((List) theValue).get(i), theTrx);
+ }
+
+ //update theAssignment with a length property
+ neoNodeProperties(theTrx, false, theAssignmentId,
+ new JSONObject().
+ put("length", ((List) theValue).size()));
+ } else {
+ //update the assignment with a 'value' attribute
+ neoNodeProperties(theTrx, false, theAssignmentId,
+ new JSONObject().
+ put("value", theValue));
+ }
+ }
+
/*
 * Persists the valuations given to the properties of one capability of one
 * node template. We only handle properties for now so these are assumed to be
 * property assignments.
 *
 * theNodeTemplateId: neo4j id of the node template owning the capability
 * theCapabilityName: name of the capability being assigned
 * theValuations: property name -> value (or value spec map)
 * theTrx: open transaction the statements are appended to (no commit here)
 *
 * Throws IOException when a (capability, property) pair cannot be matched
 * against the node's type specification.
 */
private void toscaCapabilityAssignment(
        String theNodeTemplateId,
        String theCapabilityName,
        Map<String, Object> theValuations,
        NeoTransaction theTrx)
        throws IOException {

    for (Map.Entry<String, Object> valuation : theValuations.entrySet()) {
        String propertyName = valuation.getKey();
        Object propertyValueSpec = valuation.getValue();

        Map propertyValue = null;
        if (propertyValueSpec instanceof Map) {
            propertyValue = (Map) propertyValueSpec;
        } else {
            //this is dealing with short form, if we ran the first 2 stages of
            //the checker we'd always be working on a canonical form ..
            propertyValue = new HashMap();
            propertyValue.put("value", propertyValueSpec);
        }

        //we need to link the assignment to the node template, the capability
        //and the property of the capability type (a node can have multiple
        //capabilities of the same type).
        //NOTE(review): capability/property names are concatenated straight into
        //the Cypher text — fine for trusted templates, but confirm inputs are
        //trusted or switch to statement parameters.
        String[] ids =
                neoIds(
                        theTrx.statement(
                                new JSONObject()
                                        .put("statement",
                                                "MATCH (n:Node)-[:OF_TYPE]->(:Node:Type)<-[:CAPABILITY_OF]-(c:Capability)-[:FEATURES]->(:Capability:Type)-[:DERIVED_FROM*0..5]->(:Capability:Type)<-[:PROPERTY_OF]-(p:Property) " +
                                                        "WHERE id(n) = " + theNodeTemplateId + " " +
                                                        "AND c.name = '" + theCapabilityName + "' " +
                                                        "AND p.name = '" + propertyName + "' " +
                                                        "RETURN id(p), id(c)"))
                                .execute()
                                .result());

        if (ids == null) {
            throw new IOException("toscaCapabilityAssignment: " +
                    "node template " + theNodeTemplateId + ", " +
                    "capability " + theCapabilityName + ", " +
                    "property " + propertyName +
                    " does not match the node type spec");
        }

        /* this node represents the assignment of a value to a capability property
         * hence my doubts about hoe to label it ['Assignment', 'Property'] or ['Assignment','Capability']
         * I am inclined towards the second option as there is no other capability assignment in itself.
         */
        String assignmentId =
                neoCreateNode(
                        theTrx, false,
                        new JSONObject(propertyValue),
                        "TOSCA", /*Capability,*/"Assignment");

        //link to the owning node template
        neoEdge(theTrx, false,
                assignmentId,
                theNodeTemplateId,
                new JSONObject(),
                "OF_TEMPLATE");

        //ids[1] is id(c): the capability instance
        neoEdge(theTrx, false,
                assignmentId,
                ids[1],
                new JSONObject(),
                "OF_CAPABILITY");

        //ids[0] is id(p): the capability type's property definition
        neoEdge(theTrx, false,
                assignmentId,
                ids[0],
                new JSONObject(),
                "OF_CAPABILITY_PROPERTY");
    }
}
+
+ /*
+ *
+ * */
+ private void importTemplate(String thePath) throws IOException {
+ try (FileInputStream input = new FileInputStream(thePath)){
+ for (Object yaml : new Yaml().loadAll(input)) {
+ toscaSpec((Map) yaml);
+ }
+ }
+ }
+
+ private void toscaSpec(Map theSpec) throws IOException {
+
+ // type specifications
+ // at this time we do not record the relation between a type and the
+ // template it was defined in.
+
+ NeoTransaction trx = new NeoTransaction(this.neoUri);
+ try {
+ {
+ Map<String, Map> types = (Map<String, Map>) theSpec.get("data_types");
+ if (types != null) {
+ toscaTypeSpec("Data", types, trx);
+ }
+
+ types = (Map<String, Map>) theSpec.get("capability_types");
+ if (types != null) {
+ toscaTypeSpec("Capability", types, trx);
+ }
+
+ types = (Map<String, Map>) theSpec.get("relationship_types");
+ if (types != null) {
+ toscaTypeSpec("Relationship", types, trx);
+ }
+
+ types = (Map<String, Map>) theSpec.get("node_types");
+ if (types != null) {
+ toscaTypeSpec("Node", types, trx);
+ }
+
+ types = (Map<String, Map>) theSpec.get("policy_types");
+ if (types != null) {
+ toscaTypeSpec("Policy", types, trx);
+ }
+ }
+
+ Map<String, Map> topologyTemplate = (Map<String, Map>)
+ theSpec.get("topology_template");
+ if (topologyTemplate != null) {
+
+ Map<String, Object> metadata = (Map<String, Object>) theSpec.get("metadata");
+ if (metadata == null) {
+ throw new IOException("Missing metadata, cannot register template");
+ }
+ String templateName = (String) metadata.get("template_name");
+ String templateId = neoMergeNode(
+ trx, false,
+ new JSONObject()
+ .put("name", templateName)
+ .putOpt("description", (String) theSpec.get("description"))
+ .putOpt("version", (String) metadata.get("template_version"))
+ .putOpt("author", (String) metadata.get("template_author"))
+ .putOpt("scope", (String) metadata.get("scope")),
+ "TOSCA", "Template");
+
+ /* inputs */
+ Map<String, Map> toscaInputs = (Map) topologyTemplate.get("inputs");
+ if (toscaInputs != null) {
+ for (Map.Entry<String, Map> toscaInput : toscaInputs.entrySet()) {
+ //we use create here as input names are not unique across templates
+ //also, constraints require special encoding
+ Map toscaInputSpec = toscaInput.getValue();
+
+ List constraints = (List) toscaInputSpec.remove("constraints");
+ if (constraints != null) {
+ //flattening
+ toscaInputSpec.put("constraints",
+ yamlEncodeConstraints(constraints));
+ }
+ String neoInputNodeId =
+ neoCreateNode(
+ trx, false,
+ new JSONObject(toscaInputSpec)
+ .put("name", toscaInput.getKey())
+ .putOpt("type", toscaInputSpec.get("type")),
+ "TOSCA", "Input");
+
+ tracker.trackTemplate(
+ "Input", (String) toscaInput.getKey(), neoInputNodeId);
+
+ neoEdge(trx, false,
+ neoInputNodeId,
+ templateId,
+ new JSONObject(),
+ "INPUT_OF");
+ }
+ }
+
+ /*
+ * The main issue that I have here is with the defintion given to each
+ * section (properties, capabilities, requirements ..) of a Node template:
+ * they are said to 'augment' the information provided in its Node Type but
+ * without specifying the semantics of 'augment'. Can new properties be
+ * added? can interface specification contain new operations?
+ */
+ Map<String, Object> toscaNodes = (Map) topologyTemplate.get("node_templates");
+ if (toscaNodes != null) {
+ toscaTemplate(templateId, "Node", toscaNodes, trx);
+
+ //now that all nodes are in we need a second path over the nodes set in
+ //order to handle the capabilities, requirements ..
+
+ for (Map.Entry<String, Object> toscaNode : toscaNodes.entrySet()) {
+
+ String toscaNodeName = toscaNode.getKey();
+ Map<String, Object> toscaNodeValues = (Map<String, Object>) toscaNode.getValue();
+
+ Map<String, Map> capabilities =
+ (Map<String, Map>) toscaNodeValues.get("capabilities");
+ if (capabilities != null) {
+ for (Map.Entry<String, Map> capability : capabilities.entrySet()) {
+ Map<String, Map> assignments = (Map<String, Map>) capability.getValue();
+ Map<String, Object> propertiesAssignments =
+ assignments.get("properties");
+ if (propertiesAssignments != null) {
+ toscaCapabilityAssignment(
+ tracker.lookupTemplate("Node", toscaNodeName),
+ capability.getKey(),
+ propertiesAssignments,
+ trx);
+ }
+ }
+ }
+
+ List<Map<String, Object>> requirements = (List<Map<String, Object>>)
+ toscaNodeValues.get("requirements");
+ if (requirements != null) {
+ toscaRequirementsAssignment(
+ tracker.lookupTemplate("Node", toscaNodeName), requirements, trx);
+ }
+
+ //interfaces
+ }
+ }
+
+ List toscaPolicies = (List) topologyTemplate.get("policies");
+ if (toscaPolicies != null) {
+ for (Object toscaPolicy : toscaPolicies) {
+ toscaTemplate(templateId, "Policy", (Map<String, Object>) toscaPolicy, trx);
+ }
+ }
+
+ Map<String, Map> toscaOutputs = (Map) topologyTemplate.get("outputs");
+ if (toscaOutputs != null) {
+ for (Map.Entry<String, Map> toscaOutput : toscaOutputs.entrySet()) {
+ Object outputValue = toscaOutput.getValue().get("value");
+ if (outputValue instanceof Map) { //shouldn't I be doing this in all cases??
+ outputValue = JSONObject.valueToString((Map) outputValue);
+ }
+
+ String neoOutputNodeId = neoCreateNode(
+ trx, false,
+ new JSONObject()
+ .put("name", (String) toscaOutput.getKey())
+ .putOpt("description", (String) toscaOutput.getValue().get("description"))
+ .put("value", outputValue.toString()),
+ "TOSCA", "Output");
+
+ neoEdge(trx, false,
+ neoOutputNodeId,
+ templateId,
+ new JSONObject(),
+ "OUTPUT_OF");
+ }
+ }
+
+ //if this is a service template look for its type mapping specification
+ Map<String, Object> substitutionSpec =
+ (Map<String, Object>) theSpec.get("substitution_mappings");
+ if (substitutionSpec != null) {
+
+ String nodeType = (String) substitutionSpec.get("node_type");
+ if (nodeType != null) {
+ neoEdge(trx, false,
+ templateId,
+ "Type",
+ new JSONObject()
+ .put("name", nodeType),
+ new JSONObject(),
+ "SUBSTITUTES");
+ } else {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoProc, Template {} substitution_mapping is missing a node_type in spec: {}", templateName, substitutionSpec);
+ }
+
+ //process the rest of the mapping definition
+ } else {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoProc, Template {} does not have a substitution mapping", templateName);
+ }
+
+ //try to connect template to catalog item if information was provided
+ //
+ String catalogItemSelector = (String) metadata.get("asc_catalog");
+ if (catalogItemSelector != null) {
+ if (null == neoEdge(trx, false,
+ templateId,
+ "CatalogItem",
+ new JSONObject(catalogItemSelector),
+ new JSONObject(),
+ "MODEL_OF")) {
+ throw new IOException("No such catalog item: " + catalogItemSelector);
+ }
+ }
+ }
+ trx.commit();
+ } catch (IOException iox) {
+ try {
+ trx.rollback();
+ } catch (IOException riox) {
+ errLogger.log(LogLevel.ERROR, Modeled.class.getName(), riox.getMessage());
+ }
+ throw iox;
+ }
+ }
+
+ private void annotateItem(String thePath, String theLabels) throws IOException {
+
+ if (theLabels == null) {
+ throw new IOException("Labels ??");
+ }
+
+ try (FileInputStream input = new FileInputStream(thePath)){
+ for (Object yaml : new Yaml().loadAll(input)) {
+ annotateItem((Map) yaml, theLabels);
+ }
+ }
+ }
+
+ private void annotateItem(Map theSpec, String theLabels) throws IOException {
+
+ Map<String, Object> metadata = (Map<String, Object>) theSpec.get("metadata");
+ if (metadata == null) {
+ throw new IOException("Missing metadata, cannot register template");
+ }
+
+ String catalogItemSelector = (String) metadata.remove("asc_catalog");
+ if (catalogItemSelector == null) {
+ throw new IOException("Missing item selector");
+ }
+
+ JSONObject annotation = new JSONObject();
+ for (Map.Entry<String, Object> e : metadata.entrySet()) {
+ String key = e.getKey();
+ if (key.startsWith("asc_")) {
+ annotation.put(key.substring(4, key.length()), e.getValue());
+ }
+ }
+
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "annotation: " + annotation);
+
+ NeoTransaction trx = new NeoTransaction(this.neoUri);
+ try {
+ String id = neoCreateNode(trx, false, annotation, ("Annotation:" + theLabels).split(":"));
+ if (id == null) {
+ throw new IOException("No such catalog item: " + catalogItemSelector);
+ }
+
+ id = neoEdge(trx, false,
+ id,
+ "CatalogItem",
+ new JSONObject(catalogItemSelector),
+ new JSONObject(),
+ "ANNOTATION_OF");
+ if (id == null) {
+ throw new IOException("No such catalog item: " + catalogItemSelector);
+ }
+
+ trx.commit();
+ } catch (IOException iox) {
+ try {
+ trx.rollback();
+ } catch (IOException riox) {
+ errLogger.log(LogLevel.ERROR, this.getClass().getName(), riox.getMessage());
+ }
+ throw iox;
+ }
+ }
+
+ private void listTemplates(String theSelector) throws IOException {
+
+ JSONObject selector = null;
+
+ if (theSelector != null) {
+ selector = new JSONObject(theSelector);
+ }
+
+ NeoTransaction trx = new NeoTransaction(this.neoUri);
+
+ JSONObject res = trx.statement(new JSONObject()
+ .put("statement",
+ "MATCH (t:TOSCA:Template" +
+ (selector != null ? neoLiteralMap(selector) : "") + ") RETURN t, id(t)")
+ .put("parameters",
+ new JSONObject()
+ .put("props", selector != null ? selector : new JSONObject())))
+ .commit()
+ .result();
+
+ JSONArray data = res
+ .getJSONArray("results")
+ .getJSONObject(0)
+ .getJSONArray("data");
+ if (data.length() == 0) {
+ return;
+ }
+
+ for (int i = 0; i < data.length(); i++) {
+ JSONArray row = data.getJSONObject(i)
+ .getJSONArray("row");
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}: {}", row.getInt(1), row.getJSONObject(0));
+ }
+ }
+
+
/*
 * Deletes a TOSCA Template node and everything belonging to it.
 * First collects the ids of all nodes pointing towards the template (its
 * elements), then DETACH DELETEs them one by one, in reverse result order,
 * committing only after all deletes succeeded; rolls back on failure.
 *
 * theId: neo4j node id of the template (concatenated into the Cypher text —
 * callers are expected to pass a trusted numeric id).
 */
private void removeTemplate(String theId) throws IOException {

    //find the nodes to delete and then use 'detach delete'

    NeoTransaction trx = new NeoTransaction(this.neoUri);

    try {
        //Template elements are never more then three hops away and point towards the template
        JSONObject res = trx.statement(new JSONObject()
                .put("statement",
                        "MATCH (t:TOSCA:Template)<-[*0..3]-(x) " +
                                "WHERE id(t)=" + theId + " RETURN {labels:labels(x),id:id(x)} as tgt"))
                .execute()
                .result();

        JSONArray data = res
                .getJSONArray("results")
                .getJSONObject(0)
                .getJSONArray("data");
        if (data.length() == 0) {
            //nothing matched: no template with this id (or nothing attached)
            return;
        }

        //reverse order: delete the farthest elements first, the template last
        for (int i = data.length() - 1; i >= 0; i--) {
            JSONArray row = data.getJSONObject(i)
                    .getJSONArray("row");
            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "> {}", row.getJSONObject(0));

            //double check


            res = trx.statement(new JSONObject()
                    .put("statement",
                            "MATCH (n) " +
                                    "WHERE id(n)=" + row.getJSONObject(0).getInt("id") + " " +
                                    "DETACH DELETE n"))
                    .execute()
                    .result();

            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "> {}", res);
        }

        trx.commit();
    } catch (IOException iox) {
        try {
            trx.rollback();
        } catch (IOException riox) {
            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Rollback failed: {}", riox);
        }
        throw iox;
    }
}
+
+ /*
+ */
+ private static void ignoreMissing(String theTarget) throws IOException {
+
+ for (String prefix : ignoreMissing) {
+ //make sure they are only one name element away
+ if ((theTarget.startsWith(prefix)) && (theTarget.substring(prefix.length()).lastIndexOf('.') == 0)) {
+ return;
+ }
+ }
+
+ throw new IOException("Not configured to ignore missing " + theTarget);
+ }
+
+ private static JSONArray encodeRange(List theRange) throws IOException {
+ JSONArray range = new JSONArray();
+ for (Object value : theRange) {
+ if (value instanceof Number) {
+ range.put(((Number) value).intValue());
+ } else if (value instanceof String &&
+ "UNBOUNDED".equals(value)) {
+ range.put(Integer.MAX_VALUE);
+ } else {
+ throw new IOException("Unexpected value in range definition: " + value);
+ }
+ }
+ return range;
+ }
+
/* Convenience overload: renders the properties as a Cypher literal map bound
 * to the default statement parameter name "props". */
private static String neoLiteralMap(JSONObject theProps) {
    return neoLiteralMap(theProps, "props");
}
+
+ private static String neoLiteralMap(JSONObject theProps, String theArg) {
+ if (theProps.length() == 0) {
+ return "";
+ }
+ StringBuilder sb = new StringBuilder("");
+ for (Iterator i = theProps.keys(); i.hasNext(); ) {
+ String key = (String) i.next();
+ sb.append("`")
+ .append(key)
+ .append("`: {")
+ .append(theArg)
+ .append("}.`")
+ .append(key)
+ .append("`,");
+ }
+ return "{ " + sb.substring(0, sb.length() - 1) + " }";
+ }
+
+ private static String neoLabelsString(int theStartPos, String... theLabels) {
+ StringBuffer lbls = new StringBuffer("");
+ for (int i = theStartPos; i < theLabels.length; i++) {
+ lbls.append(":")
+ .append(theLabels[i]);
+ }
+ return lbls.toString();
+ }
+
/* Creates a node (CREATE semantics) in a dedicated, self-committing
 * transaction. Returns the new node's neo4j id. */
private String neoCreateNode(
        JSONObject theProperties,
        String... theLabels) throws IOException {
    return neoNode("CREATE", theProperties, theLabels);
}
+
/* Executes the (up to 2) statements required to construct a node
   in a dedicated transaction: the transaction is created here, committed by
   the delegate (doCommit=true) on success, and rolled back on failure
   (rollback errors are logged and the original exception is rethrown). */
private String neoNode(
        String theVerb,
        JSONObject theProperties,
        String... theLabels) throws IOException {
    NeoTransaction trx = new NeoTransaction(this.neoUri);
    try {
        return neoNode(trx, true,
                theVerb, theProperties, theLabels);
    } catch (IOException iox) {
        try {
            trx.rollback();
        } catch (IOException ioxx) {
            //best-effort rollback: log and surface the original failure
            errLogger.log(LogLevel.ERROR, Modeled.class.getName(), ioxx.getMessage());
        }
        throw iox;
    }
}
+
/* Creates a node (CREATE semantics) inside the given transaction, committing
 * it only when doCommit is true. Returns the new node's neo4j id. */
private String neoCreateNode(
        NeoTransaction theTransaction,
        boolean doCommit,
        JSONObject theProperties,
        String... theLabels) throws IOException {
    return neoNode(theTransaction, doCommit, "CREATE", theProperties, theLabels);
}
+
/* Creates-or-reuses a node (MERGE semantics) inside the given transaction,
 * committing it only when doCommit is true. Returns the node's neo4j id. */
private String neoMergeNode(
        NeoTransaction theTransaction,
        boolean doCommit,
        JSONObject theProperties,
        String... theLabels) throws IOException {
    return neoNode(theTransaction, doCommit, "MERGE", theProperties, theLabels);
}
+
/* Execute the statements required to construct a node as part of the
   given transaction.
   Two statements may be needed: the CREATE/MERGE itself (with the first
   label and a parameterized literal map) returning id(n), then a SET adding
   the remaining labels. The second statement is only queued here; both are
   flushed by the final execute(doCommit).
   Returns the node's neo4j id. */
private String neoNode(
        NeoTransaction theTransaction,
        boolean doCommit,
        String theVerb,
        JSONObject theProperties,
        String... theLabels) throws IOException {

    debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoNode {}", new Object[]{theProperties, theLabels});

    JSONObject node;
    String nodeId;

    //first statement: CREATE/MERGE with the primary label, properties bound
    //through the "props" parameter (see neoLiteralMap)
    node = theTransaction
            .statement(
                    new JSONObject()
                            .put("statement",
                                    theVerb + " (n:" + theLabels[0] + neoLiteralMap(theProperties) + " ) RETURN id(n)")
                            .put("parameters",
                                    new JSONObject()
                                            .put("props", theProperties)))
            .execute()
            .result();


    nodeId = neoId(node);
    debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoNode, node: {}", nodeId);

    //second statement (queued): attach any additional labels to the new node
    if (theLabels.length > 1) {
        theTransaction.statement(
                new JSONObject()
                        .put("statement",
                                "START n=node(" + nodeId + ") SET n " + neoLabelsString(1, theLabels)));
    }
    theTransaction.execute(doCommit);

    return nodeId;
}
+
/* Adds/overwrites properties on an existing node (SET n+=), identified by
 * its neo4j id; properties are bound through the "props" statement
 * parameter. Commits only when doCommit is true. */
private void neoNodeProperties(
        NeoTransaction theTransaction,
        boolean doCommit,
        String theId,
        JSONObject theProperties) throws IOException {

    debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoNodeProperties {}", new Object[]{theId, theProperties});
    theTransaction
            .statement(
                    new JSONObject()
                            .put("statement",
                                    "START n=node(" + theId + ") SET n+= " +
                                            neoLiteralMap(theProperties) + " RETURN id(n)")
                            .put("parameters",
                                    new JSONObject()
                                            .put("props", theProperties)))
            .execute(doCommit);
}
+
/* MERGEs a relationship (first label, given properties) between two nodes
 * identified by their neo4j ids. Returns the relationship's id, or null when
 * the statement matched nothing. */
private String neoEdge(
        NeoTransaction theTransaction,
        boolean doCommit,
        String theFrom, String theTo,
        JSONObject theProperties,
        String... theLabels) throws IOException {

    debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoEdge: {}", new Object[]{theFrom, theTo, theProperties, theLabels});

    return neoEdge(
            theTransaction, doCommit,
            new JSONObject()
                    .put("statement",
                            "START a=node(" + theFrom + "),b=node(" + theTo + ") " +
                                    "MERGE (a)-[r:" + theLabels[0] + neoLiteralMap(theProperties) + "]->(b) " +
                                    "RETURN id(r)")
                    .put("parameters",
                            new JSONObject()
                                    .put("props", theProperties)));
}
+
/* MERGEs a relationship from a node identified by id towards a node matched
 * by label and properties (bound through the "toProps" parameter). Returns
 * the relationship's id, or null when no target matched. */
private String neoEdge(
        NeoTransaction theTransaction, boolean doCommit,
        String theFromId,
        String theToLabel, JSONObject theToProps,
        JSONObject theProperties,
        String... theLabels) throws IOException {

    return neoEdge(theTransaction, doCommit,
            new JSONObject()
                    .put("statement",
                            //"START a=node(" + theFromId + ") " +
                            "MATCH (a),(b:" + theToLabel + neoLiteralMap(theToProps, "toProps") + ") " +
                                    "WHERE id(a)=" + theFromId + " " +
                                    "MERGE (a)-[r:" + theLabels[0] + neoLiteralMap(theProperties) + "]->(b) " +
                                    "RETURN id(r)")
                    .put("parameters",
                            new JSONObject()
                                    .put("toProps", theToProps)
                                    .put("props", theProperties)));
}
+
+ private String neoEdge(NeoTransaction theTransaction,
+ boolean doCommit,
+ JSONObject theEdgeStatement)
+ throws IOException {
+
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoEdge {}", new Object[]{theEdgeStatement});
+
+ return neoId(
+ theTransaction
+ .statement(theEdgeStatement)
+ .execute(doCommit)
+ .result()
+ );
+ }
+
+ private static String neoId(JSONObject theResult) throws IOException {
+ try {
+ JSONArray data = theResult
+ .getJSONArray("results")
+ .getJSONObject(0)
+ .getJSONArray("data");
+ if (data.length() == 0) {
+ return null;
+ }
+
+ return String.valueOf(
+ data.getJSONObject(0)
+ .getJSONArray("row")
+ .getInt(0));
+ } catch (JSONException jsonx) {
+ errLogger.log(LogLevel.WARN, Modeled.class.getName(), "neoId, No 'id' in result: {} {}", theResult, jsonx);
+ throw new IOException("no 'id' in result", jsonx);
+ }
+ }
+
+ /* Extracts all ids from the first data row of a neo4j transactional-endpoint
+  * response (e.g. a statement ending in "RETURN id(a),id(b)"), rendered as
+  * strings. Returns an empty array when the statement matched nothing;
+  * throws IOException when the response does not have the expected layout. */
+ private static String[] neoIds(JSONObject theResult) throws IOException {
+     try {
+         JSONArray data = theResult
+                 .getJSONArray("results")
+                 .getJSONObject(0)
+                 .getJSONArray("data");
+         if (data.length() == 0) {
+             return new String[]{};
+         }
+
+         JSONArray array = data.getJSONObject(0)
+                 .getJSONArray("row");
+
+         String[] res = new String[array.length()];
+         for (int i = 0; i < array.length(); i++) {
+             res[i] = String.valueOf(array.getInt(i));
+         }
+         return res;
+     } catch (JSONException jsonx) {
+         // fixed copy/paste defect: log under this method's own name so
+         // failures are not misattributed to neoId
+         errLogger.log(LogLevel.WARN, Modeled.class.getName(), "neoIds, No 'id' in result: {} {}", theResult, jsonx);
+         throw new IOException("no 'id' in result", jsonx);
+     }
+ }
+
+ /* Minimal client for the neo4j HTTP transactional endpoint
+  * (POST <host>/db/data/transaction). Statements are queued locally via
+  * statement(..) and shipped in batches by execute()/commit(); rollback()
+  * DELETEs the server-side transaction. A null 'client' field marks the
+  * transaction as completed; instances are not thread-safe. */
+ private static class NeoTransaction {
+
+ private HttpClient client = null;
+ private String uri = null;
+ private String auth = null;
+ private JSONObject result = null;
+ private JSONArray stmts = new JSONArray();
+
+ /* Builds the transaction endpoint uri from the target's scheme/host/port
+  * and, when user info is present, a pre-computed Basic auth header value. */
+ NeoTransaction(URI theTarget) {
+
+ client = httpClientBuilder.build();
+ this.uri = theTarget.getScheme() + "://" + theTarget.getHost() + ":" + theTarget.getPort() + "/db/data/transaction";
+
+ String userInfo = theTarget.getUserInfo();
+ if (userInfo != null) {
+ this.auth = "Basic " + new String(
+ Base64.encodeBase64(
+ userInfo.getBytes(Charset.forName("ISO-8859-1"))));
+ }
+ }
+
+ /* adds a statement to the next execution cycle */
+ NeoTransaction statement(JSONObject theStatement) {
+ if (this.client == null) {
+ throw new IllegalStateException("Transaction was completed");
+ }
+ this.stmts.put(theStatement);
+ return this;
+ }
+
+ /* executes all pending statements but does not commit the transaction */
+ /* executing a transaction with no statements refreshes the transaction timer in order to keep the transaction alive */
+ NeoTransaction execute() throws IOException {
+ if (this.client == null) {
+ throw new IllegalStateException("Transaction was completed");
+ }
+ post(this.uri);
+ return this;
+ }
+
+ /* executes all pending statements and commits the transaction */
+ NeoTransaction commit() throws IOException {
+ if (this.client == null) {
+ throw new IllegalStateException("Transaction was completed");
+ }
+ post(this.uri + "/commit");
+ //mark the transaction as terminated
+ this.client = null;
+ return this;
+ }
+
+ /* just to simplify some code written on top of NeoTransaction */
+ NeoTransaction execute(boolean doCommit) throws IOException {
+ return doCommit ? commit() : execute();
+ }
+
+ /* POSTs all queued statements as one {"statements":[..]} payload;
+  * run(..) clears the queue on success. */
+ private void post(String theUri) throws IOException {
+ HttpPost post = new HttpPost(theUri);
+ JSONObject payload = new JSONObject()
+ .put("statements", this.stmts);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "post> " + payload);
+ post.setEntity(new StringEntity(payload.toString(),
+ ContentType.APPLICATION_JSON));
+ run(post);
+ }
+
+ /* rollbacks the transaction changes */
+ /* NOTE(review): unlike commit(), rollback() does not null out 'client',
+  * so the instance is still usable afterwards — confirm this is intended. */
+ NeoTransaction rollback() throws IOException {
+ if (this.client == null) {
+ throw new IllegalStateException("Transaction was completed");
+ }
+ if (this.uri == null) {
+ throw new IllegalStateException("Transaction not started");
+ }
+ run(new HttpDelete(this.uri));
+ return this;
+ }
+
+ /* retrieve the (raw) results of the last execute/commit cycle */
+ JSONObject result() {
+ return this.result;
+ }
+
+ /* Sends the request with accept/auth headers, parses the JSON response into
+  * 'result', fails on HTTP >= 300 or on neo4j-reported statement errors, and
+  * rebinds 'uri' to the server-assigned transaction location when present. */
+ private void run(HttpUriRequest theRequest) throws IOException {
+ theRequest.setHeader(HttpHeaders.ACCEPT, "application/json; charset=UTF-8");
+ if (this.auth != null) {
+ theRequest.setHeader(HttpHeaders.AUTHORIZATION, this.auth);
+ }
+
+ HttpResponse response = this.client.execute(theRequest);
+ int statusCode = response.getStatusLine().getStatusCode();
+ if (statusCode >= 300) {
+ // best-effort capture of the error body for callers inspecting result()
+ try {
+ this.result = new JSONObject(IOUtils.toString(response.getEntity().getContent(), "UTF-8"));
+ } catch (Exception x) {
+ errLogger.log(LogLevel.ERROR, Modeled.class.getName(), x.getMessage());
+ }
+ throw new IOException("Neo statement(s) '" + this.stmts + "' failed: " + response.getStatusLine());
+ }
+
+ try {
+ this.result = new JSONObject(
+ IOUtils.toString(response.getEntity().getContent(), "UTF-8"));
+ } catch (Exception x) {
+ throw new IOException("no json in response", x);
+ }
+
+ // a 2xx response can still carry per-statement errors in the body
+ JSONArray errors = this.result.getJSONArray("errors");
+ if (errors.length() > 0) {
+ throw new IOException("Neo statement(s) '" + this.stmts + "' have errors: " + errors);
+ }
+ //we only get a header if this was not a one statement transaction
+ Header hdr = response.getFirstHeader("Location");
+ if (hdr != null) {
+ if (!hdr.getValue().startsWith(this.uri)) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "new transaction location?? : {} vs. {}", this.uri, hdr.getValue());
+ }
+ this.uri = hdr.getValue();
+ }
+ this.stmts = new JSONArray();
+ }
+ }
+
+ /* Packs the storable facets of a construct definition into a JSONObject,
+  * guided by the modeling rule for that construct. Facets with storage
+  * "none" and facets of type "map" (cross-references / sub-constructs) are
+  * skipped; "seq" facets are stored only when storage is "inline", in which
+  * case they are serialized to a JSON string. Returns an empty object when
+  * the rule or its "mapping" entry is absent. */
+ private static JSONObject pack(Map theRule, Map theDef) {
+     JSONObject pack = new JSONObject();
+
+     if (theRule == null) {
+         return pack;
+     }
+
+     //these are the facets of the construct definition
+     Map facets = (Map) theRule.get("mapping");
+     if (facets == null) {
+         return pack;
+     }
+
+     facets.entrySet().stream()
+         .forEach(
+             theEntry ->
+             {
+                 Map.Entry entry = (Map.Entry) theEntry;
+                 Map facetDef = (Map) entry.getValue();
+
+                 String storage = (String) facetDef.getOrDefault("storage", "");
+                 String type = (String) facetDef.get("type");
+
+                 if ("none".equals(storage)) {
+                     return;
+                 }
+                 if ("map".equals(type)) {
+                     //maps are used for cross-references between constructs or for
+                     //constructs facets
+                     return;
+                 }
+                 Object val = theDef.get(entry.getKey());
+                 if ("seq".equals(type)) {
+                     //sequences can be stored inlined, if so instructed ..
+                     if ("inline".equals(storage)) {
+                         val = JSONObject.valueToString(val);
+                     } else {
+                         return;
+                     }
+                 }
+                 //bug fix: store the computed 'val' (which may be the inlined
+                 //serialization) instead of re-reading the raw definition entry,
+                 //which discarded the inline conversion above
+                 if ("no".equals(facetDef.getOrDefault("required", "no"))) {
+                     pack.putOpt((String) entry.getKey(), val);
+                 } else {
+                     pack.putOnce((String) entry.getKey(), val);
+                 }
+             });
+     return pack;
+ }
+
+ /* a sort of catalog of neo identifiers generated for the different
+ * constructs (or their types) we store
+ */
+ private static class Tracker<T> {
+
+ // two (construct-kind, name) -> info tables: one for type definitions,
+ // one for template definitions
+ private Table<String, String, T>
+ typeTracker = HashBasedTable.create(),
+ templateTracker = HashBasedTable.create();
+
+ /* records the info associated with a type of the given construct kind */
+ void trackType(String theConstruct, String theName, T theInfo) {
+ typeTracker.put(theConstruct, theName, theInfo);
+ }
+
+ /* returns the tracked info for a type, or null when never tracked */
+ T lookupType(String theConstruct, String theName) {
+ return typeTracker.get(theConstruct, theName);
+ }
+
+ /* true when the given type was previously tracked */
+ boolean tracksType(String theConstruct, String theName) {
+ return typeTracker.contains(theConstruct, theName);
+ }
+
+ /* records the info associated with a template of the given construct kind */
+ void trackTemplate(String theConstruct, String theName, T theInfo) {
+ templateTracker.put(theConstruct, theName, theInfo);
+ }
+
+ /* returns the tracked info for a template, or null when never tracked */
+ T lookupTemplate(String theConstruct, String theName) {
+ return templateTracker.get(theConstruct, theName);
+ }
+
+ }
+}
diff --git a/dcaedt_catalog/db/src/main/resources/tosca-schema.yaml b/dcaedt_catalog/db/src/main/resources/tosca-schema.yaml
new file mode 100644
index 0000000..5944e22
--- /dev/null
+++ b/dcaedt_catalog/db/src/main/resources/tosca-schema.yaml
@@ -0,0 +1,1231 @@
+_status_values: &status_values
+ enum:
+ - supported
+ - unsupported
+ - experimental
+ - deprecated
+
+#I do not know that the lists and maps qualify as 'primitive' ..
+_primitive_types: &primitive_types
+ enum: [string,integer,float,boolean,timestamp,list,map,version,range,scalar-unit.size,scalar_unit.frequency,scalar_unit.time]
+
+#needs custom validation as we have to make sure there are 2 elements and allow for the
+#UNBOUNDED keyword as second element
+_range_definition: &range_definition
+ type: seq
+ name: range_definition
+ sequence:
+ - type: scalar
+
+#see A.5.2
+#this is where the need of verifying the size of a collection (sequence/map) came from
+#this is specified as a sequence where each entry is a map with one entry??
+_constraints_sequence: &constraints_sequence
+ name: constraints_sequence
+# short: "0"
+ type: seq
+ sequence:
+ - type: map
+# length: 1
+ mapping:
+ equal:
+ desc: "Constrains a property or parameter to a value equal to the value declared."
+ type: any
+ required: no
+ greater_than:
+ desc: "Constrains a property or parameter to a value greater than the value declared"
+ type: scalar
+ required: no
+ greater_or_equal:
+ desc: "Constrains a property or parameter to a value greater than or equal to the value declared."
+ type: scalar
+ required: no
+ less_than:
+ desc: "Constrains a property or parameter to a value less than the value declared"
+ type: scalar
+ required: no
+ less_or_equal:
+ desc: "Constrains a property or parameter to a value less than or equal to the value declared."
+ type: scalar
+ required: no
+ in_range:
+ desc: "Constrains a property or parameter to a value in range of (inclusive) the two values declared.
+"
+ type: seq
+# length: 2
+ sequence:
+ - type: scalar
+ required: no
+ valid_values:
+ desc: "Constrains a property or parameter to a value that is in the list of declared values"
+ type: seq
+ sequence:
+ - type: scalar
+ required: no
+ length:
+ desc: "Constrains the property or parameter to a value of a given length."
+ type: int
+ required: no
+ min_length:
+ desc: "Constrains the property or parameter to a value to a minimum length"
+ type: scalar
+ required: no
+ max_length:
+ desc: "Constrains the property or parameter to a value to a maximum length"
+ type: scalar
+ required: no
+ pattern:
+ desc: "Constrains the property or parameter to a value that is allowed by the provided regular expression."
+ type: str
+ required: no
+
+# section A.5.3 property_filter_definition
+# it is a constraints sequence that gets attached to a property ..
+_property_filter_definition: &property_filter_definition
+ name: property_filter_definition
+ type: map
+ mapping:
+ =:
+ *constraints_sequence
+
+#section A.5.4 node_filter_definition
+_node_filter_definition: &node_filter_definition
+ type: map
+ name: node_filter_definition
+ mapping:
+ properties:
+ desc: "property names to constraints to be applied to those properties"
+ required: no
+ type: seq
+ sequence:
+ - *property_filter_definition
+# - type: map
+# mapping:
+# =:
+# *constraints_sequence
+ capabilities:
+ desc: ""
+ required: no
+ type: seq
+ sequence:
+ - type: map
+ name: node_filter_capabilities_sequence
+ desc: "the key is a capability name or type"
+ mapping:
+ =:
+ name: node_filter_capabilities_entry
+ type: map
+ mapping:
+ properties:
+ desc: "the capability properties and their constraints"
+ name: node_filter_capabilities_properties
+ type: seq
+ sequence:
+ - type: map
+ name: node_filter_capabilities_property
+ mapping:
+ =: *constraints_sequence
+
+#used in property and attribute definitions
+_entry_schema_definition: &entry_schema_definition
+ desc: "The optional key that is used to declare the name of the Datatype definition for entries of set types such as the TOSCA list or map"
+ name: entry_schema_definition
+ required: no
+ type: map
+ short: type
+ mapping:
+ "type":
+ desc: "collection element type"
+ required: yes
+ type: str
+ description:
+ required: no
+ type: str
+ constraints:
+ *constraints_sequence
+
+# see section A.5.5
+_artifact_definition: &artifact_definition
+ type: map
+ name: artifact_definition
+ short: implementation # assumes type can be inferred ..
+ mapping:
+ "type":
+ desc: "The required artifact type for the artifact definition"
+ required: yes
+ type: str
+ description:
+ desc: "The optional description for the artifact definition"
+ required: no
+ type: str
+ implementation:
+ desc: "The optional URI string (relative or absolute) which can be used to locate the artifacts file.
+"
+ required: no
+ type: str
+ repository:
+ desc: "The optional name of the repository definition which contains the location of the external repository that contains the artifact"
+ required: no
+ type: str
+ deploy_path:
+      desc: "The file path the associated file would be deployed into within the target node's container."
+ required: no
+ type: str
+
+# see section A.5.6
+_repository_definition: &repository_definition
+ type: map
+ name: repository_definition
+ short: url
+ mapping:
+ description:
+ desc: "The optional description for the repository.
+"
+ required: no
+ type: str
+ url:
+ desc: "The required URL or network address used to access the repository"
+ required: yes
+ type: str
+ credential:
+ desc: "The optional Credential used to authorize access to the repository"
+ required: no
+ type: str
+
+#see section A.5.7
+_property_definition: &property_definition
+ type: map
+ name: property_definition
+ mapping:
+ "type":
+ type: str
+ required: yes
+#not as easy, it can be an user defined data type
+# <<: *primitive_types
+ description:
+ type: str
+ required: no
+ constraints:
+ desc: "The optional list of sequenced constraint clauses for the Data Type."
+ required: no
+ <<: *constraints_sequence
+ default:
+ type: any
+ required: no
+ "required":
+ type: bool
+ required: no
+ status:
+ type: str
+ required: no
+ <<: *status_values
+ entry_schema:
+ <<: *entry_schema_definition
+# desc: "used to declare the name of the Datatype definition for entries of set types such as the TOSCA list or map."
+# type: str
+# required: no
+
+#see section A.5.8
+#_property_assignment_definition: &property_assignment_definition
+
+#see A.5.9
+_attribute_definition: &attribute_definition
+ type: map
+ name: attribute_definition
+ mapping:
+ "type":
+ type: str
+ required: yes
+# <<: *primitive_types
+ description:
+ type: str
+ required: no
+ default:
+ type: any
+ required: no
+ status:
+ desc: "The optional status of the attribute relative to the specification or implementation"
+ type: str
+ required: no
+ <<: *status_values
+ entry_schema:
+ <<: *entry_schema_definition
+
+#see section A.5.10
+#here again, we must support the short form which is the most common
+_attribute_assignment_definition: &attribute_assignment_definition
+ type: map
+ name: attribute_assignment_definition
+ mapping:
+ description:
+ desc: "The optional description of the attribute."
+ required: no
+ type: str
+ value:
+#actually 'value | value_expression'
+ desc: "represent the type-compatible value to assign to the named attribute. Attribute values may be provided as the result from the evaluation of an expression or a function"
+ required: yes
+ type: any
+
+
+# see spec section A.5.11
+
+# see spec section A.5.11.1: variant to be used in node or relationship type definitions
+_type_operation_definition: &type_operation_definition
+ type: map
+ name: type_operation_definition
+ short: implementation
+ mapping:
+ description:
+ desc: "The optional description string for the associated named operation."
+ required: no
+ type: str
+ implementation:
+ desc: "The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR file)"
+ required: no
+ type: str
+ inputs:
+ desc: ""
+ required: no
+ type: map
+ mapping:
+ =:
+ desc: "a property value or an expression providing a input value"
+ name: property_assignment
+ type: any
+
+# from A.5.11.2
+_template_operation_definition: &template_operation_definition
+ type: map
+ name: template_operation_definition
+ short: implementation
+ mapping:
+ description:
+ desc: "The optional description string for the associated named operation."
+ required: no
+ type: str
+ implementation:
+ desc: "The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR file)"
+ name: template_operation_implementation_definition
+ required: no
+ short: primary
+ type: map
+ mapping:
+ primary:
+ desc: "The optional implementation artifact name (e.g., the primary script file name within a TOSCA CSAR file). "
+ required: no
+ type: str
+ dependencies:
+ desc: "The optional list of one or more dependent or secondary implementation artifact name which are referenced by the primary implementation artifact (e.g., a library the script installs or a secondary script)"
+ required: no
+ type: seq
+ sequence:
+ - type: str
+ inputs:
+ desc: ""
+ required: no
+ type: map
+ mapping:
+ =:
+ desc: "a property value or an expression providing a input value"
+ name: property_assignment
+ type: any
+
+
+# see section A.5.12, specifically A.5.12.2.1 : definition to be used in node or relationship type definition
+_type_interface_definition: &type_interface_definition
+ type: map
+ name: type_interface_definition
+ mapping:
+ "type":
+ desc: "represents the required name of the Interface Type for the interface definition
+"
+ required: yes
+ type: str
+ inputs:
+ desc: "The optional list of input property definitions available to all defined operations"
+ type: map
+ mapping:
+ =:
+ *property_definition
+ =:
+ *type_operation_definition
+
+# see section A.5.12.2.2, extended notation to be used in node or relationship template definitions
+_template_interface_definition: &template_interface_definition
+ type: map
+ name: template_interface_definition
+ mapping:
+ inputs:
+ desc: "The optional list of input property definitions available to all defined operations"
+ type: map
+ mapping:
+ =:
+ desc: "a property value or an expression providing a property value"
+ name: property_assignment
+ type: any
+ =:
+ *template_operation_definition
+
+
+# A.6 section: type specific definitions
+
+# see section A.6.1
+_capability_definition: &capability_definition
+ type: map
+ name: capability_definition
+ short: type
+ mapping:
+ "type":
+ desc: "The required name of the Capability Type the capability definition is based upon"
+ required: yes
+ type: str
+ description:
+ desc: "The optional description of the Capability definition"
+ required: no
+ type: str
+ properties:
+ desc: ""
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+ attributes:
+ desc: "An optional list of property definitions for the Capability definition"
+ required: no
+ type: map
+ mapping:
+ =:
+ *attribute_definition
+ valid_source_types:
+ desc: ""
+ required: no
+ type: seq
+ sequence:
+ - type: str
+ occurrences:
+ desc: "The optional minimum and maximum occurrences for the capability."
+ required: no
+ <<: *range_definition
+
+# see section A.6.2
+#
+_requirement_definition: &requirement_definition
+ type: map
+ name: requirement_definition
+ short: capability #as per A.6.2.2.1
+ mapping:
+ capability:
+      desc: "The required reserved keyname that can be used to provide the name of a valid Capability Type that can fulfil the requirement"
+ required: yes
+ type: str
+ node:
+ desc: "The optional reserved keyname used to provide the name of a valid Node Type that contains the capability definition that can be used to fulfil the requirement. "
+ required: no
+ type: str
+ relationship:
+# and from section A.6.2.1, this one is an oddball
+ desc: "The optional reserved keyname used to provide the name of a valid Relationship Type to construct when fulfilling the requirement."
+ required: no
+ name: requirement_relationship_definition
+ short: type
+ type: map
+ mapping:
+ type:
+ desc: "The optional reserved keyname used to provide the name of the Relationship Type for the requirement definitions relationship keyname.
+"
+ required: yes
+ type: str
+ interfaces:
+ #not clear which interface definition is to be used here
+ desc: "allows augmentation (additional properties and operations) of the interfaces defined by the relationship type indicated above"
+ required: no
+ type: map
+ mapping:
+ =:
+ *type_interface_definition
+ occurrences:
+ desc: "The optional minimum and maximum occurrences for the requirement."
+ required: no
+ <<: *range_definition
+
+# see section A.6.3
+_artifact_type_definition: &artifact_type_definition
+ type: map
+ name: artifact_type_definition
+ mapping:
+ derived_from:
+ desc: "An optional parent Artifact Type name the Artifact Type derives from"
+ required: no
+ type: str
+ description:
+ desc: "An optional description for the Artifact Type."
+ required: no
+ type: str
+ mime_type:
+ desc: "The required mime type property for the Artifact Type."
+ required: no
+ type: str
+ file_ext:
+ desc: "The required file extension property for the Artifact Type"
+ required: no
+ type: seq
+ sequence:
+ - type: str
+ properties:
+ desc: "An optional list of property definitions for the Artifact Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+
+#see spec section #A.6.4
+_interface_type_definition: &interface_type_definition
+ type: map
+ name: interface_type_definition
+ mapping:
+ inputs:
+ desc: "The optional list of input property definitions available to all defined operations"
+ type: map
+ mapping:
+ =:
+ type: str
+ desc: "property_name to property_value(_expression) mapping"
+ =:
+ *type_operation_definition
+
+# A.6.5
+_data_type_definition: &data_type_definition
+ type: map
+ name: data_type_definition
+ mapping:
+ derived_from:
+ desc: "The optional key used when a datatype is derived from an existing TOSCA Data Type.
+"
+ required: no
+ type: str
+ description:
+ desc: "The optional description for the Data Type.
+"
+ required: no
+ type: str
+ constraints:
+ desc: "The optional list of sequenced constraint clauses for the Data Type."
+ <<: *constraints_sequence
+ properties:
+ desc: "The optional list property definitions that comprise the schema for a complex Data Type in TOSCA"
+ type: map
+ mapping:
+ =:
+ *property_definition
+
+# see section A.6.6
+_capability_type_definition: &capability_type_definition
+ type: map
+ name: capability_type_definition
+ mapping:
+ derived_from:
+ desc: "An optional parent capability type name this new Capability Type derives from."
+ required: no
+ type: str
+ description:
+ desc: "An optional description for the Capability Type"
+ required: no
+ type: str
+ properties:
+ desc: "An optional list of property definitions for the Capability Type."
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+ attributes:
+ desc: "An optional list of attribute definitions for the Capability Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *attribute_definition
+ valid_source_types:
+ desc: "An optional list of one or more valid names of Node Types that are supported as valid sources of any relationship established to the declared Capability Type"
+ required: no
+ type: seq
+ sequence:
+ - type: str
+
+# section A.6.7 requirement definition: TOSCA YAML profile relies on capability types to
+# define requirements
+
+# see section A.6.9
+_relationship_type_definition: &relationship_type_definition
+ type: map
+ name: relationship_type_definition
+ mapping:
+ derived_from:
+ desc: "An optional parent Relationship Type name the Relationship Type derives from"
+ required: no
+ type: str
+ description:
+ desc: "An optional description for the Relationship Type."
+ required: no
+ type: str
+ properties:
+ desc: "An optional list of property definitions for the Relationship Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+ attributes:
+ desc: "An optional list of attribute definitions for the Relationship Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *attribute_definition
+ interfaces:
+ desc: "An optional list of interface definitions interfaces supported by the Relationship Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *type_interface_definition
+ valid_target_types:
+ desc: "An optional list of one or more names of Capability Types that are valid targets for this relationship. "
+ required: no
+ type: seq
+ sequence:
+ - type: str
+
+#see section 3.6.10
+_group_type_definition: &group_type_definition
+ type: map
+ name: group_type_definition
+ mapping:
+ derived_from:
+ desc: "An optional parent Group Type name this new Group Type derives from"
+ required: no
+ type: str
+ version:
+ desc: "An optional version for the Group Type definition"
+ required: no
+ type: str
+ description:
+ desc: "An optional description for the Group Type"
+ required: no
+ type: str
+ properties:
+ desc: "An optional list of property definitions for the Group Type."
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+ targets:
+ desc: "An optional list of one or more names of Node Types that are valid
+(allowed) as members of the Group Type."
+ required: no
+ type: seq
+ sequence:
+ - type: str
+ interfaces:
+ desc: "An optional list of interface definitions supported by the Group Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *type_interface_definition
+
+#see section 3.6.11
+_policy_type_definition: &policy_type_definition
+ type: map
+ name: policy_type_definition
+ mapping:
+ derived_from:
+ desc: "An optional parent Policy Type name this new Policy Type derives from"
+ required: no
+ type: str
+ version:
+ desc: "An optional version for the Policy Type definition"
+ required: no
+ type: str
+ description:
+ desc: "An optional description for the Policy Type"
+ required: no
+ type: str
+ properties:
+ desc: "An optional list of property definitions for the Policy Type."
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+ targets:
+ desc: "An optional list of valid Node Types or Group Types the Policy Type
+can be applied to"
+ required: no
+ type: seq
+ sequence:
+ - type: str
+
+# see section A.6.8
+_node_type_definition: &node_type_definition
+ type: map
+ name: node_type_definition
+ mapping:
+ derived_from:
+ desc: "An optional parent Node Type name this new Node Type derives from"
+ required: no
+ type: str
+ description:
+ desc: "An optional description for the Node Type"
+ required: no
+ type: str
+ properties:
+ desc: "An optional list of property definitions for the Node Type."
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+ attributes:
+ desc: "An optional list of attribute definitions for the Node Type.
+"
+ required: no
+ type: map
+ mapping:
+ =:
+ *attribute_definition
+ requirements:
+ desc: "An optional sequenced list of requirement definitions for the Node Type.
+"
+ required: no
+ type: seq
+ sequence:
+ - type: map
+ mapping:
+ =:
+ *requirement_definition
+ capabilities:
+ desc: "An optional list of capability definitions for the Node Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *capability_definition
+ interfaces:
+ desc: ""
+ required: no
+ type: map
+ mapping:
+ =:
+ *type_interface_definition
+ artifacts:
+ desc: "An optional list of named artifact definitions for the Node Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *artifact_definition
+
+# A.7 Template specific definitions
+
+# see section A.7.1
+_capability_assignment_definition: &capability_assignment_definition
+ type: map
+ name: capability_assignment_definition
+ mapping:
+ properties:
+ # list of property assignments
+ desc: "An optional list of property definitions for the Capability definition"
+ required: no
+ type: map
+ mapping:
+ =:
+ desc: "a property value or an expression providing a property value"
+ name: property_assignment
+ type: any
+ attributes:
+ # list of attribute assignments
+ desc: "An optional list of attribute definitions for the Capability definition"
+ required: no
+ type: map
+ mapping:
+ =:
+ desc: ""
+ name: attribute_assignment
+ type: any
+
+# see section A.7.2
+_requirement_assignment_definition: &requirement_assignment_definition
+ type: map
+ name: requirement_assignment_definition
+ short: node
+ mapping:
+ capability:
+ desc: " used to provide the name of either a: Capability definition within a target node template that can fulfill the requirement or Capability Type that the provider will use to select a type-compatible target node template to fulfill the requirement at runtime."
+ required: no
+ type: str
+ node:
+#why is this a reference to a node type and not to a node template??
+ desc: "used to identify the target node of a relationship: Node Template name that can fulfil the target node requirement or Node Type name that the provider will use to select a type-compatible node template to fulfil the requirement at runtime"
+ required: no
+ type: str
+ relationship:
+ desc: ""
+ required: no
+#find a better name for this — name: relationship_definition
+ type: map
+ short: type
+ mapping:
+ "type":
+ desc: "The optional reserved keyname used to provide the name of the Relationship Type for the requirement assignments relationship keyname"
+ required: no
+ type: str
+ properties:
+ desc: ""
+ required: no
+ type: map
+ mapping:
+ =:
+ desc: "a property value or an expression providing a property value"
+ name: property_assignment
+ type: any
+ interfaces:
+ desc: "from A.5.12.2.2, right?"
+ required: no
+ type: map
+ mapping:
+ =:
+ *template_interface_definition
+ node_filter:
+ desc: "The optional filter definition that TOSCA orchestrators or providers would use to select a type-compatible target node that can fulfill the associated abstract requirement at runtime."
+ required: no
+ <<: *node_filter_definition
+
+# see section A.7.3
+_node_template_definition: &node_template_definition
+ type: map
+ name: node_template_definition
+ mapping:
+ "type":
+ desc: "The required name of the Node Type the Node Template is based upon"
+ required: yes
+ type: str
+ description:
+ desc: "An optional description for the Node Template"
+ required: no
+ type: str
+ directives:
+ desc: "An optional list of directive values to provide processing instructions to orchestrators and tooling."
+ required: no
+ type: seq
+ sequence:
+ - type: str
+ properties:
+#custom check needs to be added: the value or expression providing the property value
+#needs to be compatible with the property definition
+ desc: "An optional list of property value assignments for the Node Template."
+ required: no
+ type: map
+ mapping:
+ =:
+ type: any
+ name: property_assignment
+ desc: "a property value or an expression providing a property value"
+ attributes:
+ desc: "An optional list of attribute value assignments for the Node Template"
+ required: no
+ type: map
+ mapping:
+ =:
+ *attribute_assignment_definition
+ requirements:
+ desc: "An optional sequenced list of requirement assignments for the Node Template."
+ required: no
+ type: seq
+ sequence:
+ - type: map
+ mapping:
+ =:
+ *requirement_assignment_definition
+ capabilities:
+ desc: "An optional list of capability assignments for the Node Template."
+ required: no
+ type: map
+ mapping:
+ =:
+ *capability_assignment_definition
+ interfaces:
+ desc: "An optional list of named interface definitions for the Node Template"
+ required: no
+ type: map
+ mapping:
+ =:
+ *template_interface_definition
+ artifacts:
+ desc: "An optional list of named artifact definitions for the Node Template.
+"
+ required: no
+ type: map
+ mapping:
+ =:
+ *artifact_definition
+ node_filter:
+ desc: "The optional filter definition that TOSCA orchestrators would use to select the correct target node. This keyname is only valid if the directive has the value of 'selectable' set."
+ required: no
+ <<: *node_filter_definition
+ copy:
+ desc: "The optional (symbolic) name of another node template to copy into (all keynames and values) and use as a basis for this node template."
+ required: no
+ type: str
+
+# see section A.7.4
+_relationship_template_definition: &relationship_template_definition
+ type: map
+ name: relationship_template_definition
+ mapping:
+ "type":
+ desc: "The required name of the Relationship Type the Relationship Template is based upon"
+ required: yes
+ type: str
+ alias:
+ desc: "The optional name of a different Relationship Template definition whose values are (effectively) copied into the definition for this Relationship Template (prior to any other overrides)."
+ required: no
+ type: str
+ description:
+ desc: "An optional description for the Relationship Template"
+ required: no
+ type: str
+ properties:
+ desc: "An optional list of property assignments for the Relationship Template."
+ required: no
+ name: properties_assignment_validation
+ type: map
+ mapping:
+ =:
+ type: any
+#scalar
+ desc: "an expression providing a property value"
+ attributes:
+ desc: "An optional list of attribute value assignments for the Relationship Template"
+ required: no
+ name: attributes_assignment_validation
+ type: map
+ mapping:
+ =:
+ type: scalar
+ desc: "an expression providing an attribute value"
+ interfaces:
+ desc: "An optional list of named interface definitions for the Relationship Template ('augmentation' is allowed here)"
+ required: no
+ type: map
+ mapping:
+ =:
+ *template_interface_definition
+ copy:
+ desc: "The optional (symbolic) name of another relationship template to copy into (all keynames and values) and use as a basis for this relationship template."
+ required: no
+ type: str
+
+
+# see section 3.7.5
+_group_definition: &group_definition
+ type: map
+ name: group_definition
+ mapping:
+ "type":
+ desc: "The required name of the group type the group definition is based upon"
+ required: yes
+ type: str
+ description:
+ desc: "The optional description for the group definition"
+ required: no
+ properties:
+ desc: " represents the optional list of property assignments for the group definition that provide values for properties defined in its declared Group Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ type: any
+ name: property_assignment
+ targets:
+ desc: "contains the required list of one or more node template names (within the same topology template) that are members of this logical group"
+ required: yes
+ type: seq
+ sequence:
+ - type: str
+ interfaces:
+ desc: "represents the optional list of interface definitions for the group definition that augment those provided by its declared Group Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ *template_interface_definition
+
+# see section 3.7.6
+_policy_template_definition: &policy_template_definition
+ type: map
+ name: policy_definition
+ mapping:
+ "type":
+ desc: "The required name of the policy type the policy definition is based upon"
+ required: yes
+ type: str
+ description:
+ desc: "The optional description for the policy definition"
+ required: no
+ properties:
+ desc: "represents the optional list of property assignments for the policy definition that provide values for properties defined in its declared Policy Type"
+ required: no
+ type: map
+ mapping:
+ =:
+ type: any
+ name: property_assignment
+ targets:
+ desc: "represents the optional list of names of node templates or groups that the policy is to applied to"
+ required: no
+ type: seq
+ sequence:
+ - type: str
+
+# see section 3.8 Topology Template definition: defines the topology template of a cloud application.
+# described as a reusable grammar as it can be a part of a service template definition
+_topology_template_definition: &topology_template_definition
+ type: map
+ name: topology_template_definition
+ mapping:
+ description:
+ desc: "a description of the topology template"
+ required: no
+ type: str
+ inputs:
+ desc: "definition of input parameters for the topology template"
+ name: inputs
+ required: no
+ type: map
+ mapping:
+ =:
+ *property_definition
+ node_templates:
+ desc: "definition of the node templates of the topology"
+ name: node_templates
+ required: no
+ type: map
+ mapping:
+ =:
+ *node_template_definition
+ relationship_templates:
+ desc: "definition of the relationship templates of the topology"
+ required: no
+ name: relationship_templates
+ type: map
+ mapping:
+ =:
+ *relationship_template_definition
+ outputs:
+ desc: "definition of output parameters for the topology template"
+ name: outputs
+ required: no
+ type: map
+ mapping:
+ =:
+ *attribute_assignment_definition
+ groups:
+ desc: "An optional list of Group definitions whose members are node templates defined within this same Topology Template"
+ name: groups
+ required: no
+ type: map
+ mapping:
+ =:
+ *group_definition
+ policies:
+ # see 8.2.3, initially the list is not described as sequenced but then the grammar shows it as such !?
+ desc: "An optional sequenced?? list of Policy definitions for the Topology Template."
+ name: policies
+ required: no
+ type: seq
+ sequence:
+ - type: map
+ mapping:
+ =:
+ *policy_template_definition
+ substitution_mappings:
+# one possible short-coming that is visible here is that the definition of the capability
+# and requirements mappings are given in the spec only with the short/inline version of a
+# YAML list/sequence, which cannot be enforced here ..
+      desc: "substitution mapping that exposes this topology template as an implementation of a node type"
+ name: substitution_mappings
+ required: no
+ type: map
+ mapping:
+ node_type:
+ desc: "node type name"
+ required: yes
+ type: str
+ capabilities:
+ desc: "map_of_capability_mappings_to_expose"
+ type: map
+ mapping:
+ =:
+ type: seq
+ sequence:
+ - type: str
+ requirements:
+ desc: "map_of_requirement_mapping_to_expose"
+ type: map
+ mapping:
+ =:
+ type: seq
+ sequence:
+ - type: str
+
+
+# see A.9 Service Template definition: A TOSCA Service Template (YAML) document contains
+# element definitions of building blocks for cloud application, or complete models of cloud applications.
+
+type: map
+name: service_template_definition
+mapping:
+ tosca_definitions_version:
+ desc: "Required TOSCA Definitions version string"
+ required: yes
+ type: str
+
+ tosca_default_namespace:
+ desc: "Optional. default namespace (for type schema)"
+ required: no
+ type: str
+
+ metadata:
+ desc: "Optional metadata keyname: value pairs"
+ name: metadata
+ required: no
+ type: map
+ mapping:
+ template_name:
+ desc: "Optional name of this service template"
+ required: no
+ type: str
+ template_author:
+ desc: "Optional author of this service template"
+ required: no
+ type: str
+ template_version:
+ desc: "Optional version of this service template"
+ required: no
+ type: str
+#to add, the spec says: "Optional list of domain or profile specific metadata keynames"
+
+ description:
+ desc: "Optional description of the definitions inside the file"
+ required: no
+ type: str
+
+ imports:
+ desc: "list of import statements for importing other definitions files"
+ name: imports
+ required: no
+ type: seq
+ sequence:
+ - type: str
+
+ dsl_definitions:
+ desc: "list of YAML alias anchors (or macros)"
+ name: dsl_definitions
+ required: no
+ type: map
+ mapping:
+ =:
+ desc: "some piece of valid yaml that makes the anchor/alias definition"
+ type: any
+ required: no
+
+ repositories:
+ desc: "list of external repository definitions which host TOSCA artifacts"
+ name: repositories
+ required: no
+ type: map
+ mapping:
+ =:
+ *repository_definition
+
+ data_types:
+ desc: "list of TOSCA datatype definitions"
+ name: data_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *data_type_definition
+
+ node_types:
+ desc: "list of node type definitions"
+ name: node_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *node_type_definition
+
+ capability_types:
+ desc: "list of capability type definitions"
+ name: capability_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *capability_type_definition
+
+ relationship_types:
+ desc: "list of relationship type definitions"
+ name: relationship_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *relationship_type_definition
+
+ artifact_types:
+ desc: "list of artifact type definitions"
+ name: artifact_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *artifact_type_definition
+
+ interface_types:
+ desc: "list of interface type definitions"
+ name: interface_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *interface_type_definition
+
+ group_types:
+ desc: "list of group type definitions"
+ name: group_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *group_type_definition
+
+ policy_types:
+ desc: "list of policy type definitions"
+ name: policy_types
+ required: no
+ type: map
+ mapping:
+ =:
+ *policy_type_definition
+
+ topology_template:
+ desc: "topology template definition of the cloud application or service"
+ required: no
+ <<: *topology_template_definition
diff --git a/dcaedt_catalog/db/src/main/resources/tosca-storage-schema.yaml b/dcaedt_catalog/db/src/main/resources/tosca-storage-schema.yaml
new file mode 100644
index 0000000..5ca7061
--- /dev/null
+++ b/dcaedt_catalog/db/src/main/resources/tosca-storage-schema.yaml
@@ -0,0 +1,37 @@
+#_policy_type_storage_definition:
+# <<: *policy_type_definition
+# mapping:
+# <<: *policy_type_mapping
+# targets:
+# <<: *policy_type_targets
+# storage: inline
+
+/_data_type_definition/mapping/derived_from:
+ storage: none
+
+/_node_type_definition/mapping/derived_from:
+ storage: none
+
+/_capability_type_definition/mapping/derived_from:
+ storage: none
+/_capability_type_definition/mapping/valid_source_types:
+ storage: inline
+
+/_relationship_type_definition/mapping/derived_from:
+ storage: none
+/_relationship_type_definition/mapping/valid_target_types:
+ storage: inline
+
+/_policy_type_definition/mapping/derived_from:
+ storage: none
+/_policy_type_definition/mapping/targets:
+ storage: inline
+
+/_node_template_definition/mapping/type:
+ storage: none
+
+/_policy_template_definition/mapping/targets:
+ storage: inline
+
+/_policy_template_definition/mapping/type:
+ storage: none
diff --git a/dcaedt_catalog/pom.xml b/dcaedt_catalog/pom.xml
new file mode 100644
index 0000000..701d665
--- /dev/null
+++ b/dcaedt_catalog/pom.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+ http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>DCAE-DT-Catalog</artifactId>
+ <packaging>pom</packaging>
+ <name>DCAE DT Catalog</name>
+ <parent>
+ <groupId>org.onap.sdc.dcae</groupId>
+ <artifactId>dcae_dt_be_main</artifactId>
+ <version>1806.0.1-SNAPSHOT</version>
+    </parent>
+ <modules>
+ <module>asdc</module>
+ <module>commons</module>
+ <module>db</module>
+ <module>api</module>
+ <module>service</module>
+ </modules>
+
+ <scm>
+ <connection>scm:git:https://github.research.att.com/ASC/ASC-Catalog/</connection>
+ <developerConnection>scm:git:https://github.research.att.com/ASC/ASC-Catalog/</developerConnection>
+ <tag>HEAD</tag>
+ <url>https://github.research.att.com/ASC/ASC-Catalog/</url>
+ </scm>
+</project>
diff --git a/dcaedt_catalog/service/README.md b/dcaedt_catalog/service/README.md
new file mode 100644
index 0000000..8607f95
--- /dev/null
+++ b/dcaedt_catalog/service/README.md
@@ -0,0 +1,4 @@
+ASC-Catalog
+===========
+
+This component implements the Catalog API for the ASC Platform.
diff --git a/dcaedt_catalog/service/pom.xml b/dcaedt_catalog/service/pom.xml
new file mode 100644
index 0000000..c41980d
--- /dev/null
+++ b/dcaedt_catalog/service/pom.xml
@@ -0,0 +1,80 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <groupId>org.onap.sdc.dcae</groupId>
+ <artifactId>DCAE-DT-Catalog</artifactId>
+ <version>1806.0.1-SNAPSHOT</version>
+ </parent>
+ <artifactId>DCAE-DT-Catalog-Service</artifactId>
+ <packaging>jar</packaging>
+ <name>DCAE-DT Catalog Service</name>
+
+ <build>
+ <sourceDirectory>src/main/java</sourceDirectory>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>3.1</version>
+ <configuration>
+ <source>1.8</source>
+ <target>1.8</target>
+ <encoding>${project.build.sourceEncoding}</encoding>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-maven-plugin</artifactId>
+ <version>1.3.3.RELEASE</version>
+ <configuration>
+ <mainClass>org.onap.sdc.dcae.catalog.engine.CatalogEngine</mainClass>
+ </configuration>
+ <executions>
+ <execution>
+ <goals>
+ <goal>repackage</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-core</artifactId>
+ <version>4.3.5.RELEASE</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-web</artifactId>
+ <version>4.3.5.RELEASE</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-web</artifactId>
+ <version>1.4.1.RELEASE</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-webmvc</artifactId>
+ <version>4.3.5.RELEASE</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-autoconfigure</artifactId>
+ <version>1.4.1.RELEASE</version>
+ </dependency>
+ <dependency>
+ <groupId>org.onap.sdc.dcae</groupId>
+ <artifactId>DCAE-DT-Catalog-API</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogController.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogController.java
new file mode 100644
index 0000000..7b9e1a2
--- /dev/null
+++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogController.java
@@ -0,0 +1,594 @@
+/*
+ * AT&T - PROPRIETARY
+ * THIS FILE CONTAINS PROPRIETARY INFORMATION OF
+ * AT&T AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN
+ * ACCORDANCE WITH APPLICABLE AGREEMENTS.
+ *
+ * Copyright (c) 2015 AT&T Knowledge Ventures
+ * Unpublished and Not for Publication
+ * All Rights Reserved
+ */
+package org.onap.sdc.dcae.catalog.engine;
+/*
+ * THIS FILE CONTAINS PROPRIETARY INFORMATION OF
+ * AT&T AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN
+ * ACCORDANCE WITH APPLICABLE AGREEMENTS.
+ *
+ * Copyright (c) 2015 AT&T Knowledge Ventures
+ * Unpublished and Not for Publication
+ * All Rights Reserved
+ */
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+import static org.onap.sdc.dcae.catalog.Catalog.*;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import org.json.JSONObject;
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.catalog.Catalog;
+import org.onap.sdc.dcae.catalog.asdc.ASDCCatalog;
+import org.onap.sdc.dcae.catalog.commons.Future;
+import org.onap.sdc.dcae.catalog.commons.FutureHandler;
+import org.onap.sdc.dcae.composition.util.DcaeBeConstants;
+import org.onap.sdc.dcae.composition.util.SystemProperties;
+import org.json.JSONArray;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.RestController;
+
+import org.springframework.web.context.request.async.DeferredResult;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+import org.springframework.web.bind.annotation.CrossOrigin;
+
+/**
+ * All requests body:
+ * {
+ * "id": optional request uuid,
+ * "timestamp": optional request timestamp,
+ * "catalog": optional catalog uri,
+ * "timeout": optional timeout - default 0 no time limit
+ * }
+ *
+ * All responses body:
+ * { "data": {},
+ * "error": {}
+ * }
+ *
+ * If a non-2xx response is provided, an error occurred at catalog engine processing level.
+ * If an error has occurred in data retrieval then the response error object is not empty.
+ *
+ * Available uris
+ * /catalog
+ * /elements : roots of the catalog; request body is optional but can specify a label under 'startingLabel'
+ * response contains items under 'data/elements'
+ * /{itemId}/elements : catalog descendants of the given item, possibly a mix of folders and items
+ * response contains items under 'data/elements'
+ * /lookup.by.name : lookup catalog entries by name.
+ The request body must contain a 'selector' entry with a 'name' criteria
+ * response contains items under 'data/elements'
+ * Example: '{"id":"5d0c1cf4-11aa-11e6-a148-3e1d05defe78","selector":{"name":"Firewall"}}'
+ * /lookup.by.annotation
+ The request body must contain a 'annotation' entry and it can have a 'selector' entry
+ * with a multiple annotation property criteria
+ * response contains items under 'data/elements'
+ * /lookup.by.model.property.value :
+ * The request must contain a "selector" entry as a JSONObject containing the selection criteria
+ * (property name with values) and desired output properties (null values). Example:
+ * "selector":{"att-part-number":"L-CSR-50M-APP-3Y",
+ * "management-option":"ATT",
+ * "vnf-type":null,
+ * "vendor-model":null}
+ * response contains items under 'data/elements'
+ * /referents : provides generic recommendations
+ * response contains items under 'data/elements'
+ * /{itemId}/referents : provides recommendations for the given item
+ * response contains items under 'data/elements'
+ * /{itemId}/model : retrieves the TOSCA model for the item with the given id
+ * response under 'data/model'
+ *
+ */
+
+
+@RestController
+//@RequestMapping(value="/catalog",method=RequestMethod.POST)
+@CrossOrigin(origins="*")
+//@ConfigurationProperties(prefix="catalogController")
+public class CatalogController {
+
+    // shared ONAP loggers; errLogger appears unused in the visible code -- NOTE(review): confirm before removal
+    private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+    private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+    // injected DCAE BE configuration accessor; supplies the default catalog URL (see initCatalog)
+    @Autowired
+    private SystemProperties systemProperties;
+
+    // NOTE(review): only written by its setter; cross-origin access is handled by the
+    // class-level @CrossOrigin annotation -- confirm whether this flag is still needed
+    private boolean enableCORS = false;
+    // catalog used when a request does not name one; resolved from configuration in initCatalog()
+    private URI defaultCatalog;
+    // cache of catalog clients keyed by catalog URI, shared across all requests (see getCatalog)
+    private static Map<URI, Catalog> catalogs = new HashMap<URI, Catalog>();
+
+
+    /**
+     * Overrides the catalog used for requests that do not name a catalog of their own.
+     *
+     * @param theUri the default catalog URI (expected to use the "asdc" scheme, see getCatalog)
+     */
+    public void setDefaultCatalog(URI theUri) {
+        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "set default catalog at {}", theUri);
+        this.defaultCatalog = theUri;
+    }
+
+    /**
+     * Stores the CORS-enable flag. NOTE(review): no visible code reads this flag --
+     * cross-origin access is granted via the class-level @CrossOrigin annotation;
+     * confirm whether the setter is still required by external configuration.
+     *
+     * @param doEnable whether CORS handling should be enabled
+     */
+    public void setEnableCORS(boolean doEnable) {
+        this.enableCORS = doEnable;
+    }
+
+// @RequestMapping(value="/elements",method={RequestMethod.POST, RequestMethod.GET}, produces = "application/json")
+// public DeferredResult<CatalogResponse> items(@RequestBody(required=false) ItemsRequest theRequest) {
+//
+// final ItemsRequest request = (theRequest == null) ? ItemsRequest.EMPTY_REQUEST : theRequest;
+//
+// Catalog catalog = getCatalog(request.getCatalog());
+// DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout());
+//
+// catalog.rootsByLabel(request.getStartingLabel())
+// //catalog.roots()
+// .setHandler(
+// new CatalogHandler<Folders>(request, result) {
+// public CatalogResponse handleData(Folders theFolders) {
+// JSONArray ja = new JSONArray();
+// if (theFolders != null) {
+// for (Folder folder : theFolders) {
+// ja.put(patchData(catalog, folder.data()));
+// }
+// }
+// CatalogResponse response = new CatalogResponse(this.request);
+// response.data()
+// .put("elements", ja);
+// return response;
+// }
+// });
+// return result;
+// }
+//
+// @RequestMapping(value="/{theItemId}/elements",method={RequestMethod.POST,RequestMethod.GET}, produces = "application/json")
+// public DeferredResult<CatalogResponse> items(@RequestBody(required=false) ItemsRequest theRequest, @PathVariable String theItemId) {
+//
+// final ItemsRequest request = (theRequest == null) ? ItemsRequest.EMPTY_REQUEST : theRequest;
+//
+// Catalog catalog = getCatalog(request.getCatalog());
+// DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout());
+//
+// catalog
+//// .fetchFolderByItemId(theItemId)
+// .folder(theItemId)
+// .withParts()
+// .withPartAnnotations()
+// .withItems()
+// .withItemAnnotations()
+// .withItemModels()
+// .execute()
+// .setHandler(
+// new CatalogHandler<Folder>(request, result) {
+// public CatalogResponse handleData(Folder theFolder) {
+// CatalogResponse response = new CatalogResponse(this.request);
+// if (theFolder == null) {
+// return response;
+// }
+//
+// try {
+// Elements folders = theFolder.elements("parts",Folders.class);
+// if (folders != null) {
+// for (Object folder: folders) {
+// patchData(catalog, ((Element)folder).data());
+// //lots of ephemere proxies created here ..
+// Elements annotations =
+// ((Element)folder).elements("annotations", Annotations.class);
+// if (annotations != null) {
+// for (Object a: annotations) {
+// patchData(catalog, ((Annotation)a).data());
+// }
+// }
+// }
+// }
+// Elements items = theFolder.elements("items",Items.class);
+// if (items != null) {
+// for (Object i: items) {
+// patchData(catalog, ((Element)i).data());
+// //lots of ephemere proxies created here ..
+// Elements annotations =
+// ((Element)i).elements("annotations", Annotations.class);
+// if (annotations != null) {
+// for (Object a: annotations){
+// patchData(catalog, ((Annotation)a).data());
+// }
+// }
+// }
+// }
+// }
+// catch(Exception x) {
+//x.printStackTrace();
+// return new CatalogError(this.request, "", x);
+// }
+//
+// response.data()
+// .put("element", theFolder.data());
+// return response;
+// }
+// });
+//
+// return result;
+// }
+//
+// @RequestMapping(value="/lookup.by.name",method=RequestMethod.POST, produces = "application/json")
+// public DeferredResult<CatalogResponse> elementsByName(@RequestBody ElementsLookup theRequest) {
+//
+// Catalog catalog = getCatalog(theRequest.getCatalog());
+// DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(theRequest.getTimeout());
+//
+// catalog
+// .lookup(new JSONObject(theRequest.getSelector()))
+// .setHandler(
+// new CatalogHandler<Mixels>(theRequest, result) {
+// public CatalogResponse handleData(Mixels theElems) {
+// JSONArray ja = new JSONArray();
+// if (theElems != null) {
+// for (Object elem : theElems) {
+// ja.put(patchData(catalog, ((Element)elem).data()));
+// }
+// }
+// CatalogResponse response = new CatalogResponse(theRequest);
+// response.data()
+// .put("elements", ja);
+// return response;
+// }
+// });
+//
+// return result;
+// }
+//
+// @RequestMapping(value="/lookup.by.annotation",method=RequestMethod.POST, produces = "application/json")
+// public DeferredResult<CatalogResponse> elementsByAnnotation(@RequestBody ElementsLookup theRequest) {
+//
+// Catalog catalog = getCatalog(theRequest.getCatalog());
+// DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(theRequest.getTimeout());
+//
+// catalog
+// .lookup(theRequest.getAnnotation(),
+// new JSONObject(theRequest.getSelector()))
+// .setHandler(
+// new CatalogHandler<Mixels>(theRequest, result) {
+// public CatalogResponse handleData(Mixels theElems) {
+// JSONArray ja = new JSONArray();
+// if (theElems != null) {
+// for (Object elem : theElems) {
+// ja.put(patchData(catalog, ((Element)elem).data()));
+// }
+// }
+// CatalogResponse response = new CatalogResponse(this.request);
+// response.data()
+// .put("elements", ja);
+// return response;
+// }
+// });
+//
+// return result;
+// }
+
+ /**
+ * NeoCatalog specific
+ *//*
+ @RequestMapping(value="/lookup.by.model.property.value",method=RequestMethod.POST, produces = "application/json")
+ public DeferredResult<CatalogResponse> elementsByModelPropertyValue(@RequestBody ElementsLookup theRequest) {
+
+ DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(theRequest.getTimeout());
+
+ NeoCatalog catalog = asNeo(getCatalog(theRequest.getCatalog()));
+ if (catalog == null) {
+ result.setErrorResult(
+ new CatalogError(
+ theRequest,"The selected catalog is not capable of handling this request (lookup.by.model.property.value)"));
+ return result;
+ }
+
+ catalog
+ .lookupItemsByToscaNodePropertyValue(theRequest.getJSONSelector())
+ .setHandler(
+ new CatalogHandler<Items>(theRequest, result) {
+ public CatalogResponse handleData(Items theItems) {
+ JSONArray ja = new JSONArray();
+ if (theItems != null) {
+ for (Item item : theItems) {
+ ja.put(patchData(catalog, item.data()));
+ }
+ }
+ CatalogResponse response = new CatalogResponse(this.request);
+ response.data()
+ .put("elements", ja);
+ return response;
+ }
+ });
+
+ return result;
+ }
+*/
+ /**
+ * This follows the current convention that each item will have a single model
+ 2 stage
+ */
+// @RequestMapping(value="/{theItemId}/model",method={RequestMethod.POST,RequestMethod.GET}, produces = "application/json")
+// //public DeferredResult<CatalogResponse> model(@RequestBody ElementRequest theRequest) {
+// public DeferredResult<CatalogResponse> model(@RequestBody(required=false) ElementRequest theRequest, @PathVariable String theItemId) {
+// final ElementRequest request = (theRequest == null) ? ElementRequest.EMPTY_REQUEST : theRequest;
+//
+// Catalog catalog = getCatalog(request.getCatalog());
+// DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout());
+//
+// catalog
+//// .fetchItemByItemId(/*theRequest.getProductId()*/theItemId)
+// .item(theItemId)
+// .withModels()
+// .execute()
+// .setHandler(
+// new CatalogHandler<Item>(request, result) {
+// public CatalogResponse handleData(Item theItem) {
+// if (theItem == null) {
+// return new CatalogError(this.request, "No such item");
+// }
+// Templates models = null;
+// try {
+// models = (Templates)theItem.elements("models", Templates.class);
+// }
+// catch (Exception x) {
+// return new CatalogError(this.request, "Failed to decode templates from result", x);
+// }
+//
+// if (models == null || models.size() == 0) {
+// return new CatalogError(this.request, "Item has no models");
+// }
+// if (models.size() > 1) {
+// return new CatalogError(this.request, "Item has more than one model !?");
+// }
+// try{
+// catalog.template(models.get(0).id())
+// .withInputs()
+// .withOutputs()
+// .withNodes()
+// .withNodeProperties()
+// .withNodePropertiesAssignments()
+// .withNodeRequirements()
+// .withNodeCapabilities()
+// .withNodeCapabilityProperties()
+// .withNodeCapabilityPropertyAssignments()
+// .withPolicies()
+// .withPolicyProperties()
+// .withPolicyPropertiesAssignments()
+// .execute()
+// .setHandler(
+// new CatalogHandler<Template>(this.request, this.result) {
+// public CatalogResponse handleData(Template theTemplate) {
+// CatalogResponse response = new CatalogResponse(this.request);
+// if (theTemplate != null) {
+// response.data()
+// .put("model", patchData(catalog, theTemplate.data()));
+// }
+// return response;
+// }
+// });
+// }
+// catch (Exception x) {
+// x.printStackTrace();
+// }
+// return null;
+// }
+// });
+//
+// return result;
+// }
+
+// @RequestMapping(value="/{theItemId}/type/{theTypeName}",method={RequestMethod.POST,RequestMethod.GET}, produces = "application/json")
+// public DeferredResult<CatalogResponse> model(@RequestBody(required=false) ElementRequest theRequest, @PathVariable String theItemId, @PathVariable String theTypeName) {
+// final ElementRequest request = (theRequest == null) ? ElementRequest.EMPTY_REQUEST : theRequest;
+//
+// Catalog catalog = getCatalog(request.getCatalog());
+// DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout());
+//
+// catalog.type(theItemId, theTypeName)
+// .withHierarchy()
+// .withCapabilities()
+// .withRequirements()
+// .execute()
+// .setHandler(
+// new CatalogHandler<Type>(request, result) {
+// public CatalogResponse handleData(Type theType) {
+// CatalogResponse response = new CatalogResponse(this.request);
+// if (theType != null) {
+// response.data()
+// .put("type", patchData(catalog, theType.data()));
+// }
+// return response;
+// }
+// });
+//
+// return result;
+// }
+
+/*
+ @RequestMapping(value="/referents",method=RequestMethod.POST, produces = "application/json")
+ public DeferredResult<CatalogResponse> referents(@RequestBody(required=false) ElementRequest theRequest) {
+ final ElementRequest request = (theRequest == null) ? ElementRequest.EMPTY_REQUEST : theRequest;
+ DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout());
+
+ NeoCatalog catalog = asNeo(getCatalog(theRequest.getCatalog()));
+ if (catalog == null) {
+ result.setErrorResult(
+ new CatalogError(
+ theRequest,"The selected catalog is not capable of handling this request (referents)"));
+ return result;
+ }
+
+ catalog
+ .defaultRecommendations()
+ .setHandler(
+ new CatalogHandler<Mixels>(request, result) {
+ public CatalogResponse handleData(Mixels theElems) {
+ JSONArray ja = new JSONArray();
+ if (theElems != null) {
+ for (Element elem : theElems) {
+ ja.put(patchData(catalog, elem.data()));
+ }
+ }
+ CatalogResponse response = new CatalogResponse(this.request);
+ response.data()
+ .put("elements", ja);
+ return response;
+ }
+ });
+
+ return result;
+ }
+*/
+
+/* @RequestMapping(value="/{theItemId}/referents",method=RequestMethod.POST, produces = "application/json")
+ public DeferredResult<CatalogResponse> referents(@RequestBody(required=false) ElementRequest theRequest, @PathVariable String theItemId) {
+ final ElementRequest request = (theRequest == null) ? ElementRequest.EMPTY_REQUEST : theRequest;
+ DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout());
+
+ NeoCatalog catalog = asNeo(getCatalog(theRequest.getCatalog()));
+ if (catalog == null) {
+ result.setErrorResult(
+ new CatalogError(
+ theRequest,"The selected catalog is not capable of handling this request (item referents)"));
+ return result;
+ }
+
+ catalog
+ .recommendationsForItemId(theItemId)
+ .setHandler(
+ new CatalogHandler<Mixels>(request, result) {
+ public CatalogResponse handleData(Mixels theElems) {
+ JSONArray ja = new JSONArray();
+ if (theElems != null) {
+ for (Element elem : theElems) {
+ ja.put(patchData(catalog, elem.data()));
+ }
+ }
+ CatalogResponse response = new CatalogResponse(this.request);
+ response.data()
+ .put("elements", ja);
+ return response;
+ }
+ });
+
+ return result;
+ }
+*/
+    /**
+     * Post-construction initialization: resolves the default catalog URI from the
+     * injected configuration and eagerly builds its client (via getCatalog(null))
+     * so the first incoming request does not pay the connection cost.
+     */
+    @PostConstruct
+    public void initCatalog() {
+        // Dump some info and construct our configuration objects
+        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "initCatalog");
+
+        // ASDC_CATALOG_URL is expected to be an "asdc:"-scheme URI -- NOTE(review): confirm against deployment config
+        this.defaultCatalog = URI.create(systemProperties.getProperties().getProperty(DcaeBeConstants.Config.ASDC_CATALOG_URL));
+        // Initialize default catalog connection
+        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "default catalog at {}", this.defaultCatalog);
+        getCatalog(null);
+
+        // Done
+        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "CatalogEngine started");
+    }
+
+    /**
+     * Pre-destroy hook. Currently only logs; the static catalogs cache is neither
+     * cleared nor closed here -- NOTE(review): confirm this is intentional.
+     */
+    @PreDestroy
+    public void cleanupCatalog() {
+        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "destroyCatalog");
+    }
+
+    /**
+     * Returns the Catalog client registered for the given catalog URI, creating and
+     * caching it on first use. The URI scheme selects the implementation ("asdc" is
+     * the only scheme currently supported); the scheme-specific part plus optional
+     * fragment form the actual catalog endpoint reference.
+     *
+     * @param theCatalogUri the catalog URI, or null to use the configured default catalog
+     * @return the (possibly cached) Catalog instance, or null when no URI is available
+     *         or the scheme is not supported
+     * @throws IllegalArgumentException when the catalog reference cannot be parsed as a URI
+     */
+    public Catalog getCatalog(URI theCatalogUri) {
+        if (theCatalogUri == null) {
+            theCatalogUri = this.defaultCatalog;
+        }
+        if (theCatalogUri == null) {
+            return null;
+        }
+
+        // synchronize on the shared static cache: this method is reached concurrently
+        // by request threads (resolves the former "TODO: Thread safety!")
+        synchronized (catalogs) {
+            Catalog cat = catalogs.get(theCatalogUri);
+            if (cat != null) {
+                return cat;
+            }
+
+            String scheme = theCatalogUri.getScheme();
+            URI catalogUri;
+            try {
+                // only append the fragment when present; the previous code produced a
+                // literal "#null" suffix for fragment-less URIs
+                String fragment = theCatalogUri.getFragment();
+                catalogUri = new URI(theCatalogUri.getSchemeSpecificPart() + (fragment == null ? "" : "#" + fragment));
+            }
+            catch (URISyntaxException urisx) {
+                // keep the parse failure as the cause instead of discarding it
+                throw new IllegalArgumentException("Invalid catalog reference '" + theCatalogUri.getSchemeSpecificPart() + "'", urisx);
+            }
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Build catalog for {}", catalogUri);
+
+            if (!"asdc".equals(scheme)) {
+                return null;
+            }
+            cat = new ASDCCatalog(catalogUri);
+            catalogs.put(theCatalogUri, cat);
+            return cat;
+        }
+    }
+
+/* private NeoCatalog asNeo(Catalog theCatalog) {
+ try {
+ return (NeoCatalog)theCatalog;
+ }
+ catch (ClassCastException ccx) {
+ return null;
+ }
+ }*/
+
+    /**
+     * Normalizes a catalog element's JSON for clients: records the owning catalog URI
+     * under "catalog", exposes the storage id as "catalogId" and promotes "itemId" to "id".
+     * The input object is modified in place and returned for chaining.
+     *
+     * Statement order matters: "catalogId" must capture the original "id" before it is
+     * overwritten with "itemId". optLong() yields 0 for missing keys -- NOTE(review):
+     * confirm 0 is an acceptable sentinel for elements lacking these fields.
+     *
+     * @param theCat  the catalog the data was read from
+     * @param theData the element data to patch (mutated)
+     * @return theData, for call chaining
+     */
+    public JSONObject patchData(Catalog theCat, JSONObject theData) {
+        theData.put("catalog", theCat.getUri());
+        theData.put("catalogId", theData.optLong("id"));
+        theData.put("id", theData.optLong("itemId"));
+        return theData;
+    }
+
+    /**
+     * Base handler bridging an asynchronous Catalog API {@code Future} result to the
+     * Spring {@code DeferredResult} of the originating HTTP request. Subclasses
+     * implement {@link #handleData} to turn retrieved data into a response.
+     */
+    public abstract class CatalogHandler<T> implements FutureHandler<T> {
+
+        protected DeferredResult result;
+        protected CatalogRequest request;
+
+        /**
+         * @param theRequest the originating catalog request (carried into responses/errors)
+         * @param theResult  the deferred HTTP result to complete when data arrives
+         */
+        public CatalogHandler(CatalogRequest theRequest, DeferredResult theResult) {
+            this.request = theRequest;
+            this.result = theResult;
+        }
+
+        /**
+         * Turns successfully retrieved catalog data into a response. Returning null
+         * indicates that processing continues in another asynchronous stage and the
+         * deferred result must not be completed yet.
+         */
+        public abstract CatalogResponse handleData(T theData);
+
+        @Override
+        public void handle(Future<T> theEvent) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "handle");
+
+            // the HTTP request may have timed out (or been answered) before the catalog replied
+            if (this.result.isSetOrExpired()) {
+                debugLogger.log(LogLevel.WARN, this.getClass().getName(), "handle, Data is late");
+                return;
+            }
+
+            if (theEvent.failed()) {
+                this.result.setErrorResult(new CatalogError(this.request, "Catalog API failed", theEvent.cause()));
+            }
+            else {
+                debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "handle, got: {}", theEvent.result());
+                CatalogResponse response = handleData(theEvent.result());
+                // a null result allows the handler to pass the processing onto some other async processing stage
+                if (response != null) {
+                    // setResult returns false when the deferred result expired between the check above and now
+                    if (!this.result.setResult(response)) {
+                        this.result.setErrorResult(new CatalogError(this.request, "Catalog API call successful but late"));
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogEngine.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogEngine.java
new file mode 100644
index 0000000..042798f
--- /dev/null
+++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogEngine.java
@@ -0,0 +1,26 @@
+package org.onap.sdc.dcae.catalog.engine;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+/**
+ * Spring Boot entry point for the catalog service.
+ */
+@SpringBootApplication
+public class CatalogEngine {
+
+	public static void main(String[] args) {
+		SpringApplication.run(CatalogEngine.class, args);
+	}
+}
diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogError.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogError.java
new file mode 100644
index 0000000..0c7c418
--- /dev/null
+++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogError.java
@@ -0,0 +1,20 @@
+package org.onap.sdc.dcae.catalog.engine;
+
+/**
+ * Error response for a failed catalog request: carries a message and,
+ * optionally, the string form of the underlying exception in the
+ * response's error object.
+ */
+public class CatalogError extends CatalogResponse {
+
+	public CatalogError(CatalogRequest theRequest, String theMessage) {
+		super(theRequest);
+		error().put("message", theMessage);
+	}
+
+	public CatalogError(CatalogRequest theRequest, String theMessage, Throwable theError) {
+		super(theRequest);
+		error().put("message", theMessage)
+					 .put("exception", theError.toString());
+	}
+}
diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogMessage.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogMessage.java
new file mode 100644
index 0000000..aee475b
--- /dev/null
+++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogMessage.java
@@ -0,0 +1,27 @@
+package org.onap.sdc.dcae.catalog.engine;
+
+import java.util.UUID;
+
+/**
+ * Base type for catalog traffic: every message carries a correlation id
+ * and a timestamp (0 until one is assigned).
+ */
+public class CatalogMessage {
+
+	// Correlation identifier; null until assigned by the sender.
+	private UUID id;
+	// Timestamp value; 0 means "not set". Units are caller-defined.
+	private long timestamp;
+
+	public UUID getId() {
+		return id;
+	}
+
+	public void setId(UUID theId) {
+		id = theId;
+	}
+
+	public long getTimestamp() {
+		return timestamp;
+	}
+
+	public void setTimestamp(long theTimestamp) {
+		timestamp = theTimestamp;
+	}
+}
diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogRequest.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogRequest.java
new file mode 100644
index 0000000..ad6caca
--- /dev/null
+++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogRequest.java
@@ -0,0 +1,27 @@
+package org.onap.sdc.dcae.catalog.engine;
+
+import java.net.URI;
+
+/**
+ * Base request message: identifies the target catalog by URI and carries
+ * an optional timeout (0 means "no timeout requested").
+ */
+public class CatalogRequest extends CatalogMessage {
+
+	private URI catalog;
+	private long timeout = 0;
+
+	public void setCatalog(URI theCatalogUri) {
+		this.catalog = theCatalogUri;
+	}
+
+	public URI getCatalog() {
+		return this.catalog;
+	}
+
+	public void setTimeout(long theTimeout) {
+		this.timeout = theTimeout;
+	}
+
+	public long getTimeout() {
+		return this.timeout;
+	}
+}
diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogResponse.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogResponse.java
new file mode 100644
index 0000000..2f9913f
--- /dev/null
+++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogResponse.java
@@ -0,0 +1,39 @@
+package org.onap.sdc.dcae.catalog.engine;
+
+import com.fasterxml.jackson.annotation.JsonRawValue;
+
+import org.json.JSONObject;
+
+/**
+ * Base response message: echoes the request's id and timestamp and exposes
+ * two mutable JSON payloads, one for data and one for error details.
+ */
+public class CatalogResponse extends CatalogMessage {
+
+	private JSONObject data = new JSONObject(),
+										 error = new JSONObject();
+
+	public CatalogResponse(CatalogRequest theRequest) {
+		// Correlate this response with its originating request.
+		setId(theRequest.getId());
+		setTimestamp(theRequest.getTimestamp());
+	}
+
+	/** Mutable data payload; callers populate it in place. */
+	public JSONObject data() {
+		return this.data;
+	}
+
+	// Serialized verbatim by Jackson so the JSON is embedded, not re-escaped.
+	@JsonRawValue
+	public String getData() {
+		return this.data.toString();
+	}
+
+	/** Mutable error payload; callers populate it in place. */
+	public JSONObject error() {
+		return this.error;
+	}
+
+	// Serialized verbatim by Jackson so the JSON is embedded, not re-escaped.
+	@JsonRawValue
+	public String getError() {
+		return this.error.toString();
+	}
+}
diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ElementRequest.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ElementRequest.java
new file mode 100644
index 0000000..87d532f
--- /dev/null
+++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ElementRequest.java
@@ -0,0 +1,6 @@
+package org.onap.sdc.dcae.catalog.engine;
+
+// Marker request for element queries; adds no state beyond CatalogRequest.
+public class ElementRequest extends CatalogRequest {
+
+	// Shared default instance used when a caller supplies no request body.
+	public static final ElementRequest EMPTY_REQUEST = new ElementRequest();
+}
diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ElementsLookup.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ElementsLookup.java
new file mode 100644
index 0000000..756be89
--- /dev/null
+++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ElementsLookup.java
@@ -0,0 +1,49 @@
+package org.onap.sdc.dcae.catalog.engine;
+
+import java.util.Map;
+import java.util.Collections;
+
+import org.json.JSONObject;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+
+/**
+ * Lookup request for catalog elements: an optional annotation name plus a
+ * selector map of property constraints.
+ */
+public class ElementsLookup extends CatalogRequest {
+
+	private String annotation;
+	private Map<String,Object> selector;
+
+	public void setAnnotation(String theAnnon) {
+		this.annotation = theAnnon;
+	}
+
+	public String getAnnotation() {
+		return this.annotation;
+	}
+
+	/** Never null: returns an empty map when no selector was provided. */
+	public Map<String,Object> getSelector() {
+		return this.selector == null ? Collections.emptyMap() : this.selector;
+	}
+
+	public void setSelector(Map<String,Object> theSelector) {
+		this.selector = theSelector;
+	}
+
+	public Object getSelectorEntry(String theName) {
+		return getSelector().get(theName);
+	}
+
+	/**
+	 * Builds a JSONObject view of the selector by hand, because the
+	 * JSONObject(Map) constructor would not copy entries with null values;
+	 * nulls are represented as JSONObject.NULL instead.
+	 */
+	@JsonIgnore
+	public JSONObject getJSONSelector() {
+		JSONObject jsonSelector = new JSONObject();
+		// Iterate via getSelector() rather than the field so that a null
+		// selector yields an empty JSON object instead of an NPE.
+		for (Map.Entry<String, Object> entry: getSelector().entrySet()) {
+			jsonSelector.put(entry.getKey(), entry.getValue() != null ? entry.getValue() : JSONObject.NULL);
+		}
+		return jsonSelector;
+	}
+}
diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ItemsRequest.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ItemsRequest.java
new file mode 100644
index 0000000..9215282
--- /dev/null
+++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ItemsRequest.java
@@ -0,0 +1,16 @@
+package org.onap.sdc.dcae.catalog.engine;
+
+/**
+ * Request for catalog items rooted at a starting label.
+ */
+public class ItemsRequest extends CatalogRequest {
+
+	// Root label used when none is supplied (single source of truth for
+	// the previously duplicated literal).
+	private static final String DEFAULT_STARTING_LABEL = "Superportfolio";
+
+	public static final ItemsRequest EMPTY_REQUEST = new ItemsRequest(DEFAULT_STARTING_LABEL);
+
+	private String startingLabel;
+
+	private ItemsRequest(String theLabel) {
+		this.startingLabel = theLabel;
+	}
+
+	/** Never null: falls back to the default label. */
+	public String getStartingLabel() {
+		return this.startingLabel == null ? DEFAULT_STARTING_LABEL : this.startingLabel;
+	}
+}
diff --git a/dcaedt_catalog/service/src/main/resources/log4j.properties b/dcaedt_catalog/service/src/main/resources/log4j.properties
new file mode 100644
index 0000000..e732166
--- /dev/null
+++ b/dcaedt_catalog/service/src/main/resources/log4j.properties
@@ -0,0 +1,15 @@
+#
+# Log4j configuration for the catalog service: DEBUG root logger to stdout,
+# with per-library overrides below.
+#
+
+
+log4j.rootLogger=DEBUG, stdout
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5p %l- %m%n
+
+log4j.logger.org.vertx=INFO
+log4j.logger.com.hazelcast=DEBUG
+log4j.logger.io.netty=WARN