author     gfraboni <gino.fraboni@amdocs.com>  2017-09-19 13:25:30 -0400
committer  Gino Fraboni <gino.fraboni@amdocs.com>  2017-09-21 20:19:44 +0000
commit     be779fa2d7f44b511bde929582b6340c650d24cc (patch)
tree       8fe925e696ab96aaa368fd5ca521eefecc124d2b
parent     c66ca602f26e66145091d529ef970be27fb766d9 (diff)
Add Consul support to 'OneClick' deployment.
OOM is moving to Consul to provide health checks for all ONAP components. This change adds deployment of a 3-node Consul server cluster and a single Consul agent to the 'OneClick' deployment. As a first step, health-check scripts for the A&AI microservices are also included; support for additional ONAP components will follow.

Issue-ID: OOM-86
Change-Id: Ib63f3d8b1b745551c9ec55b6529d022b32006e9c
Signed-off-by: gfraboni <gino.fraboni@amdocs.com>
-rwxr-xr-x  kubernetes/config/docker/init/config-init.sh | 4
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-data-router-health.json | 14
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-model-loader-health.json | 14
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-search-data-service-health.json | 33
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-services-health.json | 53
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-sparky-be-health.json | 14
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-tabular-backend-health.json | 14
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/bin/client-cert-onap.crt.pem | 25
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/bin/client-cert-onap.key.pem | 32
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/bin/kubectl | bin 0 -> 72337373 bytes
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/model-loader.properties | 23
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/aai-search-storage-write-doc.txt | 9
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/aai-search-storage-write-script.sh | 17
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/data-router-script.sh | 16
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/gremlin-script.sh | 16
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/model-loader-script.sh | 16
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/search-data-service-availability.sh | 47
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sparky-be-script.sh | 16
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/tabular-db-availability.sh | 20
-rw-r--r--  kubernetes/consul/Chart.yaml | 4
-rw-r--r--  kubernetes/consul/templates/consul-agent-deployment.yaml | 29
-rw-r--r--  kubernetes/consul/templates/consul-server-deployment.yaml | 30
-rw-r--r--  kubernetes/consul/templates/consul-server-service.yaml | 22
-rw-r--r--  kubernetes/consul/values.yaml | 7
-rwxr-xr-x  kubernetes/oneclick/createAll.bash | 2
-rw-r--r--  kubernetes/oneclick/setenv.bash | 2
26 files changed, 477 insertions, 2 deletions
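
With consul added to HELM_APPS and to the createAll.bash component list (see the oneclick changes at the end of this diff), the new cluster can be brought up on its own. A minimal sketch, assuming the scripts are run from kubernetes/oneclick and that createAll.bash takes the usual '-n <namespace prefix>' option (an assumption; that option is not shown in the excerpt below):

    . setenv.bash                        # HELM_APPS now starts with 'consul'
    ./createAll.bash -n onap -a consul   # deploy only the Consul server cluster and agent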
diff --git a/kubernetes/config/docker/init/config-init.sh b/kubernetes/config/docker/init/config-init.sh
index 9217d2cd68..29167bf844 100755
--- a/kubernetes/config/docker/init/config-init.sh
+++ b/kubernetes/config/docker/init/config-init.sh
@@ -51,6 +51,9 @@ mkdir -p /config-init/$NAMESPACE/aai/data-router/logs/
mkdir -p /config-init/$NAMESPACE/mso/mariadb/data
mkdir -p /config-init/$NAMESPACE/clamp/mariadb/data
mkdir -p /config-init/$NAMESPACE/log/elasticsearch/data
+mkdir -p /config-init/$NAMESPACE/consul/consul-agent-config/bin
+mkdir -p /config-init/$NAMESPACE/consul/consul-agent-config/scripts
+mkdir -p /config-init/$NAMESPACE/consul/consul-server-config
echo "Setting permissions to container writeable directories"
chmod -R 777 /config-init/$NAMESPACE/sdc/logs/
@@ -73,6 +76,7 @@ echo "Substituting configuration parameters"
# replace the default 'onap' namespace qualification of K8s hostnames within the config files
find /config-init/$NAMESPACE/ -type f -exec sed -i -e "s/\.onap-/\.$NAMESPACE-/g" {} \;
+find /config-init/$NAMESPACE/ -type f -exec sed -i -e "s/kubectl -n onap/kubectl -n $NAMESPACE/g" {} \;
# set the ubuntu 14 image
find /config-init/$NAMESPACE/ -type f -exec sed -i -e "s/UBUNTU_14_IMAGE_NAME_HERE/$OPENSTACK_UBUNTU_14_IMAGE/g" {} \;
# set the openstack public network uuid
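
The added sed rule extends the existing namespace substitution to the hard-coded 'kubectl -n onap...' invocations in the new Consul check scripts. For illustration only, with a hypothetical NAMESPACE=mydeploy:

    # existing rule:  search-data-service.onap-aai  ->  search-data-service.mydeploy-aai
    # new rule:       kubectl -n onap-aai get pod   ->  kubectl -n mydeploy-aai get pod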
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-data-router-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-data-router-health.json
new file mode 100644
index 0000000000..a60203694a
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-data-router-health.json
@@ -0,0 +1,14 @@
+{
+ "service": {
+ "name": "A&AI Synapse Data Routing Service",
+ "checks": [
+ {
+ "id": "data-router-process",
+ "name": "Synapse Presence",
+ "script": "/consul/config/scripts/data-router-script.sh",
+ "interval": "15s",
+ "timeout": "1s"
+ }
+ ]
+ }
+}
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-model-loader-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-model-loader-health.json
new file mode 100644
index 0000000000..4e2e305afd
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-model-loader-health.json
@@ -0,0 +1,14 @@
+{
+ "service": {
+ "name": "A&AI Model Loader",
+ "checks": [
+ {
+ "id": "model-loader-process",
+ "name": "Model Loader Presence",
+ "script": "/consul/config/scripts/model-loader-script.sh",
+ "interval": "15s",
+ "timeout": "1s"
+ }
+ ]
+ }
+}
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-search-data-service-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-search-data-service-health.json
new file mode 100644
index 0000000000..c74fe8af26
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-search-data-service-health.json
@@ -0,0 +1,33 @@
+{
+ "service": {
+ "name": "A&AI Search Data Service",
+ "checks": [
+ {
+ "id": "elasticsearch",
+ "name": "Search Data Service Document Store",
+ "http": "http://elasticsearch.onap-aai:9200/_cat/indices?v",
+ "interval": "15s",
+ "timeout": "1s"
+ },
+ {
+ "id": "elasticsearch-write-health",
+ "name": "Search Data Service Document Store Write Test",
+ "script": "/consul/config/scripts/aai-search-storage-write-script.sh",
+ "interval": "60s"
+ },
+ {
+ "id": "search-data-service-availability",
+ "name": "Search Data Service Availability",
+ "script": "curl -k --cert /consul/config/bin/client-cert-onap.crt.pem --cert-type PEM --key /consul/config/bin/client-cert-onap.key.pem --key-type PEM https://search-data-service.onap-aai:9509/services/search-data-service/v1/jaxrsExample/jaxrs-services/echo/up 2>&1 | grep 'Up'",
+ "interval": "15s"
+ },
+ {
+ "id": "search-data-service-api",
+ "name": "Search Data Service Operational Test",
+ "script": "/consul/config/scripts/search-data-service-availability.sh",
+ "interval": "15s",
+ "timeout": "1s"
+ }
+ ]
+ }
+}
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-services-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-services-health.json
new file mode 100644
index 0000000000..35f9371e8d
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-services-health.json
@@ -0,0 +1,53 @@
+{
+ "service": {
+ "name": "Active and Available Inventory",
+ "checks": [
+ {
+ "id": "aai-service",
+ "name": "Core A&AI",
+ "http": "https://aai-service.onap-aai:8443/aai/util/echo",
+ "header": {
+ "Authorization": ["Basic QUFJOkFBSQ=="],
+ "X-TransactionId": ["ConsulHealthCheck"],
+ "X-FromAppId": ["healthcheck"]
+ },
+ "tls_skip_verify": true,
+ "interval": "15s",
+ "timeout": "1s"
+ },
+ {
+ "id": "aai-resources",
+ "name": "Resources Microservice",
+ "http": "https://aai-resources.onap-aai:8447/aai/util/echo",
+ "header": {
+ "Authorization": ["Basic QUFJOkFBSQ=="],
+ "X-TransactionId": ["ConsulHealthCheck"],
+ "X-FromAppId": ["healthcheck"]
+ },
+ "tls_skip_verify": true,
+ "interval": "15s",
+ "timeout": "1s"
+ },
+ {
+ "id": "aai-traversal",
+ "name": "Traversal Microservice",
+ "http": "https://aai-traversal.onap-aai:8446/aai/util/echo",
+ "header": {
+ "Authorization": ["Basic QUFJOkFBSQ=="],
+ "X-TransactionId": ["ConsulHealthCheck"],
+ "X-FromAppId": ["healthcheck"]
+ },
+ "tls_skip_verify": true,
+ "interval": "15s",
+ "timeout": "1s"
+ },
+ {
+ "id": "gremlin-server",
+ "name": "Graph Data Store",
+ "script": "/consul/config/scripts/gremlin-script.sh",
+ "interval": "15s",
+ "timeout": "1s"
+ }
+ ]
+ }
+}
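
The Authorization header in the checks above is plain HTTP Basic auth; 'QUFJOkFBSQ==' is simply the base64 encoding of the default A&AI credentials AAI:AAI, e.g. (GNU coreutils):

    $ echo 'QUFJOkFBSQ==' | base64 -d
    AAI:AAI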
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-sparky-be-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-sparky-be-health.json
new file mode 100644
index 0000000000..6af58dbf4f
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-sparky-be-health.json
@@ -0,0 +1,14 @@
+{
+ "service": {
+ "name": "A&AI UI Backend Service",
+ "checks": [
+ {
+ "id": "sparky-be-process",
+ "name": "UI Backend Presence",
+ "script": "/consul/config/scripts/sparky-be-script.sh",
+ "interval": "15s",
+ "timeout": "1s"
+ }
+ ]
+ }
+}
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-tabular-backend-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-tabular-backend-health.json
new file mode 100644
index 0000000000..f76b33b3d8
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-tabular-backend-health.json
@@ -0,0 +1,14 @@
+{
+ "service": {
+ "name": "A&AI Tabular Data Store",
+ "checks": [
+ {
+ "id": "tabular-backend",
+ "name": "Tabular Data Store Operational Test",
+ "script": "/consul/config/scripts/tabular-db-availability.sh",
+ "interval": "15s",
+ "timeout": "1s"
+ }
+ ]
+ }
+}
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/bin/client-cert-onap.crt.pem b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/bin/client-cert-onap.crt.pem
new file mode 100644
index 0000000000..5696aa3570
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/bin/client-cert-onap.crt.pem
@@ -0,0 +1,25 @@
+Bag Attributes
+ friendlyName: tomcat
+ localKeyID: 54 69 6D 65 20 31 34 39 33 33 32 33 39 32 32 37 35 31
+subject=/C=CA/ST=Ontario/L=Ottawa/O=ONAP/OU=ONAP/CN=ONAP
+issuer=/C=CA/ST=Ontario/L=Ottawa/O=ONAP/OU=ONAP/CN=ONAP
+-----BEGIN CERTIFICATE-----
+MIIDWTCCAkGgAwIBAgIERWHcIzANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJD
+QTEQMA4GA1UECBMHT250YXJpbzEPMA0GA1UEBxMGT3R0YXdhMQ0wCwYDVQQKEwRP
+TkFQMQ0wCwYDVQQLEwRPTkFQMQ0wCwYDVQQDEwRPTkFQMB4XDTE3MDQyNzIwMDUz
+N1oXDTM3MDExMjIwMDUzN1owXTELMAkGA1UEBhMCQ0ExEDAOBgNVBAgTB09udGFy
+aW8xDzANBgNVBAcTBk90dGF3YTENMAsGA1UEChMET05BUDENMAsGA1UECxMET05B
+UDENMAsGA1UEAxMET05BUDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AJsQpjB5U0exZHWKVt6xDzmBBhLiAtv7Qb8zsbAcIZPxuKsieOJykWDCaf+Ip7oe
++b86nf4LmKrNm4KMsDNnlU7Bg7+3HFa7m+tZgfILORv2HPMRXgvcqPFr1dxgTBkp
+xtlcGXHhA8oBpmqTmOCitE+ngVH+FBVxN93aHEDz+Dgc06PyzoP/xWI0GjvlOsv/
+qZeXCj6K4Hpu/FSPNk06Piq9M+rDwUMuyaRtY9FWjYMvkMCrRvlZUoAasrC0BGyR
+UAboHdk5aW3AZ0cVR6NMSlELcvCUFqzacAOWLgffX3b5vhkOaAsmnnzmxANV6s0t
+SqrD6Mmjg5OcYJW4VFKrwjUCAwEAAaMhMB8wHQYDVR0OBBYEFNji+IU70Qgptn4i
+boq/rOKNAg8tMA0GCSqGSIb3DQEBCwUAA4IBAQBc5mJLeeUUzJ4MujZjn0DS3Lvv
+THJTE54Id1euT3ddzfX3htF0Ewd90YzmLuj1y8r8PXj7b/8Bq+cvoKbmJ42c8h3X
+If0tqde+gYWx1X3NAWHwz00Cje9R0KY4Bx1Cvr39jTw/ESnuSQDKPHBnn8WyAS9K
+08ZhvrVSK54d3U7tDVut9UVva8Scdi12utTAWaOIlusLo3bU9Z6t+tgg7AnQBYc0
+N9oCMbq/MACFlLSdc1J6NITYS8XHY2RS8u88eLbWkCcEEx1glYz/PMX3+V1Ow9Uy
+MjenEx8ifl96ZSOe9XsI2gl2TCaevCY/QuREu4LZB9XmO0gncH7gF5w9Bw2b
+-----END CERTIFICATE-----
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/bin/client-cert-onap.key.pem b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/bin/client-cert-onap.key.pem
new file mode 100644
index 0000000000..c7e386e55f
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/bin/client-cert-onap.key.pem
@@ -0,0 +1,32 @@
+Bag Attributes
+ friendlyName: tomcat
+ localKeyID: 54 69 6D 65 20 31 34 39 33 33 32 33 39 32 32 37 35 31
+Key Attributes: <No Attributes>
+-----BEGIN PRIVATE KEY-----
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCbEKYweVNHsWR1
+ilbesQ85gQYS4gLb+0G/M7GwHCGT8birInjicpFgwmn/iKe6Hvm/Op3+C5iqzZuC
+jLAzZ5VOwYO/txxWu5vrWYHyCzkb9hzzEV4L3Kjxa9XcYEwZKcbZXBlx4QPKAaZq
+k5jgorRPp4FR/hQVcTfd2hxA8/g4HNOj8s6D/8ViNBo75TrL/6mXlwo+iuB6bvxU
+jzZNOj4qvTPqw8FDLsmkbWPRVo2DL5DAq0b5WVKAGrKwtARskVAG6B3ZOWltwGdH
+FUejTEpRC3LwlBas2nADli4H3192+b4ZDmgLJp585sQDVerNLUqqw+jJo4OTnGCV
+uFRSq8I1AgMBAAECggEANFs6wcM1S0+qC8XZ7vb5nQDjfByzunLrkBN0O3JEJB/J
+qn7JMixcyb7a61zIxR8QVHEGR3DC62jgyQOXusOOtjjAs0qwVtihnKVsKr1/WuGO
+hMOobXjj0iAG5ZHeH+DrMxjVvo2rKdnExtdvFunY18xG7dhMD7Fam525THUTql4K
+yxhT7X6MrfS1eFjbR6oAIGNjoNTwyyEjEm4yvHO3PnG2NeyIeu7zIO2k+GimAAXT
+tN3AK30lmr3+35k6o+XQAhDE4/6msn6jBVSdLfK35ATFGwrojD0bCgALR4SUNEyd
+i33nuNLGyeI7DPWbqmjyWQW9uWLFJD85We2HzqBZQQKBgQDIrJ4PLvYE75dFWnSa
+lBr1HZbl/x5mP56MVEiwTabRbUsJoXKlX44lm9hwQaPbuoUAflb1ZtNKbyiRVsuN
+Ft5RToU9PWXyFtc2eyLCJToxHI4MhsuGRAaEeic5+l12wdpRxl74eeXdKJK4P/iU
+8wdhSxDG2ekkj6lyye5l5iwcBwKBgQDF0Pptcs+yPCz9FRqCmHT/I4QTK1VSD6mW
+F2Yd2KEUa4aocIb+L56ghJfYR+enIe9hHmb0ulomJaLLTicZJk6ffDfaQpCFBiS7
+BirDqHX8zlnBHePrBzZPyA5EfGMLxlP4uUk4g28JMFBJaZTEXAnQLUH0mIm0o0YR
+mbsaVo/Y4wKBgFsG8iuxAaf7hoLPJVV5GUFWyrxJnWCEO0csdEyE7MbS7NbRhU++
+qJwmtWc2Xz2svegbZxaqLe31vlEvLeYyGWaIV6gP0c6ezcDI2lt2x46/hS/pdSjS
+cqJlRqXmC79y77VoZmwP31USsnshiYEHPLHFeza4YilTgWmwb5OJdTjBAoGBAJBC
+0P7UhedjvyNqKoUnDdurWPxp07Ueuvw8YDpP61jq+a8JMUlaDQLe76XI+oWGV/6p
+n0fGR0weklRV0Gmk6B2jB1BizuZUDqFd4/4ActtE2WvekoKqJc+VA+KqG8lQf5iZ
+924BXA6Fb2e6WcXBoV5yQvFP9M0JbWYUiMCydAElAoGBAKof78r8POfTPq9fQA9I
+0zsQGnxqnSqyIu5yobM3GyXHBPOKdevlxyXxuMnGTr7upSNZrDrrA+f5Czlu7Fas
+qdt/5PmqYQjRsVoHNQFatUzHWwx2vU2Pr1jBpZFBpnjnLwn3A35+UEWn13nCjkla
+TrDniEcyId4ya5cMLDnM7Zgw
+-----END PRIVATE KEY-----
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/bin/kubectl b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/bin/kubectl
new file mode 100644
index 0000000000..d53ce5f7f2
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/bin/kubectl
Binary files differ
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/model-loader.properties b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/model-loader.properties
new file mode 100644
index 0000000000..b2db044417
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/model-loader.properties
@@ -0,0 +1,23 @@
+# Model Loader Distribution Client Configuration
+ml.distribution.ACTIVE_SERVER_TLS_AUTH=false
+ml.distribution.ASDC_ADDRESS=c2.vm1.sdc.simpledemo.openecomp.org:8443
+ml.distribution.CONSUMER_GROUP=aai-ml-group
+ml.distribution.CONSUMER_ID=aai-ml
+ml.distribution.ENVIRONMENT_NAME=AUTO
+ml.distribution.KEYSTORE_PASSWORD=
+ml.distribution.KEYSTORE_FILE=asdc-client.jks
+ml.distribution.PASSWORD=OBF:1ks51l8d1o3i1pcc1r2r1e211r391kls1pyj1z7u1njf1lx51go21hnj1y0k1mli1sop1k8o1j651vu91mxw1vun1mze1vv11j8x1k5i1sp11mjc1y161hlr1gm41m111nkj1z781pw31kku1r4p1e391r571pbm1o741l4x1ksp
+ml.distribution.POLLING_INTERVAL=30
+ml.distribution.POLLING_TIMEOUT=20
+ml.distribution.USER=aai
+ml.distribution.ARTIFACT_TYPES=MODEL_INVENTORY_PROFILE,MODEL_QUERY_SPEC,VNF_CATALOG
+
+# Model Loader AAI REST Client Configuration
+ml.aai.BASE_URL=https://c1.vm1.aai.simpledemo.openecomp.org:8443
+ml.aai.MODEL_URL=/aai/v10/service-design-and-creation/models/model/
+ml.aai.NAMED_QUERY_URL=/aai/v10/service-design-and-creation/named-queries/named-query/
+ml.aai.VNF_IMAGE_URL=/aai/v8/service-design-and-creation/vnf-images
+ml.aai.KEYSTORE_FILE=aai-os-cert.p12
+ml.aai.KEYSTORE_PASSWORD=OBF:1i9a1u2a1unz1lr61wn51wn11lss1unz1u301i6o
+ml.aai.AUTH_USER=ModelLoader
+ml.aai.AUTH_PASSWORD=OBF:1qvu1v2h1sov1sar1wfw1j7j1wg21saj1sov1v1x1qxw
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/aai-search-storage-write-doc.txt b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/aai-search-storage-write-doc.txt
new file mode 100644
index 0000000000..a6e084cfea
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/aai-search-storage-write-doc.txt
@@ -0,0 +1,9 @@
+{
+ "vnfId" : "testwrite",
+ "device" : "10.198.1.31",
+ "timestamp" : "2017-08-23T19:13:56Z",
+ "jdmTotalMem" : "2097152",
+ "jdmAvailableMem" : "1877272",
+ "jdmUserCpu" : "16",
+ "jdmSystemCpu" : "3"
+}
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/aai-search-storage-write-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/aai-search-storage-write-script.sh
new file mode 100644
index 0000000000..26e13913a0
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/aai-search-storage-write-script.sh
@@ -0,0 +1,17 @@
+if curl -s -X PUT http://elasticsearch.onap-aai:9200/searchhealth/stats/testwrite -d @/consul/config/scripts/aai-search-storage-write-doc.txt | grep '\"created\":true'; then
+ if curl -s -X DELETE http://elasticsearch.onap-aai:9200/searchhealth/stats/testwrite | grep '\"failed\":0'; then
+ if curl -s -X GET http://elasticsearch.onap-aai:9200/searchhealth/stats/testwrite | grep '\"found\":false'; then
+ echo Successful PUT, DELETE, GET from Search Document Storage 2>&1
+ exit 0
+ else
+ echo Failed GET from Search Document Storage 2>&1
+ exit 1
+ fi
+ else
+ echo Failed DELETE from Search Document Storage 2>&1
+ exit 1
+ fi
+else
+ echo Failed PUT from Search Document Storage 2>&1
+ exit 1
+fi
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/data-router-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/data-router-script.sh
new file mode 100644
index 0000000000..53cd5886f9
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/data-router-script.sh
@@ -0,0 +1,16 @@
+
+NAME=$(/consul/config/bin/kubectl -n onap-aai get pod | grep -o "data-router[^[:space:]]*")
+
+if [ -n "$NAME" ]; then
+ if /consul/config/bin/kubectl -n onap-aai exec -it $NAME -- ps -efww | grep 'java' | grep 'data-router' > /dev/null; then
+
+ echo Success. Synapse process is running. 2>&1
+ exit 0
+ else
+ echo Failed. Synapse process is not running. 2>&1
+ exit 1
+ fi
+else
+ echo Failed. Synapse container is offline. 2>&1
+ exit 1
+fi
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/gremlin-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/gremlin-script.sh
new file mode 100644
index 0000000000..c1766f8a2a
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/gremlin-script.sh
@@ -0,0 +1,16 @@
+
+NAME=$(/consul/config/bin/kubectl -n onap-aai get pod | grep -o "gremlin[^[:space:]]*")
+
+if [ -n "$NAME" ]; then
+ if /consul/config/bin/kubectl -n onap-aai exec -it $NAME -- ps -efww | grep 'java' | grep 'gremlin-server' > /dev/null; then
+
+ echo Success. Gremlin Server process is running. 2>&1
+ exit 0
+ else
+ echo Failed. Gremlin Server process is not running. 2>&1
+ exit 1
+ fi
+else
+ echo Failed. Gremlin Server container is offline. 2>&1
+ exit 1
+fi
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/model-loader-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/model-loader-script.sh
new file mode 100644
index 0000000000..1c93ecb38e
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/model-loader-script.sh
@@ -0,0 +1,16 @@
+
+NAME=$(/consul/config/bin/kubectl -n onap-aai get pod | grep -o "model-loader[^[:space:]]*")
+
+if [ -n "$NAME" ]; then
+ if /consul/config/bin/kubectl -n onap-aai exec -it $NAME -- ps -efww | grep 'java' | grep 'model-loader' > /dev/null; then
+
+ echo Success. Model Loader process is running. 2>&1
+ exit 0
+ else
+ echo Failed. Model Loader process is not running. 2>&1
+ exit 1
+ fi
+else
+ echo Failed. Model Loader container is offline. 2>&1
+ exit 1
+fi
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/search-data-service-availability.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/search-data-service-availability.sh
new file mode 100644
index 0000000000..e5cf5cfefb
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/search-data-service-availability.sh
@@ -0,0 +1,47 @@
+#!/bin/sh
+
+SEARCH_SERVICE_NAME="search-data-service.onap-aai"
+SEARCH_SERVICE_PORT=9509
+HEALTH_CHECK_INDEX="healthcheck"
+
+# 'Document Index' REST Endpoint
+INDEX_URL="https://$SEARCH_SERVICE_NAME:$SEARCH_SERVICE_PORT/services/search-data-service/v1/search/indexes/$HEALTH_CHECK_INDEX"
+INDEX_SCHEMA="{\"fields\":[{\"name\": \"field1\", \"data-type\": \"string\"}]}"
+
+
+SEARCH_CERT_FILE="/consul/config/bin/client-cert-onap.crt.pem"
+SEARCH_KEY_FILE="/consul/config/bin/client-cert-onap.key.pem"
+
+
+## Try to create an index via the Search Data Service API.
+CREATE_INDEX_RESP=$(curl -s -o /dev/null -w "%{http_code}" -k --cert $SEARCH_CERT_FILE --cert-type PEM --key $SEARCH_KEY_FILE --key-type PEM -d "$INDEX_SCHEMA" --header "Content-Type: application/json" --header "X-TransactionId: ConsulHealthCheck" -X PUT $INDEX_URL)
+
+RESULT_STRING=" "
+
+if [ $CREATE_INDEX_RESP -eq 201 ]; then
+ RESULT_STRING="Service Is Able To Communicate With Back End"
+elif [ $CREATE_INDEX_RESP -eq 400 ]; then
+ # A 400 response could mean that the index already exists (ie: we didn't
+ # clean up after ourselves on a previous check), so log the response but
+ # don't exit yet. If we fail on the delete then we can consider the
+ # check a failure, otherwise, we are good.
+ RESULT_STRING="$RESULT_STRING Create Index [FAIL - 400 (possible index already exists)] "
+else
+ RESULT_STRING="Service API Failure - $CREATE_INDEX_RESP"
+ echo $RESULT_STRING
+ exit 1
+fi
+
+## Now, clean up after ourselves.
+DELETE_INDEX_RESP=$(curl -s -o /dev/null -w "%{http_code}" -k --cert $SEARCH_CERT_FILE --cert-type PEM --key $SEARCH_KEY_FILE --key-type PEM -d "{ }" --header "Content-Type: application/json" --header "X-TransactionId: ConsulHealthCheck" -X DELETE $INDEX_URL)
+
+if [ $DELETE_INDEX_RESP -eq 200 ]; then
+ RESULT_STRING="Service Is Able To Communicate With Back End"
+else
+ RESULT_STRING="Service API Failure - $DELETE_INDEX_RESP"
+ echo $RESULT_STRING
+ exit 1
+fi
+
+echo $RESULT_STRING
+exit 0
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sparky-be-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sparky-be-script.sh
new file mode 100644
index 0000000000..fe265ba2b0
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sparky-be-script.sh
@@ -0,0 +1,16 @@
+
+NAME=$(/consul/config/bin/kubectl -n onap-aai get pod | grep -o "sparky-be[^[:space:]]*")
+
+if [ -n "$NAME" ]; then
+ if /consul/config/bin/kubectl -n onap-aai exec -it $NAME -- ps -efww | grep 'java' | grep 'sparky' > /dev/null; then
+
+ echo Success. UI Backend Service process is running. 2>&1
+ exit 0
+ else
+ echo Failed. UI Backend Service process is not running. 2>&1
+ exit 1
+ fi
+else
+ echo Failed. UI Backend Service container is offline. 2>&1
+ exit 1
+fi
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/tabular-db-availability.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/tabular-db-availability.sh
new file mode 100644
index 0000000000..da9d8a5d82
--- /dev/null
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/tabular-db-availability.sh
@@ -0,0 +1,20 @@
+
+# Query the Hbase service for the cluster status.
+GET_CLUSTER_STATUS_RESPONSE=$(curl -si -X GET -H "Accept: text/xml" http://hbase.onap-aai:8080/status/cluster)
+
+if [ -z "$GET_CLUSTER_STATUS_RESPONSE" ]; then
+ echo "Tabular store is unreachable."
+ exit 2
+fi
+
+# Check the returned cluster status XML to verify that the 'DeadNodes' element
+# is empty (an empty <DeadNodes/> element means no dead region servers).
+DEAD_NODES=$(echo $GET_CLUSTER_STATUS_RESPONSE | grep "<DeadNodes/>")
+
+if [ -n "$DEAD_NODES" ]; then
+ echo "Tabular store is up and accessible."
+ exit 0
+else
+ echo "Tabular store is up but is reporting dead nodes - cluster may be in degraded state."
+ exit 1
+fi
diff --git a/kubernetes/consul/Chart.yaml b/kubernetes/consul/Chart.yaml
new file mode 100644
index 0000000000..318234db06
--- /dev/null
+++ b/kubernetes/consul/Chart.yaml
@@ -0,0 +1,4 @@
+apiVersion: v1
+description: A Helm chart for Consul
+name: consul
+version: 1.1.0
diff --git a/kubernetes/consul/templates/consul-agent-deployment.yaml b/kubernetes/consul/templates/consul-agent-deployment.yaml
new file mode 100644
index 0000000000..15318660b5
--- /dev/null
+++ b/kubernetes/consul/templates/consul-agent-deployment.yaml
@@ -0,0 +1,29 @@
+apiVersion: extensions/v1beta1
+kind: Deployment
+metadata:
+  labels:
+    app: consul-agent
+  name: consul-agent
+  namespace: "{{ .Values.nsPrefix }}-consul"
+spec:
+  selector:
+    matchLabels:
+      app: consul-agent
+  template:
+    metadata:
+      labels:
+        app: consul-agent
+      name: consul-agent
+    spec:
+      containers:
+      - image: "{{ .Values.consulimageRegistry }}:{{ .Values.consuldockerTag }}"
+        command: ["/usr/local/bin/docker-entrypoint.sh"]
+        args: ["agent","-client","0.0.0.0","-enable-script-checks","-join","consul-server.{{ .Values.nsPrefix }}-consul"]
+        name: consul-server
+        volumeMounts:
+        - mountPath: /consul/config
+          name: consul-agent-config
+      volumes:
+      - hostPath:
+          path: {{ .Values.rootHostPath }}/{{ .Values.nsPrefix }}/consul/consul-agent-config
+        name: consul-agent-config
diff --git a/kubernetes/consul/templates/consul-server-deployment.yaml b/kubernetes/consul/templates/consul-server-deployment.yaml
new file mode 100644
index 0000000000..3e6dcba865
--- /dev/null
+++ b/kubernetes/consul/templates/consul-server-deployment.yaml
@@ -0,0 +1,30 @@
+apiVersion: extensions/v1beta1
+kind: Deployment
+metadata:
+  labels:
+    app: consul-server
+  name: consul-server
+  namespace: "{{ .Values.nsPrefix }}-consul"
+spec:
+  replicas: 3
+  selector:
+    matchLabels:
+      app: consul-server
+  template:
+    metadata:
+      labels:
+        app: consul-server
+      name: consul-server
+    spec:
+      containers:
+      - image: "{{ .Values.consulimageRegistry }}:{{ .Values.consuldockerTag }}"
+        command: ["/usr/local/bin/docker-entrypoint.sh"]
+        args: ["agent","-server","-client","0.0.0.0","-enable-script-checks","-bootstrap-expect=3","-ui","-join","consul-server.{{ .Values.nsPrefix }}-consul"]
+        name: consul-server
+        volumeMounts:
+        - mountPath: /consul/config
+          name: consul-server-config
+      volumes:
+      - hostPath:
+          path: {{ .Values.rootHostPath }}/{{ .Values.nsPrefix }}/consul/consul-server-config
+        name: consul-server-config
diff --git a/kubernetes/consul/templates/consul-server-service.yaml b/kubernetes/consul/templates/consul-server-service.yaml
new file mode 100644
index 0000000000..465456425e
--- /dev/null
+++ b/kubernetes/consul/templates/consul-server-service.yaml
@@ -0,0 +1,22 @@
+apiVersion: v1
+kind: Service
+metadata:
+  labels:
+    app: consul-server
+  name: consul-server
+  namespace: "{{ .Values.nsPrefix }}-consul"
+spec:
+  ports:
+  - name: consul-ui
+    nodePort: {{ .Values.nodePortPrefix }}70
+    port: 8500
+    protocol: TCP
+    targetPort: 8500
+  - name: consul-join
+    nodePort: {{ .Values.nodePortPrefix }}71
+    port: 8301
+    protocol: TCP
+    targetPort: 8301
+  selector:
+    app: consul-server
+  type: {{ .Values.service.type | quote }}
diff --git a/kubernetes/consul/values.yaml b/kubernetes/consul/values.yaml
new file mode 100644
index 0000000000..2b713dc278
--- /dev/null
+++ b/kubernetes/consul/values.yaml
@@ -0,0 +1,7 @@
+nsPrefix: "inf"
+nodePortPrefix: 302
+consuldockerTag: "latest"
+rootHostPath: "/dockerdata-nfs"
+consulimageRegistry: "docker.io/consul"
+service:
+ type: NodePort
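
Note that consul-server-service.yaml builds its nodePorts by concatenating nodePortPrefix with a two-digit suffix, so with the defaults above the service renders roughly as follows (a sketch of the rendered values, not chart output copied verbatim):

    # {{ .Values.nodePortPrefix }}70  ->  nodePort 30270  (consul-ui,   targetPort 8500)
    # {{ .Values.nodePortPrefix }}71  ->  nodePort 30271  (consul-join, targetPort 8301)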
diff --git a/kubernetes/oneclick/createAll.bash b/kubernetes/oneclick/createAll.bash
index c59cf095ca..afe71af8e1 100755
--- a/kubernetes/oneclick/createAll.bash
+++ b/kubernetes/oneclick/createAll.bash
@@ -14,7 +14,7 @@ Usage: $0 [PARAMs]
-a [APP] : Specify a specific ONAP component (default: all)
from the following choices:
sdc, aai ,mso, message-router, robot,
- vid, sdnc, portal, policy, appc, multicloud, clamp
+ vid, sdnc, portal, policy, appc, multicloud, clamp, consul
EOF
}
diff --git a/kubernetes/oneclick/setenv.bash b/kubernetes/oneclick/setenv.bash
index 3e4125bbb9..a1aba3daf4 100644
--- a/kubernetes/oneclick/setenv.bash
+++ b/kubernetes/oneclick/setenv.bash
@@ -1,7 +1,7 @@
#!/bin/bash
# Deploying MSB first and kube2msb last will ensure all the ONAP services can be registered to MSB
-HELM_APPS=('msb' 'mso' 'message-router' 'sdnc' 'vid' 'robot' 'portal' 'policy' 'appc' 'aai' 'sdc' 'dcae' 'log' 'cli' 'multicloud' 'clamp' 'kube2msb')
+HELM_APPS=('consul' 'msb' 'mso' 'message-router' 'sdnc' 'vid' 'robot' 'portal' 'policy' 'appc' 'aai' 'sdc' 'dcae' 'log' 'cli' 'multicloud' 'clamp' 'kube2msb')
ONAP_DOCKER_REGISTRY=${ONAP_DOCKER_REGISTRY:-nexus3.onap.org:10001}
ONAP_DOCKER_USER=${ONAP_DOCKER_USER:-docker}
ONAP_DOCKER_PASS=${ONAP_DOCKER_PASS:-docker}
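
Once the chart is deployed, the registered A&AI checks can be inspected through the Consul UI or HTTP API exposed on the server NodePort. A hedged example, assuming the default values.yaml above and a worker node reachable at the hypothetical address <node-ip>:

    # Consul UI (the server is started with -ui; NodePort 30270 with the default nodePortPrefix)
    http://<node-ip>:30270/ui/

    # List every registered check and its current status via the standard Consul HTTP API
    curl http://<node-ip>:30270/v1/health/state/any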