author    BorislavG <Borislav.Glozman@amdocs.com>    2018-02-27 15:04:26 +0000
committer Alexis de Talhouët <alexis.de_talhouet@bell.ca>    2018-03-01 18:10:31 +0000
commit    8bfc6cf8c3d338c84c48201f7a4f274958e721a9 (patch)
tree      2e0e1a969afc6ae38f0a2f62cc1a36e184cb48e1 /kubernetes/config/docker
parent    f0a34e22011b4323bbd6a51ad313d6e547808101 (diff)
Run all components in one namespace
Change-Id: I5fcd4d577c1fda4de27842807c7cf7a5d372756e
Issue-ID: OOM-722
Signed-off-by: BorislavG <Borislav.Glozman@amdocs.com>
Diffstat (limited to 'kubernetes/config/docker')
-rwxr-xr-x  kubernetes/config/docker/init/config-init.sh | 4
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-hbase-health.json | 2
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-search-data-service-health.json | 4
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-services-health.json | 6
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-dbbuilder.json | 2
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnctldb01-healthcheck.json | 4
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnctldb02-healthcheck.json | 4
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnhost.json | 2
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-elastic-search.json | 4
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-kibana.json | 2
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-logstash.json | 20
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/mr-dmaap-health.json | 2
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/msb-health.json | 16
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/multicloud-health-check.json | 8
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/aai-search-storage-write-script.sh | 6
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/appc-dbhost-script.sh | 4
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/data-router-script.sh | 4
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/gremlin-script.sh | 4
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/model-loader-script.sh | 4
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mr-kafka-health.sh | 4
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mr-zookeeper-health.sh | 4
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-api-script.sh | 2
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-camunda-script.sh | 2
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-jra-script.sh | 2
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-mariadb-script.sh | 4
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-be-script.sh | 2
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-cs-script.sh | 2
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-fe-script.sh | 2
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-titan-script.sh | 2
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdnc-dbhost-script.sh | 4
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/search-data-service-availability.sh | 2
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sparky-be-script.sh | 4
-rwxr-xr-x  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/tabular-db-availability.sh | 2
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/vid-mariadb-script.sh | 4
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdc-health.json | 2
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-dgbuilder.json | 2
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-health.json | 2
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-portal-health.json | 2
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnctldb01-healthcheck.json | 4
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnctldb02-healthcheck.json | 4
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnhost.json | 2
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/vfc-health.json | 60
-rw-r--r--  kubernetes/config/docker/init/src/config/consul/consul-agent-config/vid-health.json | 2
-rw-r--r--  kubernetes/config/docker/init/src/config/dcae/message-router/dmaap/MsgRtrApi.properties | 8
-rw-r--r--  kubernetes/config/docker/init/src/config/log/filebeat/log4j/filebeat.yml | 2
-rw-r--r--  kubernetes/config/docker/init/src/config/log/filebeat/logback/filebeat.yml | 2
-rwxr-xr-x  kubernetes/config/docker/init/src/config/message-router/dmaap/MsgRtrApi.properties | 8
-rwxr-xr-x  kubernetes/config/docker/init/src/config/robot/eteshare/config/vm_properties.py | 66
48 files changed, 155 insertions(+), 155 deletions(-)
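The pattern throughout this change: every per-component namespace qualifier in Kubernetes service hostnames (onap-aai, onap-sdnc, onap-mso, and so on) is replaced by a single namespace-placeholder token, which config-init.sh rewrites to the actual deployment namespace at install time. For illustration, assuming the deployment namespace is onap:

    before this change:  http://hbase.onap-aai:8080/status/cluster
    after substitution:  http://hbase.onap:8080/status/cluster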
diff --git a/kubernetes/config/docker/init/config-init.sh b/kubernetes/config/docker/init/config-init.sh
index a0202f8e4e..0e5fae082a 100755
--- a/kubernetes/config/docker/init/config-init.sh
+++ b/kubernetes/config/docker/init/config-init.sh
@@ -93,8 +93,8 @@ echo "Substituting configuration parameters"
# replace the default 'onap' namespace qualification of K8s hostnames within the config files
SED_NS_PATHS="/config-init/$NAMESPACE/"
SED_NS_STRINGS=(
- "s/\.onap-/\.${NAMESPACE}-/g"
- "s/kubectl -n onap/kubectl -n ${NAMESPACE}/g"
+ "s/\.namespace-placeholder/\.${NAMESPACE}/g"
+ "s/kubectl -n namespace-placeholder/kubectl -n ${NAMESPACE}/g"
)
SED_NS_STRING=$(concat_array "${SED_NS_STRINGS[@]}")
find $SED_NS_PATHS -type f -exec sed -i -e "${SED_NS_STRING}" {} \;
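A minimal sketch (not part of the patch) of what the two new sed expressions do to a config file, assuming NAMESPACE=onap and a hypothetical sample file /tmp/sample.cfg:

    # Build a sample file containing both placeholder forms.
    NAMESPACE=onap
    printf '%s\n' \
      'http://hbase.namespace-placeholder:8080/status/cluster' \
      'kubectl -n namespace-placeholder get pod' > /tmp/sample.cfg
    # Apply the same two expressions config-init.sh now uses.
    sed -i \
      -e "s/\.namespace-placeholder/\.${NAMESPACE}/g" \
      -e "s/kubectl -n namespace-placeholder/kubectl -n ${NAMESPACE}/g" \
      /tmp/sample.cfg
    cat /tmp/sample.cfg
    # -> http://hbase.onap:8080/status/cluster
    # -> kubectl -n onap get pod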
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-hbase-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-hbase-health.json
index 07828431fc..1d23b88e0f 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-hbase-health.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-hbase-health.json
@@ -5,7 +5,7 @@
{
"id": "hbase-aai",
"name": "HBase Health Check",
- "http": "http://hbase.onap-aai:8080/status/cluster",
+ "http": "http://hbase.namespace-placeholder:8080/status/cluster",
"method": "GET",
"header": {
"Cache-Control": ["no-cache"],
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-search-data-service-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-search-data-service-health.json
index b1877c7424..bf8830f0df 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-search-data-service-health.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-search-data-service-health.json
@@ -5,7 +5,7 @@
{
"id": "elasticsearch",
"name": "Search Data Service Document Store",
- "http": "http://elasticsearch.onap-aai:9200/_cat/indices?v",
+ "http": "http://aai-elasticsearch.namespace-placeholder:9200/_cat/indices?v",
"interval": "15s",
"timeout": "1s"
},
@@ -18,7 +18,7 @@
{
"id": "search-data-service-availability",
"name": "Search Data Service Availability",
- "script": "curl -k --cert /consul/config/certs/client-cert-onap.crt.pem --cert-type PEM --key /consul/config/bin/client-cert-onap.key.pem --key-type PEM https://search-data-service.onap-aai:9509/services/search-data-service/v1/jaxrsExample/jaxrs-services/echo/up 2>&1 | grep 'Up'",
+ "script": "curl -k --cert /consul/config/certs/client-cert-onap.crt.pem --cert-type PEM --key /consul/config/bin/client-cert-onap.key.pem --key-type PEM https://search-data-service.namespace-placeholder:9509/services/search-data-service/v1/jaxrsExample/jaxrs-services/echo/up 2>&1 | grep 'Up'",
"interval": "15s"
},
{
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-services-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-services-health.json
index 35f9371e8d..99d964e7d8 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-services-health.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/aai-services-health.json
@@ -5,7 +5,7 @@
{
"id": "aai-service",
"name": "Core A&AI",
- "http": "https://aai-service.onap-aai:8443/aai/util/echo",
+ "http": "https://aai-service.namespace-placeholder:8443/aai/util/echo",
"header": {
"Authorization": ["Basic QUFJOkFBSQ=="],
"X-TransactionId": ["ConsulHealthCheck"],
@@ -18,7 +18,7 @@
{
"id": "aai-resources",
"name": "Resources Microservice",
- "http": "https://aai-resources.onap-aai:8447/aai/util/echo",
+ "http": "https://aai-resources.namespace-placeholder:8447/aai/util/echo",
"header": {
"Authorization": ["Basic QUFJOkFBSQ=="],
"X-TransactionId": ["ConsulHealthCheck"],
@@ -31,7 +31,7 @@
{
"id": "aai-traversal",
"name": "Traversal Microservice",
- "http": "https://aai-traversal.onap-aai:8446/aai/util/echo",
+ "http": "https://aai-traversal.namespace-placeholder:8446/aai/util/echo",
"header": {
"Authorization": ["Basic QUFJOkFBSQ=="],
"X-TransactionId": ["ConsulHealthCheck"],
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-dbbuilder.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-dbbuilder.json
index ec5539060f..2763505f59 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-dbbuilder.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-dbbuilder.json
@@ -5,7 +5,7 @@
{
"id": "appc-dgbuilder",
"name": "APPC-Dgbuilder Server Health Check",
- "http": "http://dgbuilder.onap-appc:3000/",
+ "http": "http://dgbuilder.namespace-placeholder:3000/",
"method": "HEAD",
"header": {
"Authorization": ["Basic ZGd1c2VyOnRlc3QxMjM="],
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnctldb01-healthcheck.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnctldb01-healthcheck.json
index 52ffdc0b6d..31f1b253f6 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnctldb01-healthcheck.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnctldb01-healthcheck.json
@@ -3,9 +3,9 @@
"name": "Health Check: APPC-SDN-CTL-DB-01",
"checks": [
{
- "id": "sdnctldb01.onap-appc",
+ "id": "appc-sdnctldb01.namespace-placeholder",
"name": "APPC SDNCTLDB01 Health Check",
- "tcp": "sdnctldb01.onap-appc:3306",
+ "tcp": "appc-sdnctldb01.namespace-placeholder:3306",
"interval": "10s",
"timeout": "1s"
}
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnctldb02-healthcheck.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnctldb02-healthcheck.json
index d6ac666ff4..60bdd228da 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnctldb02-healthcheck.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnctldb02-healthcheck.json
@@ -3,9 +3,9 @@
"name": "Health Check: APPC-SDN-CTL-DB-02",
"checks": [
{
- "id": "sdnctldb02.onap-appc",
+ "id": "sdnctldb02.namespace-placeholder",
"name": "APPC SDNCTLDB02 Health Check",
- "tcp": "sdnctldb02.onap-appc:3306",
+ "tcp": "sdnctldb02.namespace-placeholder:3306",
"interval": "10s",
"timeout": "1s"
}
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnhost.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnhost.json
index c96a35a22c..09b50077d7 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnhost.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/appc-sdnhost.json
@@ -5,7 +5,7 @@
{
"id": "appc-sdnhost",
"name": "APPC SDN Host Health Check",
- "http": "http://sdnhost.onap-appc:8282/apidoc/explorer/index.html",
+ "http": "http://appc-sdnhost.namespace-placeholder:8282/apidoc/explorer/index.html",
"method": "HEAD",
"header": {
"Authorization": ["Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ=="],
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-elastic-search.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-elastic-search.json
index 7785502018..3586f58932 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-elastic-search.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-elastic-search.json
@@ -5,7 +5,7 @@
{
"id": "log-elasticsearch-server",
"name": "Log Elastic Search Health Check",
- "http": "http://elasticsearch.onap-log:9200/_cluster/health?pretty",
+ "http": "http://elasticsearch.namespace-placeholder:9200/_cluster/health?pretty",
"method": "GET",
"tls_skip_verify": true,
"interval": "15s",
@@ -14,7 +14,7 @@
{
"id": "log-elasticsearch-tcp",
"name": "Log Elastic Search TCP Health Check",
- "tcp": "elasticsearchtcp.onap-log:9300",
+ "tcp": "elasticsearchtcp.namespace-placeholder:9300",
"interval": "15s",
"timeout": "1s"
}
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-kibana.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-kibana.json
index 794fb4b260..7fda31f8c0 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-kibana.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-kibana.json
@@ -5,7 +5,7 @@
{
"id": "log-kibana-server",
"name": "Log kibana Health Check",
- "http": "http://kibana.onap-log:5601/status",
+ "http": "http://kibana.namespace-placeholder:5601/status",
"method": "HEAD",
"tls_skip_verify": true,
"interval": "15s",
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-logstash.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-logstash.json
index 3c0f450356..e8e6236359 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-logstash.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/log-logstash.json
@@ -5,7 +5,7 @@
{
"id": "log-logstash-internal-server-gi",
"name": "Log Stash Health Check - General Information",
- "http": "http://logstashinternal.onap-log:9600/?pretty",
+ "http": "http://logstashinternal.namespace-placeholder:9600/?pretty",
"method": "GET",
"tls_skip_verify": true,
"interval": "15s",
@@ -14,7 +14,7 @@
{
"id": "log-logstash-internal-server-node-info",
"name": "Log Stash Health Check - Node Information",
- "http": "http://logstashinternal.onap-log:9600/_node/?pretty",
+ "http": "http://logstashinternal.namespace-placeholder:9600/_node/?pretty",
"method": "GET",
"tls_skip_verify": true,
"interval": "15s",
@@ -23,7 +23,7 @@
{
"id": "log-logstash-internal-server-os-info",
"name": "Log Stash Health Check - OS Information",
- "http": "http://logstashinternal.onap-log:9600/_node/os?pretty",
+ "http": "http://logstashinternal.namespace-placeholder:9600/_node/os?pretty",
"method": "GET",
"tls_skip_verify": true,
"interval": "15s",
@@ -32,7 +32,7 @@
{
"id": "log-logstash-internal-server-jvm-info",
"name": "Log Stash Health Check - JVM Information",
- "http": "http://logstashinternal.onap-log:9600/_node/jvm?pretty",
+ "http": "http://logstashinternal.namespace-placeholder:9600/_node/jvm?pretty",
"method": "GET",
"tls_skip_verify": true,
"interval": "15s",
@@ -41,7 +41,7 @@
{
"id": "log-logstash-internal-server-plugin-info",
"name": "Log Stash Health Check - Plugin Information",
- "http": "http://logstashinternal.onap-log:9600/_node/plugins?pretty",
+ "http": "http://logstashinternal.namespace-placeholder:9600/_node/plugins?pretty",
"method": "GET",
"tls_skip_verify": true,
"interval": "15s",
@@ -50,7 +50,7 @@
{
"id": "log-logstash-internal-server-node-stat",
"name": "Log Stash Health Check - Node Stats",
- "http": "http://logstashinternal.onap-log:9600/_node/stats?pretty",
+ "http": "http://logstashinternal.namespace-placeholder:9600/_node/stats?pretty",
"method": "GET",
"tls_skip_verify": true,
"interval": "15s",
@@ -59,7 +59,7 @@
{
"id": "log-logstash-internal-server-jvm-stat",
"name": "Log Stash Health Check - JVM Stats",
- "http": "http://logstashinternal.onap-log:9600/_node/stats/jvm?pretty",
+ "http": "http://logstashinternal.namespace-placeholder:9600/_node/stats/jvm?pretty",
"method": "GET",
"tls_skip_verify": true,
"interval": "15s",
@@ -68,7 +68,7 @@
{
"id": "log-logstash-internal-server-process-stat",
"name": "Log Stash Health Check - Process Stats",
- "http": "http://logstashinternal.onap-log:9600/_node/stats/process?pretty",
+ "http": "http://logstashinternal.namespace-placeholder:9600/_node/stats/process?pretty",
"method": "GET",
"tls_skip_verify": true,
"interval": "15s",
@@ -77,7 +77,7 @@
{
"id": "log-logstash-internal-server-os-stat",
"name": "Log Stash Health Check - OS Stats",
- "http": "http://logstashinternal.onap-log:9600/_node/stats/os?pretty",
+ "http": "http://logstashinternal.namespace-placeholder:9600/_node/stats/os?pretty",
"method": "GET",
"tls_skip_verify": true,
"interval": "15s",
@@ -86,7 +86,7 @@
{
"id": "log-logstash-tcp",
"name": "Log Stash File Beat TCP Health Check",
- "tcp": "logstash.onap-log:5044",
+ "tcp": "logstash.namespace-placeholder:5044",
"interval": "15s",
"timeout": "1s"
}
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/mr-dmaap-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/mr-dmaap-health.json
index bd01bc5d95..dddd9692b9 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/mr-dmaap-health.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/mr-dmaap-health.json
@@ -2,7 +2,7 @@
"service": {
"name": "Health Check: Message Router - DMaaP",
"check": {
- "http": "http://dmaap.onap-message-router:3904/topics",
+ "http": "http://dmaap.namespace-placeholder:3904/topics",
"interval": "30s",
"timeout": "1s"
}
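After substitution, the rewritten agent config files can be sanity-checked before the agent loads them; a hedged example, assuming a consul binary is on PATH and the files land under /consul/config as the check paths above suggest:

    # Validate the directory the way the Consul agent would load it.
    consul validate /consul/config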
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/msb-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/msb-health.json
index a5738b3277..02e7dc47f4 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/msb-health.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/msb-health.json
@@ -3,34 +3,34 @@
"name": "Health Check: MSB",
"checks": [
{
- "id": "msb-eag.onap-msb",
+ "id": "msb-eag.namespace-placeholder",
"name": "MSB eag Health Check",
- "http": "http://msb-eag.onap-msb:80/iui/microservices/default.html",
+ "http": "http://msb-eag.namespace-placeholder:80/iui/microservices/default.html",
"method": "HEAD",
"tls_skip_verify": true,
"interval": "15s",
"timeout": "1s"
},
{
- "id": "msb-iag.onap-msb",
+ "id": "msb-iag.namespace-placeholder",
"name": "MSB iag Health Check",
- "http": "http://msb-iag.onap-msb:80/iui/microservices/default.html",
+ "http": "http://msb-iag.namespace-placeholder:80/iui/microservices/default.html",
"method": "HEAD",
"tls_skip_verify": true,
"interval": "15s",
"timeout": "1s"
},
{
- "id": "msb-consul.onap-msb",
+ "id": "msb-consul.namespace-placeholder",
"name": "MSB consul Health Check",
- "tcp": "msb-consul.onap-msb:8500",
+ "tcp": "msb-consul.namespace-placeholder:8500",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "msb-discovery.onap-msb",
+ "id": "msb-discovery.namespace-placeholder",
"name": "MSB discovery Health Check",
- "tcp": "msb-discovery.onap-msb:10081",
+ "tcp": "msb-discovery.namespace-placeholder:10081",
"interval": "15s",
"timeout": "1s"
}
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/multicloud-health-check.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/multicloud-health-check.json
index 843afa32bc..760077c543 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/multicloud-health-check.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/multicloud-health-check.json
@@ -5,7 +5,7 @@
{
"id": "framework",
"name": "Framework Health Check",
- "http": "http://framework.onap-multicloud:9001/api/multicloud/v0/swagger.json",
+ "http": "http://framework.namespace-placeholder:9001/api/multicloud/v0/swagger.json",
"method": "HEAD",
"header": {
"Cache-Control": ["no-cache"],
@@ -19,7 +19,7 @@
{
"id": "multicloud-ocata",
"name": "Multicloud Ocata Health Check",
- "http": "http://multicloud-ocata.onap-multicloud:9006/api/multicloud-ocata/v0/swagger.json",
+ "http": "http://multicloud-ocata.namespace-placeholder:9006/api/multicloud-ocata/v0/swagger.json",
"method": "HEAD",
"header": {
"Cache-Control": ["no-cache"],
@@ -33,7 +33,7 @@
{
"id": "multicloud-vio",
"name": "Multicloud Vio Health Check",
- "http": "http://multicloud-vio.onap-multicloud:9004/api/multicloud-vio/v0/swagger.json",
+ "http": "http://multicloud-vio.namespace-placeholder:9004/api/multicloud-vio/v0/swagger.json",
"method": "HEAD",
"header": {
"Cache-Control": ["no-cache"],
@@ -47,7 +47,7 @@
{
"id": "multicloud-windriver",
"name": "Multicloud Windriver Health Check",
- "http": "http://multicloud-windriver.onap-multicloud:9005/api/multicloud-titanium_cloud/v0/swagger.json",
+ "http": "http://multicloud-windriver.namespace-placeholder:9005/api/multicloud-titanium_cloud/v0/swagger.json",
"method": "HEAD",
"header": {
"Cache-Control": ["no-cache"],
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/aai-search-storage-write-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/aai-search-storage-write-script.sh
index 26e13913a0..fce0a63015 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/aai-search-storage-write-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/aai-search-storage-write-script.sh
@@ -1,6 +1,6 @@
-if curl -s -X PUT http://elasticsearch.onap-aai:9200/searchhealth/stats/testwrite -d @/consul/config/scripts/aai-search-storage-write-doc.txt | grep '\"created\":true'; then
- if curl -s -X DELETE http://elasticsearch.onap-aai:9200/searchhealth/stats/testwrite | grep '\"failed\":0'; then
- if curl -s -X GET http://elasticsearch.onap-aai:9200/searchhealth/stats/testwrite | grep '\"found\":false'; then
+if curl -s -X PUT http://elasticsearch.namespace-placeholder:9200/searchhealth/stats/testwrite -d @/consul/config/scripts/aai-search-storage-write-doc.txt | grep '\"created\":true'; then
+ if curl -s -X DELETE http://elasticsearch.namespace-placeholder:9200/searchhealth/stats/testwrite | grep '\"failed\":0'; then
+ if curl -s -X GET http://elasticsearch.namespace-placeholder:9200/searchhealth/stats/testwrite | grep '\"found\":false'; then
echo Successful PUT, DELETE, GET from Search Document Storage 2>&1
exit 0
else
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/appc-dbhost-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/appc-dbhost-script.sh
index 4a78c315ba..9abfd49ca7 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/appc-dbhost-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/appc-dbhost-script.sh
@@ -1,6 +1,6 @@
-APPC_DBHOST_POD=$(/consul/config/bin/kubectl -n onap-appc get pod | grep -o "appc-dbhost-[^[:space:]]*")
+APPC_DBHOST_POD=$(/consul/config/bin/kubectl -n namespace-placeholder get pod | grep -o "appc-dbhost-[^[:space:]]*")
if [ -n "$APPC_DBHOST_POD" ]; then
- if /consul/config/bin/kubectl -n onap-appc exec -it $APPC_DBHOST_POD -- ./healthcheck.sh |grep -i "mysqld is alive"; then
+ if /consul/config/bin/kubectl -n namespace-placeholder exec -it $APPC_DBHOST_POD -- ./healthcheck.sh |grep -i "mysqld is alive"; then
echo Success. APPC DBHost is running. 2>&1
exit 0
else
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/data-router-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/data-router-script.sh
index 53cd5886f9..2c7a949987 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/data-router-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/data-router-script.sh
@@ -1,8 +1,8 @@
-NAME=$(/consul/config/bin/kubectl -n onap-aai get pod | grep -o "data-router[^[:space:]]*")
+NAME=$(/consul/config/bin/kubectl -n namespace-placeholder get pod | grep -o "data-router[^[:space:]]*")
if [ -n "$NAME" ]; then
- if /consul/config/bin/kubectl -n onap-aai exec -it $NAME -- ps -efww | grep 'java' | grep 'data-router' > /dev/null; then
+ if /consul/config/bin/kubectl -n namespace-placeholder exec -it $NAME -- ps -efww | grep 'java' | grep 'data-router' > /dev/null; then
echo Success. Synapse process is running. 2>&1
exit 0
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/gremlin-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/gremlin-script.sh
index c1766f8a2a..e55c90fdc1 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/gremlin-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/gremlin-script.sh
@@ -1,8 +1,8 @@
-NAME=$(/consul/config/bin/kubectl -n onap-aai get pod | grep -o "gremlin[^[:space:]]*")
+NAME=$(/consul/config/bin/kubectl -n namespace-placeholder get pod | grep -o "gremlin[^[:space:]]*")
if [ -n "$NAME" ]; then
- if /consul/config/bin/kubectl -n onap-aai exec -it $NAME -- ps -efww | grep 'java' | grep 'gremlin-server' > /dev/null; then
+ if /consul/config/bin/kubectl -n namespace-placeholder exec -it $NAME -- ps -efww | grep 'java' | grep 'gremlin-server' > /dev/null; then
echo Success. Gremlin Server process is running. 2>&1
exit 0
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/model-loader-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/model-loader-script.sh
index 1c93ecb38e..9f4f4a843d 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/model-loader-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/model-loader-script.sh
@@ -1,8 +1,8 @@
-NAME=$(/consul/config/bin/kubectl -n onap-aai get pod | grep -o "model-loader[^[:space:]]*")
+NAME=$(/consul/config/bin/kubectl -n namespace-placeholder get pod | grep -o "model-loader[^[:space:]]*")
if [ -n "$NAME" ]; then
- if /consul/config/bin/kubectl -n onap-aai exec -it $NAME -- ps -efww | grep 'java' | grep 'model-loader' > /dev/null; then
+ if /consul/config/bin/kubectl -n namespace-placeholder exec -it $NAME -- ps -efww | grep 'java' | grep 'model-loader' > /dev/null; then
echo Success. Model Loader process is running. 2>&1
exit 0
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mr-kafka-health.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mr-kafka-health.sh
index 317c2a699c..cde6e8f335 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mr-kafka-health.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mr-kafka-health.sh
@@ -1,6 +1,6 @@
-kafkapod=$(/consul/config/bin/kubectl -n onap-message-router get pod | grep -o "global-kafka-[^[:space:]]*")
+kafkapod=$(/consul/config/bin/kubectl -n namespace-placeholder get pod | grep -o "global-kafka-[^[:space:]]*")
if [ -n "$kafkapod" ]; then
- if /consul/config/bin/kubectl -n onap-message-router exec -it $kafkapod -- ps ef | grep -i kafka; then
+ if /consul/config/bin/kubectl -n namespace-placeholder exec -it $kafkapod -- ps ef | grep -i kafka; then
echo Success. Kafka process is running. 2>&1
exit 0
else
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mr-zookeeper-health.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mr-zookeeper-health.sh
index 3da456c05a..2534fcba9e 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mr-zookeeper-health.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mr-zookeeper-health.sh
@@ -1,6 +1,6 @@
-zkpod=$(/consul/config/bin/kubectl -n onap-message-router get pod | grep -o "zookeeper-[^[:space:]]*")
+zkpod=$(/consul/config/bin/kubectl -n namespace-placeholder get pod | grep -o "zookeeper-[^[:space:]]*")
if [ -n "$zkpod" ]; then
- if /consul/config/bin/kubectl -n onap-message-router exec -it $zkpod -- ps ef | grep -i zookeeper; then
+ if /consul/config/bin/kubectl -n namespace-placeholder exec -it $zkpod -- ps ef | grep -i zookeeper; then
echo Success. Zookeeper process is running. 2>&1
exit 0
else
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-api-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-api-script.sh
index ba0afc0dce..459d2e2b34 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-api-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-api-script.sh
@@ -1,5 +1,5 @@
## Query the health check API.
-HEALTH_CHECK_ENDPOINT="http://mso.onap-mso.svc.cluster.local:8080/ecomp/mso/infra/healthcheck"
+HEALTH_CHECK_ENDPOINT="http://mso.namespace-placeholder:8080/ecomp/mso/infra/healthcheck"
HEALTH_CHECK_RESPONSE=$(curl -s $HEALTH_CHECK_ENDPOINT)
READY=$(echo $HEALTH_CHECK_RESPONSE | grep "Application ready")
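The hunk ends at the diff context limit; for orientation, a hedged sketch of how such MSO script checks typically conclude under Consul's script-check convention (exit 0 = passing, exit 1 = warning, any other code = critical). The messages are illustrative, not taken from the patch:

    if [ -n "$READY" ]; then
       echo Success. MSO API is ready. 2>&1
       exit 0
    else
       echo Failed. MSO API is not ready. 2>&1
       exit 2
    fi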
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-camunda-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-camunda-script.sh
index 254a0445ce..8342843dd8 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-camunda-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-camunda-script.sh
@@ -1,5 +1,5 @@
## Query the health check API.
-HEALTH_CHECK_ENDPOINT="http://mso.onap-mso.svc.cluster.local:8080/mso/healthcheck"
+HEALTH_CHECK_ENDPOINT="http://mso.namespace-placeholder:8080/mso/healthcheck"
HEALTH_CHECK_RESPONSE=$(curl -s $HEALTH_CHECK_ENDPOINT)
READY=$(echo $HEALTH_CHECK_RESPONSE | grep "Application ready")
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-jra-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-jra-script.sh
index 29ffd8fe4c..c88ebe5a49 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-jra-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-jra-script.sh
@@ -1,5 +1,5 @@
## Query the health check API.
-HEALTH_CHECK_ENDPOINT="http://mso.onap-mso.svc.cluster.local:8080/networks/rest/healthcheck"
+HEALTH_CHECK_ENDPOINT="http://mso.namespace-placeholder:8080/networks/rest/healthcheck"
HEALTH_CHECK_RESPONSE=$(curl -s $HEALTH_CHECK_ENDPOINT)
READY=$(echo $HEALTH_CHECK_RESPONSE | grep "Application ready")
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-mariadb-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-mariadb-script.sh
index 84b22206e0..42e708ee02 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-mariadb-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/mso-mariadb-script.sh
@@ -1,7 +1,7 @@
-NAME=$(/consul/config/bin/kubectl -n onap-mso get pod | grep -o "mariadb[^[:space:]]*")
+NAME=$(/consul/config/bin/kubectl -n namespace-placeholder get pod | grep -o "mariadb[^[:space:]]*")
if [ -n "$NAME" ]; then
- if /consul/config/bin/kubectl -n onap-mso exec -it $NAME -- bash -c 'mysqladmin status -u root -p$MYSQL_ROOT_PASSWORD' > /dev/null; then
+ if /consul/config/bin/kubectl -n namespace-placeholder exec -it $NAME -- bash -c 'mysqladmin status -u root -p$MYSQL_ROOT_PASSWORD' > /dev/null; then
echo Success. mariadb process is running. 2>&1
exit 0
else
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-be-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-be-script.sh
index f460a3f116..66f35d9437 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-be-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-be-script.sh
@@ -1,5 +1,5 @@
## Query the health check API.
-HEALTH_CHECK_ENDPOINT="http://sdc-fe.onap-sdc:8181/sdc1/rest/healthCheck"
+HEALTH_CHECK_ENDPOINT="http://sdc-fe.namespace-placeholder:8181/sdc1/rest/healthCheck"
HEALTH_CHECK_RESPONSE=$(curl -s $HEALTH_CHECK_ENDPOINT)
## Strip out the ON_BOARDING section from the response XML (otherwise we will
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-cs-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-cs-script.sh
index 31cd8d3fa6..cd50120581 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-cs-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-cs-script.sh
@@ -1,5 +1,5 @@
## Query the health check API.
-HEALTH_CHECK_ENDPOINT="http://sdc-fe.onap-sdc:8181/sdc1/rest/healthCheck"
+HEALTH_CHECK_ENDPOINT="http://sdc-fe.namespace-placeholder:8181/sdc1/rest/healthCheck"
HEALTH_CHECK_RESPONSE=$(curl -s $HEALTH_CHECK_ENDPOINT)
## Strip out the ON_BOARDING section from the response XML (otherwise we will
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-fe-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-fe-script.sh
index 6a1d035ecd..9799c31201 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-fe-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-fe-script.sh
@@ -1,5 +1,5 @@
## Query the health check API.
-HEALTH_CHECK_ENDPOINT="http://sdc-fe.onap-sdc:8181/sdc1/rest/healthCheck"
+HEALTH_CHECK_ENDPOINT="http://sdc-fe.namespace-placeholder:8181/sdc1/rest/healthCheck"
HEALTH_CHECK_RESPONSE=$(curl -s $HEALTH_CHECK_ENDPOINT)
## Strip out the ON_BOARDING section from the response XML (otherwise we will
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-titan-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-titan-script.sh
index 6993402653..bfa2068f29 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-titan-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdc-titan-script.sh
@@ -1,5 +1,5 @@
## Query the health check API.
-HEALTH_CHECK_ENDPOINT="http://sdc-fe.onap-sdc:8181/sdc1/rest/healthCheck"
+HEALTH_CHECK_ENDPOINT="http://sdc-fe.namespace-placeholder:8181/sdc1/rest/healthCheck"
HEALTH_CHECK_RESPONSE=$(curl -s $HEALTH_CHECK_ENDPOINT)
## Strip out the ON_BOARDING section from the response XML (otherwise we will
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdnc-dbhost-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdnc-dbhost-script.sh
index 6bb07f80eb..8a523ce0b5 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdnc-dbhost-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sdnc-dbhost-script.sh
@@ -1,6 +1,6 @@
-SDNC_DBHOST_POD=$(/consul/config/bin/kubectl -n onap-sdnc get pod | grep -o "sdnc-dbhost-[^[:space:]]*")
+SDNC_DBHOST_POD=$(/consul/config/bin/kubectl -n namespace-placeholder get pod | grep -o "sdnc-dbhost-[^[:space:]]*")
if [ -n "$SDNC_DBHOST_POD" ]; then
- if /consul/config/bin/kubectl -n onap-sdnc exec -it $SDNC_DBHOST_POD -- ./healthcheck.sh |grep -i "mysqld is alive"; then
+ if /consul/config/bin/kubectl -n namespace-placeholder exec -it $SDNC_DBHOST_POD -- ./healthcheck.sh |grep -i "mysqld is alive"; then
echo Success. SDNC DBHost is running. 2>&1
exit 0
else
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/search-data-service-availability.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/search-data-service-availability.sh
index fc0b04a72b..ef96cca6a3 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/search-data-service-availability.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/search-data-service-availability.sh
@@ -1,6 +1,6 @@
#!/bin/sh
-SEARCH_SERVICE_NAME="search-data-service.onap-aai"
+SEARCH_SERVICE_NAME="search-data-service.namespace-placeholder"
SEARCH_SERVICE_PORT=9509
HEALTH_CHECK_INDEX="healthcheck"
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sparky-be-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sparky-be-script.sh
index fe265ba2b0..d7def2dd90 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sparky-be-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/sparky-be-script.sh
@@ -1,8 +1,8 @@
-NAME=$(/consul/config/bin/kubectl -n onap-aai get pod | grep -o "sparky-be[^[:space:]]*")
+NAME=$(/consul/config/bin/kubectl -n namespace-placeholder get pod | grep -o "sparky-be[^[:space:]]*")
if [ -n "$NAME" ]; then
- if /consul/config/bin/kubectl -n onap-aai exec -it $NAME -- ps -efww | grep 'java' | grep 'sparky' > /dev/null; then
+ if /consul/config/bin/kubectl -n namespace-placeholder exec -it $NAME -- ps -efww | grep 'java' | grep 'sparky' > /dev/null; then
echo Success. UI Backend Service process is running. 2>&1
exit 0
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/tabular-db-availability.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/tabular-db-availability.sh
index da9d8a5d82..a3ac9b433b 100755
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/tabular-db-availability.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/tabular-db-availability.sh
@@ -1,6 +1,6 @@
# Query the Hbase service for the cluster status.
-GET_CLUSTER_STATUS_RESPONSE=$(curl -si -X GET -H "Accept: text/xml" http://hbase.onap-aai:8080/status/cluster)
+GET_CLUSTER_STATUS_RESPONSE=$(curl -si -X GET -H "Accept: text/xml" http://hbase.namespace-placeholder:8080/status/cluster)
if [ -z "$GET_CLUSTER_STATUS_RESPONSE" ]; then
echo "Tabular store is unreachable."
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/vid-mariadb-script.sh b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/vid-mariadb-script.sh
index c87686f764..96c7969fac 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/vid-mariadb-script.sh
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/scripts/vid-mariadb-script.sh
@@ -1,7 +1,7 @@
-NAME=$(/consul/config/bin/kubectl -n onap-vid get pod | grep -o "vid-mariadb[^[:space:]]*")
+NAME=$(/consul/config/bin/kubectl -n namespace-placeholder get pod | grep -o "vid-mariadb[^[:space:]]*")
if [ -n "$NAME" ]; then
- if /consul/config/bin/kubectl -n onap-vid exec -it $NAME -- bash -c 'mysqladmin status -u root -p$MYSQL_ROOT_PASSWORD' > /dev/null; then
+ if /consul/config/bin/kubectl -n namespace-placeholder exec -it $NAME -- bash -c 'mysqladmin status -u root -p$MYSQL_ROOT_PASSWORD' > /dev/null; then
echo Success. mariadb process is running. 2>&1
exit 0
else
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdc-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdc-health.json
index 849e199589..374dcdfd62 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdc-health.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdc-health.json
@@ -33,7 +33,7 @@
{
"id": "sdc-catalog-healthcheck",
"name": "SDC Catalog Health Check",
- "http": "https://sdc-be.onap-sdc:8443/asdc/v1/catalog/services",
+ "http": "https://sdc-be.namespace-placeholder:8443/asdc/v1/catalog/services",
"header": {
"Authorization": ["Basic dmlkOktwOGJKNFNYc3pNMFdYbGhhazNlSGxjc2UyZ0F3ODR2YW9HR21KdlV5MlU="],
"X-ECOMP-InstanceID": ["VID"],
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-dgbuilder.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-dgbuilder.json
index 051c4abb3a..6c293995eb 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-dgbuilder.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-dgbuilder.json
@@ -5,7 +5,7 @@
{
"id": "sdnc-dgbuilder",
"name": "SDNC-DGbuilder Health Check",
- "http": "http://sdnc-dgbuilder.onap-sdnc:3000/",
+ "http": "http://sdnc-dgbuilder.namespace-placeholder:3000/",
"method": "HEAD",
"header": {
"Authorization": ["Basic ZGd1c2VyOnRlc3QxMjM="],
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-health.json
index fc26d2e192..a127aa1d27 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-health.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-health.json
@@ -5,7 +5,7 @@
{
"id": "odl-api-healthcheck",
"name": "SDNC API Health Check",
- "http": "http://sdnhost.onap-sdnc:8282/restconf/operations/SLI-API:healthcheck",
+ "http": "http://sdnhost.namespace-placeholder:8282/restconf/operations/SLI-API:healthcheck",
"method": "POST",
"header": {
"Authorization": ["Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ=="],
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-portal-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-portal-health.json
index 3ecc1b3f53..07f2bb7b95 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-portal-health.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-portal-health.json
@@ -5,7 +5,7 @@
{
"id": "sdnc-portal",
"name": "SDNC Portal Health Check",
- "http": "http://sdnc-portal.onap-sdnc:8843/login",
+ "http": "http://sdnc-portal.namespace-placeholder:8843/login",
"method": "HEAD",
"header": {
"Cache-Control": ["no-cache"],
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnctldb01-healthcheck.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnctldb01-healthcheck.json
index 321852e805..6b859c0894 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnctldb01-healthcheck.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnctldb01-healthcheck.json
@@ -3,9 +3,9 @@
"name": "Health Check: SDNC-SDN-CTL-DB-01",
"checks": [
{
- "id": "sdnctldb01.onap-sdnc",
+ "id": "sdnctldb01.namespace-placeholder",
"name": "SDNC SDNCTLDB01 Health Check",
- "tcp": "sdnctldb01.onap-sdnc:3306",
+ "tcp": "sdnctldb01.namespace-placeholder:3306",
"interval": "10s",
"timeout": "1s"
}
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnctldb02-healthcheck.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnctldb02-healthcheck.json
index 106a2069bb..c6ad3beb93 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnctldb02-healthcheck.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnctldb02-healthcheck.json
@@ -3,9 +3,9 @@
"name": "Health Check: SDNC-SDN-CTL-DB-02",
"checks": [
{
- "id": "sdnctldb02.onap-sdnc",
+ "id": "sdnctldb02.namespace-placeholder",
"name": "SDNC SDNCTLDB02 Health Check",
- "tcp": "sdnctldb02.onap-sdnc:3306",
+ "tcp": "sdnctldb02.namespace-placeholder:3306",
"interval": "10s",
"timeout": "1s"
}
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnhost.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnhost.json
index 93d5b1a115..9494810081 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnhost.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/sdnc-sdnhost.json
@@ -5,7 +5,7 @@
{
"id": "sdnc-sdnhost",
"name": "SDNC SDN Host Health Check",
- "http": "http://sdnhost.onap-sdnc:8282/apidoc/explorer/index.html",
+ "http": "http://sdnhost.namespace-placeholder:8282/apidoc/explorer/index.html",
"method": "HEAD",
"header": {
"Authorization": ["Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ=="],
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/vfc-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/vfc-health.json
index c7f83a8bfe..abc9e7f6b4 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/vfc-health.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/vfc-health.json
@@ -3,107 +3,107 @@
"name": "Health Check: VFC",
"checks": [
{
- "id": "vfc-catalog.onap-vfc",
+ "id": "vfc-catalog.namespace-placeholder",
"name": "VFC catalog Health Check",
- "tcp": "vfc-catalog.onap-vfc:8806",
+ "tcp": "vfc-catalog.namespace-placeholder:8806",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "vfc-emsdriver.onap-vfc",
+ "id": "vfc-emsdriver.namespace-placeholder",
"name": "VFC emsdriver Health Check",
- "tcp": "vfc-emsdriver.onap-vfc:8206",
+ "tcp": "vfc-emsdriver.namespace-placeholder:8206",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "vfc-gvnfmdriver.onap-vfc",
+ "id": "vfc-gvnfmdriver.namespace-placeholder",
"name": "VFC gvnfmdriver Health Check",
- "tcp": "vfc-gvnfmdriver.onap-vfc:8484",
+ "tcp": "vfc-gvnfmdriver.namespace-placeholder:8484",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "vfc-hwvnfmdriver.onap-vfc",
+ "id": "vfc-hwvnfmdriver.namespace-placeholder",
"name": "VFC hwvnfmdriver Health Check",
- "tcp": "vfc-hwvnfmdriver.onap-vfc:8482",
+ "tcp": "vfc-hwvnfmdriver.namespace-placeholder:8482",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "vfc-jujudriver.onap-vfc",
+ "id": "vfc-jujudriver.namespace-placeholder",
"name": "VFC jujudriver Health Check",
- "tcp": "vfc-jujudriver.onap-vfc:8483",
+ "tcp": "vfc-jujudriver.namespace-placeholder:8483",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "vfc-nokiavnfmdriver.onap-vfc",
+ "id": "vfc-nokiavnfmdriver.namespace-placeholder",
"name": "VFC nokiavnfmdriver Health Check",
- "tcp": "vfc-nokiavnfmdriver.onap-vfc:8486",
+ "tcp": "vfc-nokiavnfmdriver.namespace-placeholder:8486",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "vfc-nslcm.onap-vfc",
+ "id": "vfc-nslcm.namespace-placeholder",
"name": "VFC nslcm Health Check",
- "tcp": "vfc-nslcm.onap-vfc:8403",
+ "tcp": "vfc-nslcm.namespace-placeholder:8403",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "vfc-resmgr.onap-vfc",
+ "id": "vfc-resmgr.namespace-placeholder",
"name": "VFC resmgr Health Check",
- "tcp": "vfc-resmgr.onap-vfc:8480",
+ "tcp": "vfc-resmgr.namespace-placeholder:8480",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "vfc-vnflcm.onap-vfc",
+ "id": "vfc-vnflcm.namespace-placeholder",
"name": "VFC vnflcm Health Check",
- "tcp": "vfc-vnflcm.onap-vfc:8801",
+ "tcp": "vfc-vnflcm.namespace-placeholder:8801",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "vfc-vnfmgr.onap-vfc",
+ "id": "vfc-vnfmgr.namespace-placeholder",
"name": "VFC vnfmgr Health Check",
- "tcp": "vfc-vnfmgr.onap-vfc:8803",
+ "tcp": "vfc-vnfmgr.namespace-placeholder:8803",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "vfc-vnfres.onap-vfc",
+ "id": "vfc-vnfres.namespace-placeholder",
"name": "VFC vnfres Health Check",
- "tcp": "vfc-vnfres.onap-vfc:8802",
+ "tcp": "vfc-vnfres.namespace-placeholder:8802",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "vfc-workflow.onap-vfc",
+ "id": "vfc-workflow.namespace-placeholder",
"name": "VFC workflow Health Check",
- "tcp": "vfc-workflow.onap-vfc:10550",
+ "tcp": "vfc-workflow.namespace-placeholder:10550",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "vfc-workflowengineactiviti.onap-vfc",
+ "id": "vfc-workflowengineactiviti.namespace-placeholder",
"name": "VFC workflow-engine Health Check",
- "tcp": "vfc-workflowengineactiviti.onap-vfc:8080",
+ "tcp": "vfc-workflowengineactiviti.namespace-placeholder:8080",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "vfc-ztesdncdriver.onap-vfc",
+ "id": "vfc-ztesdncdriver.namespace-placeholder",
"name": "VFC ztesdncdriver Health Check",
- "tcp": "vfc-ztesdncdriver.onap-vfc:8411",
+ "tcp": "vfc-ztesdncdriver.namespace-placeholder:8411",
"interval": "15s",
"timeout": "1s"
},
{
- "id": "vfc-ztevnfmdriver.onap-vfc",
+ "id": "vfc-ztevnfmdriver.namespace-placeholder",
"name": "VFC ztevnfmdriver Health Check",
- "tcp": "vfc-ztevnfmdriver.onap-vfc:8410",
+ "tcp": "vfc-ztevnfmdriver.namespace-placeholder:8410",
"interval": "15s",
"timeout": "1s"
}
diff --git a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/vid-health.json b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/vid-health.json
index 8a2a4dca9b..39d71c4e5e 100644
--- a/kubernetes/config/docker/init/src/config/consul/consul-agent-config/vid-health.json
+++ b/kubernetes/config/docker/init/src/config/consul/consul-agent-config/vid-health.json
@@ -5,7 +5,7 @@
{
"id": "vid-server",
"name": "VID Server Health Check",
- "http": "http://vid-server.onap-vid:8080/vid/healthCheck",
+ "http": "http://vid-server.namespace-placeholder:8080/vid/healthCheck",
"method": "GET",
"header": {
"Authorization": ["Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ=="],
diff --git a/kubernetes/config/docker/init/src/config/dcae/message-router/dmaap/MsgRtrApi.properties b/kubernetes/config/docker/init/src/config/dcae/message-router/dmaap/MsgRtrApi.properties
index 8c6f50dc67..2f37755d88 100644
--- a/kubernetes/config/docker/init/src/config/dcae/message-router/dmaap/MsgRtrApi.properties
+++ b/kubernetes/config/docker/init/src/config/dcae/message-router/dmaap/MsgRtrApi.properties
@@ -15,9 +15,9 @@
##
## Both Cambria and Kafka make use of Zookeeper.
##
-config.zk.servers=zookeeper.onap-dcae:2181
+config.zk.servers=zookeeper.namespace-placeholder:2181
#config.zk.servers=172.17.0.1:2181
-#dmaap.onap-dcae:2181
+#dmaap.namespace-placeholder:2181
#10.208.128.229:2181
#config.zk.root=/fe3c/cambria/config
@@ -30,9 +30,9 @@ config.zk.servers=zookeeper.onap-dcae:2181
## configurations (after removing "kafka.")
## if you want to change request.required.acks it can take this one value
#kafka.metadata.broker.list=localhost:9092,localhost:9093
-kafka.metadata.broker.list=kafka.onap-dcae:9092
+kafka.metadata.broker.list=kafka.namespace-placeholder:9092
#kafka.metadata.broker.list=172.17.0.1:9092
-#dmaap.onap-dcae:9092
+#dmaap.namespace-placeholder:9092
#10.208.128.229:9092
##kafka.request.required.acks=-1
#kafka.client.zookeeper=${config.zk.servers}
diff --git a/kubernetes/config/docker/init/src/config/log/filebeat/log4j/filebeat.yml b/kubernetes/config/docker/init/src/config/log/filebeat/log4j/filebeat.yml
index 79c9a08a79..f15c2bb8e1 100644
--- a/kubernetes/config/docker/init/src/config/log/filebeat/log4j/filebeat.yml
+++ b/kubernetes/config/docker/init/src/config/log/filebeat/log4j/filebeat.yml
@@ -29,7 +29,7 @@ output.logstash:
#List of logstash server ip addresses with port number.
#But, in our case, this will be the loadbalancer IP address.
#For the below property to work the loadbalancer or logstash should expose 5044 port to listen the filebeat events or port in the property should be changed appropriately.
- hosts: ["logstash.onap-log:5044"]
+ hosts: ["logstash.namespace-placeholder:5044"]
#If enable will do load balancing among availabe Logstash, automatically.
loadbalance: true
diff --git a/kubernetes/config/docker/init/src/config/log/filebeat/logback/filebeat.yml b/kubernetes/config/docker/init/src/config/log/filebeat/logback/filebeat.yml
index f316b866af..7b78c9ba46 100644
--- a/kubernetes/config/docker/init/src/config/log/filebeat/logback/filebeat.yml
+++ b/kubernetes/config/docker/init/src/config/log/filebeat/logback/filebeat.yml
@@ -21,7 +21,7 @@ output.logstash:
#List of logstash server ip addresses with port number.
#But, in our case, this will be the loadbalancer IP address.
#For the below property to work the loadbalancer or logstash should expose 5044 port to listen the filebeat events or port in the property should be changed appropriately.
- hosts: ["logstash.onap-log:5044"]
+ hosts: ["logstash.namespace-placeholder:5044"]
#If enable will do load balancing among availabe Logstash, automatically.
loadbalance: true
diff --git a/kubernetes/config/docker/init/src/config/message-router/dmaap/MsgRtrApi.properties b/kubernetes/config/docker/init/src/config/message-router/dmaap/MsgRtrApi.properties
index d912179121..db8d911248 100755
--- a/kubernetes/config/docker/init/src/config/message-router/dmaap/MsgRtrApi.properties
+++ b/kubernetes/config/docker/init/src/config/message-router/dmaap/MsgRtrApi.properties
@@ -15,9 +15,9 @@
##
## Both Cambria and Kafka make use of Zookeeper.
##
-config.zk.servers=zookeeper.onap-message-router:2181
+config.zk.servers=zookeeper.namespace-placeholder:2181
#config.zk.servers=172.17.0.1:2181
-#dmaap.onap-message-router:2181
+#dmaap.namespace-placeholder:2181
#10.208.128.229:2181
#config.zk.root=/fe3c/cambria/config
@@ -30,9 +30,9 @@ config.zk.servers=zookeeper.onap-message-router:2181
## configurations (after removing "kafka.")
## if you want to change request.required.acks it can take this one value
#kafka.metadata.broker.list=localhost:9092,localhost:9093
-kafka.metadata.broker.list=global-kafka.onap-message-router:9092
+kafka.metadata.broker.list=global-kafka.namespace-placeholder:9092
#kafka.metadata.broker.list=172.17.0.1:9092
-#dmaap.onap-message-router:9092
+#dmaap.namespace-placeholder:9092
#10.208.128.229:9092
##kafka.request.required.acks=-1
#kafka.client.zookeeper=${config.zk.servers}
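These short hostnames work because Kubernetes publishes every service in cluster DNS as <service>.<namespace>.svc.cluster.local, with <service>.<namespace> as a valid short form. For example, from any pod in the cluster (assuming the final namespace is onap, and that nslookup is available in the image):

    # Both names resolve to the same ClusterIP.
    nslookup zookeeper.onap
    nslookup zookeeper.onap.svc.cluster.local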
diff --git a/kubernetes/config/docker/init/src/config/robot/eteshare/config/vm_properties.py b/kubernetes/config/docker/init/src/config/robot/eteshare/config/vm_properties.py
index 387a14451d..1b96b3a001 100755
--- a/kubernetes/config/docker/init/src/config/robot/eteshare/config/vm_properties.py
+++ b/kubernetes/config/docker/init/src/config/robot/eteshare/config/vm_properties.py
@@ -1,77 +1,77 @@
# File generated from /opt/config
#
-GLOBAL_INJECTED_AAI1_IP_ADDR = "aai-service.onap-aai"
+GLOBAL_INJECTED_AAI1_IP_ADDR = "aai-service.namespace-placeholder"
GLOBAL_INJECTED_AAI2_IP_ADDR = "N/A"
-GLOBAL_INJECTED_APPC_IP_ADDR = "sdnhost.onap-appc"
+GLOBAL_INJECTED_APPC_IP_ADDR = "appc-sdnhost.namespace-placeholder"
GLOBAL_INJECTED_ARTIFACTS_VERSION = "1.1.0-SNAPSHOT"
-GLOBAL_INJECTED_CLAMP_IP_ADDR = "clamp.onap-clamp"
+GLOBAL_INJECTED_CLAMP_IP_ADDR = "clamp.namespace-placeholder"
GLOBAL_INJECTED_CLOUD_ENV = "openstack"
-GLOBAL_INJECTED_DCAE_IP_ADDR = "dcae-controller.onap-dcae"
+GLOBAL_INJECTED_DCAE_IP_ADDR = "dcae-controller.namespace-placeholder"
GLOBAL_INJECTED_DNS_IP_ADDR = "10.0.100.1"
GLOBAL_INJECTED_DOCKER_VERSION = "1.1-STAGING-latest"
#GLOBAL_INJECTED_EXTERNAL_DNS = "N/A"
GLOBAL_INJECTED_GERRIT_BRANCH = "master"
GLOBAL_INJECTED_KEYSTONE = "OPENSTACK_KEYSTONE_IP_HERE"
-GLOBAL_INJECTED_MR_IP_ADDR = "dmaap.onap-message-router"
-GLOBAL_INJECTED_MSO_IP_ADDR = "mso.onap-mso"
+GLOBAL_INJECTED_MR_IP_ADDR = "dmaap.namespace-placeholder"
+GLOBAL_INJECTED_MSO_IP_ADDR = "mso.namespace-placeholder"
GLOBAL_INJECTED_NETWORK = "OPENSTACK_NETWORK_ID_WITH_ONAP_ROUTE_HERE"
GLOBAL_INJECTED_NEXUS_DOCKER_REPO = "nexus3.onap.org:10001"
GLOBAL_INJECTED_NEXUS_PASSWORD = "docker"
GLOBAL_INJECTED_NEXUS_REPO = "https://nexus.onap.org/content/sites/raw"
GLOBAL_INJECTED_NEXUS_USERNAME = "docker"
-GLOBAL_INJECTED_OPENO_IP_ADDR = "msb-iag.onap-msb"
+GLOBAL_INJECTED_OPENO_IP_ADDR = "msb-iag.namespace-placeholder"
GLOBAL_INJECTED_OPENSTACK_PASSWORD = "OPENSTACK_PASSWORD_HERE"
GLOBAL_INJECTED_OPENSTACK_TENANT_ID = "OPENSTACK_TENANT_ID_HERE"
GLOBAL_INJECTED_OPENSTACK_USERNAME = "OPENSTACK_USERNAME_HERE"
-GLOBAL_INJECTED_POLICY_IP_ADDR = "pypdp.onap-policy"
-GLOBAL_INJECTED_POLICY_HEALTHCHECK_IP_ADDR = "drools.onap-policy"
-GLOBAL_INJECTED_PORTAL_IP_ADDR = "portalapps.onap-portal"
+GLOBAL_INJECTED_POLICY_IP_ADDR = "pypdp.namespace-placeholder"
+GLOBAL_INJECTED_POLICY_HEALTHCHECK_IP_ADDR = "drools.namespace-placeholder"
+GLOBAL_INJECTED_PORTAL_IP_ADDR = "portalapps.namespace-placeholder"
GLOBAL_INJECTED_REGION = "OPENSTACK_REGION_HERE"
GLOBAL_INJECTED_REMOTE_REPO = "http://gerrit.onap.org/r/testsuite/properties.git"
-GLOBAL_INJECTED_SDC_IP_ADDR = "sdc-be.onap-sdc"
-GLOBAL_INJECTED_SDC_FE_IP_ADDR = "sdc-fe.onap-sdc"
-GLOBAL_INJECTED_SDC_BE_IP_ADDR = "sdc-be.onap-sdc"
-GLOBAL_INJECTED_SDNC_IP_ADDR = "sdnhost.onap-sdnc"
-GLOBAL_INJECTED_SDNC_PORTAL_IP_ADDR = "sdnc-portal.onap-sdnc"
-GLOBAL_INJECTED_SO_IP_ADDR = "mso.onap-mso"
-GLOBAL_INJECTED_VID_IP_ADDR = "vid-server.onap-vid"
+GLOBAL_INJECTED_SDC_IP_ADDR = "sdc-be.namespace-placeholder"
+GLOBAL_INJECTED_SDC_FE_IP_ADDR = "sdc-fe.namespace-placeholder"
+GLOBAL_INJECTED_SDC_BE_IP_ADDR = "sdc-be.namespace-placeholder"
+GLOBAL_INJECTED_SDNC_IP_ADDR = "sdnhost.namespace-placeholder"
+GLOBAL_INJECTED_SDNC_PORTAL_IP_ADDR = "sdnc-portal.namespace-placeholder"
+GLOBAL_INJECTED_SO_IP_ADDR = "mso.namespace-placeholder"
+GLOBAL_INJECTED_VID_IP_ADDR = "vid-server.namespace-placeholder"
GLOBAL_INJECTED_VM_FLAVOR = "OPENSTACK_FLAVOUR_MEDIUM_HERE"
GLOBAL_INJECTED_VM_IMAGE_NAME = "UBUNTU_14_IMAGE_NAME_HERE"
GLOBAL_INJECTED_PUBLIC_NET_ID = "OPENSTACK_PUBLIC_NET_ID_HERE"
GLOBAL_INJECTED_PROPERTIES = {
- "GLOBAL_INJECTED_AAI1_IP_ADDR" : "aai-service.onap-aai",
- "GLOBAL_INJECTED_APPC_IP_ADDR" : "sdnhost.onap-appc",
+ "GLOBAL_INJECTED_AAI1_IP_ADDR" : "aai-service.namespace-placeholder",
+ "GLOBAL_INJECTED_APPC_IP_ADDR" : "appc-sdnhost.namespace-placeholder",
"GLOBAL_INJECTED_ARTIFACTS_VERSION" : "1.1.0-SNAPSHOT",
- "GLOBAL_INJECTED_CLAMP_IP_ADDR" : "clamp.onap-clamp",
+ "GLOBAL_INJECTED_CLAMP_IP_ADDR" : "clamp.namespace-placeholder",
"GLOBAL_INJECTED_CLOUD_ENV" : "openstack",
- "GLOBAL_INJECTED_DCAE_IP_ADDR" : "dcae-controller.onap-dcae",
+ "GLOBAL_INJECTED_DCAE_IP_ADDR" : "dcae-controller.namespace-placeholder",
"GLOBAL_INJECTED_DNS_IP_ADDR" : "10.0.100.1",
"GLOBAL_INJECTED_DOCKER_VERSION" : "1.1-STAGING-latest",
"GLOBAL_INJECTED_GERRIT_BRANCH" : "master",
"GLOBAL_INJECTED_KEYSTONE" : "OPENSTACK_KEYSTONE_IP_HERE",
- "GLOBAL_INJECTED_MR_IP_ADDR" : "dmaap.onap-message-router",
- "GLOBAL_INJECTED_MSO_IP_ADDR" : "mso.onap-mso",
+ "GLOBAL_INJECTED_MR_IP_ADDR" : "dmaap.namespace-placeholder",
+ "GLOBAL_INJECTED_MSO_IP_ADDR" : "mso.namespace-placeholder",
"GLOBAL_INJECTED_NETWORK" : "OPENSTACK_NETWORK_ID_WITH_ONAP_ROUTE_HERE",
"GLOBAL_INJECTED_NEXUS_DOCKER_REPO" : "nexus3.onap.org:10001",
"GLOBAL_INJECTED_NEXUS_PASSWORD" : "docker",
"GLOBAL_INJECTED_NEXUS_REPO" : "https://nexus.onap.org/content/sites/raw",
"GLOBAL_INJECTED_NEXUS_USERNAME" : "docker",
- "GLOBAL_INJECTED_OPENO_IP_ADDR" : "msb-iag.onap-msb",
+ "GLOBAL_INJECTED_OPENO_IP_ADDR" : "msb-iag.namespace-placeholder",
"GLOBAL_INJECTED_OPENSTACK_PASSWORD" : "OPENSTACK_PASSWORD_HERE",
"GLOBAL_INJECTED_OPENSTACK_TENANT_ID" : "OPENSTACK_TENANT_ID_HERE",
"GLOBAL_INJECTED_OPENSTACK_USERNAME" : "OPENSTACK_USERNAME_HERE",
- "GLOBAL_INJECTED_POLICY_IP_ADDR" : "pypdp.onap-policy",
- "GLOBAL_INJECTED_POLICY_HEALTHCHECK_IP_ADDR" : "drools.onap-policy",
- "GLOBAL_INJECTED_PORTAL_IP_ADDR" : "portalapps.onap-portal",
+ "GLOBAL_INJECTED_POLICY_IP_ADDR" : "pypdp.namespace-placeholder",
+ "GLOBAL_INJECTED_POLICY_HEALTHCHECK_IP_ADDR" : "drools.namespace-placeholder",
+ "GLOBAL_INJECTED_PORTAL_IP_ADDR" : "portalapps.namespace-placeholder",
"GLOBAL_INJECTED_REGION" : "OPENSTACK_REGION_HERE",
"GLOBAL_INJECTED_REMOTE_REPO" : "http://gerrit.onap.org/r/testsuite/properties.git",
- "GLOBAL_INJECTED_SDC_FE_IP_ADDR" : "sdc-fe.onap-sdc",
- "GLOBAL_INJECTED_SDC_BE_IP_ADDR" : "sdc-be.onap-sdc",
- "GLOBAL_INJECTED_SDNC_IP_ADDR" : "sdnhost.onap-sdnc",
- "GLOBAL_INJECTED_SDNC_PORTAL_IP_ADDR" : "sdnc-portal.onap-sdnc",
- "GLOBAL_INJECTED_SO_IP_ADDR" : "mso.onap-mso",
- "GLOBAL_INJECTED_VID_IP_ADDR" : "vid-server.onap-vid",
+ "GLOBAL_INJECTED_SDC_FE_IP_ADDR" : "sdc-fe.namespace-placeholder",
+ "GLOBAL_INJECTED_SDC_BE_IP_ADDR" : "sdc-be.namespace-placeholder",
+ "GLOBAL_INJECTED_SDNC_IP_ADDR" : "sdnhost.namespace-placeholder",
+ "GLOBAL_INJECTED_SDNC_PORTAL_IP_ADDR" : "sdnc-portal.namespace-placeholder",
+ "GLOBAL_INJECTED_SO_IP_ADDR" : "mso.namespace-placeholder",
+ "GLOBAL_INJECTED_VID_IP_ADDR" : "vid-server.namespace-placeholder",
"GLOBAL_INJECTED_VM_FLAVOR" : "OPENSTACK_FLAVOUR_MEDIUM_HERE",
"GLOBAL_INJECTED_VM_IMAGE_NAME" : "UBUNTU_14_IMAGE_NAME_HERE",
"GLOBAL_INJECTED_PUBLIC_NET_ID" : "OPENSTACK_PUBLIC_NET_ID_HERE"