author    Ubuntu <lji@research.att.com>  2017-02-25 00:26:06 +0000
committer Ubuntu <lji@research.att.com>  2017-02-25 00:26:06 +0000
commit    87a1507cbb443ef3b202e066353ac41bf5fce00c (patch)
tree      2dbfa21b80aa090093876906973d5783d0a9b02c /docker_files
parent    2a392869cb8013245ad4a764363f8fb8e9654de9 (diff)
removal of names and no longer needed files
Change-Id: Iff05cf50a515a008df23c3210a92c038e9862bb0
Signed-off-by: Ubuntu <lji@research.att.com>
Diffstat (limited to 'docker_files')
-rw-r--r--  docker_files/MsgRtrApi.properties   | 140
-rwxr-xr-x  docker_files/__MsgRtrApi.properties |  16
-rw-r--r--  docker_files/cadi.properties        |  32
-rw-r--r--  docker_files/docker-compose.yml     |  50
4 files changed, 9 insertions, 229 deletions
diff --git a/docker_files/MsgRtrApi.properties b/docker_files/MsgRtrApi.properties
deleted file mode 100644
index b54b483..0000000
--- a/docker_files/MsgRtrApi.properties
+++ /dev/null
@@ -1,140 +0,0 @@
-###############################################################################
-##
-## Cambria API Server config
-##
-## - Default values are shown as commented settings.
-##
-
-###############################################################################
-##
-## HTTP service
-##
-## - 3904 is standard as of 7/29/14.
-#
-## Zookeeper Connection
-##
-## Both Cambria and Kafka make use of Zookeeper.
-##
-config.zk.servers=zookeeper:2181
-#config.zk.servers=172.17.0.1:2181
-#10.0.11.1:2181
-#10.208.128.229:2181
-#config.zk.root=/fe3c/cambria/config
-
-
-###############################################################################
-##
-## Kafka Connection
-##
-## Items below are passed through to Kafka's producer and consumer
-## configurations (after removing "kafka.")
-## if you want to change request.required.acks it can take this one value
-#kafka.metadata.broker.list=localhost:9092,localhost:9093
-kafka.metadata.broker.list=kafka:9092
-#kafka.metadata.broker.list=172.17.0.1:9092
-#10.0.11.1:9092
-#10.208.128.229:9092
-##kafka.request.required.acks=-1
-#kafka.client.zookeeper=${config.zk.servers}
-consumer.timeout.ms=100
-zookeeper.connection.timeout.ms=6000
-zookeeper.session.timeout.ms=6000
-zookeeper.sync.time.ms=2000
-auto.commit.interval.ms=1000
-fetch.message.max.bytes =1000000
-auto.commit.enable=false
-
-
-###############################################################################
-##
-## Secured Config
-##
-## Some data stored in the config system is sensitive -- API keys and secrets,
-## for example. To protect it, we use an encryption layer for this section
-## of the config.
-##
-## The key is a base64-encoded AES key. This must be created/configured for
-## each installation.
-#cambria.secureConfig.key=
-##
-## The initialization vector is a 16 byte value specific to the secured store.
-## This must be created/configured for each installation.
-#cambria.secureConfig.iv=
-
-## Southfield Sandbox
-cambria.secureConfig.key=b/7ouTn9FfEw2PQwL0ov/Q==
-cambria.secureConfig.iv=wR9xP5k5vbz/xD0LmtqQLw==
-authentication.adminSecret=fe3cCompound
-#cambria.secureConfig.key[pc569h]=YT3XPyxEmKCTLI2NK+Sjbw==
-#cambria.secureConfig.iv[pc569h]=rMm2jhR3yVnU+u2V9Ugu3Q==
-
-
-###############################################################################
-##
-## Consumer Caching
-##
-## Kafka expects live connections from the consumer to the broker, which
-## obviously doesn't work over connectionless HTTP requests. The Cambria
-## server proxies HTTP requests into Kafka consumer sessions that are kept
-## around for later re-use. Not doing so is costly for setup per request,
-## which would substantially impact a high volume consumer's performance.
-##
-## This complicates Cambria server failover, because we often need server
-## A to close its connection before server B brings up the replacement.
-##
-
-## The consumer cache is normally enabled.
-#cambria.consumer.cache.enabled=true
-
-## Cached consumers are cleaned up after a period of disuse. The server inspects
-## consumers every sweepFreqSeconds and will clean up any connections that are
-## dormant for touchFreqMs.
-#cambria.consumer.cache.sweepFreqSeconds=15
-#cambria.consumer.cache.touchFreqMs=120000
-
-## The cache is managed through ZK. The default value for the ZK connection
-## string is the same as config.zk.servers.
-#cambria.consumer.cache.zkConnect=${config.zk.servers}
-
-##
-## Shared cache information is associated with this node's name. The default
-## name is the hostname plus the HTTP service port this host runs on. (The
-## hostname is determined via InetAddress.getLocalHost ().getCanonicalHostName(),
-## which is not always adequate.) You can set this value explicitly here.
-##
-#cambria.api.node.identifier=<use-something-unique-to-this-instance>
-
-###############################################################################
-##
-## Metrics Reporting
-##
-## This server can report its metrics periodically on a topic.
-##
-#metrics.send.cambria.enabled=true
-#metrics.send.cambria.topic=cambria.apinode.metrics #msgrtr.apinode.metrics.dmaap
-#metrics.send.cambria.sendEverySeconds=60
-
-cambria.consumer.cache.zkBasePath=/fe3c/cambria/consumerCache
-
-##############################################################################
-#100mb
-maxcontentlength=10000
-
-
-##############################################################################
-#AAF Properties
-msgRtr.namespace.aaf=com.att.dcae.dmaap.mtnje2.mr.topic
-msgRtr.topicfactory.aaf=com.att.dcae.dmaap.topicFactory|:com.att.dcae.dmaap.mtnje2.mr.topic:
-enforced.topic.name.AAF=com.att
-forceAAF=false
-transidUEBtopicreqd=false
-defaultNSforUEB=com.att.dmaap.mr.ueb
-##############################################################################
-#Mirror Maker Agent
-msgRtr.mirrormakeradmin.aaf=com.att.dmaap.mr.dev.mirrormaker|*|admin
-msgRtr.mirrormakeruser.aaf=com.att.dmaap.mr.dev.mirrormaker|*|user
-msgRtr.mirrormakeruser.aaf.create=com.att.dmaap.mr.dev.topicFactory|:com.att.dmaap.mr.dev.topic:
-msgRtr.mirrormaker.timeout=15000
-msgRtr.mirrormaker.topic=com.att.dmaap.mr.prod.mm.agent
-msgRtr.mirrormaker.consumergroup=mmagentserver
-msgRtr.mirrormaker.consumerid=1
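Note on the Secured Config block in the deleted file above: cambria.secureConfig.key is a base64-encoded AES key and cambria.secureConfig.iv is a base64-encoded 16-byte initialization vector, both meant to be generated per installation. A minimal sketch of generating a fresh pair, assuming a 128-bit key to match the length of the sample values shown above:

    import base64
    import os

    # Generate a 128-bit AES key and a 16-byte IV, then base64-encode them so
    # they can be pasted into cambria.secureConfig.key / cambria.secureConfig.iv.
    key = base64.b64encode(os.urandom(16)).decode("ascii")
    iv = base64.b64encode(os.urandom(16)).decode("ascii")

    print("cambria.secureConfig.key=" + key)
    print("cambria.secureConfig.iv=" + iv)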
diff --git a/docker_files/__MsgRtrApi.properties b/docker_files/__MsgRtrApi.properties
index 5c89ea9..47293a7 100755
--- a/docker_files/__MsgRtrApi.properties
+++ b/docker_files/__MsgRtrApi.properties
@@ -123,18 +123,18 @@ maxcontentlength=10000
##############################################################################
#AAF Properties
-msgRtr.namespace.aaf=com.att.dcae.dmaap.mtnje2.mr.topic
-msgRtr.topicfactory.aaf=com.att.dcae.dmaap.topicFactory|:com.att.dcae.dmaap.mtnje2.mr.topic:
-enforced.topic.name.AAF=com.att
+msgRtr.namespace.aaf=org.openecomp.dcae.dmaap.mtnje2.mr.topic
+msgRtr.topicfactory.aaf=org.openecomp.dcae.dmaap.topicFactory|:org.openecomp.dcae.dmaap.mtnje2.mr.topic:
+enforced.topic.name.AAF=org.openecomp
forceAAF=false
transidUEBtopicreqd=false
-defaultNSforUEB=com.att.dmaap.mr.ueb
+defaultNSforUEB=org.openecomp.dmaap.mr.ueb
##############################################################################
#Mirror Maker Agent
-msgRtr.mirrormakeradmin.aaf=com.att.dmaap.mr.dev.mirrormaker|*|admin
-msgRtr.mirrormakeruser.aaf=com.att.dmaap.mr.dev.mirrormaker|*|user
-msgRtr.mirrormakeruser.aaf.create=com.att.dmaap.mr.dev.topicFactory|:com.att.dmaap.mr.dev.topic:
+msgRtr.mirrormakeradmin.aaf=org.openecomp.dmaap.mr.dev.mirrormaker|*|admin
+msgRtr.mirrormakeruser.aaf=org.openecomp.dmaap.mr.dev.mirrormaker|*|user
+msgRtr.mirrormakeruser.aaf.create=org.openecomp.dmaap.mr.dev.topicFactory|:org.openecomp.dmaap.mr.dev.topic:
msgRtr.mirrormaker.timeout=15000
-msgRtr.mirrormaker.topic=com.att.dmaap.mr.prod.mm.agent
+msgRtr.mirrormaker.topic=org.openecomp.dmaap.mr.prod.mm.agent
msgRtr.mirrormaker.consumergroup=mmagentserver
msgRtr.mirrormaker.consumerid=1
diff --git a/docker_files/cadi.properties b/docker_files/cadi.properties
index 9c95e9c..ee543f8 100644
--- a/docker_files/cadi.properties
+++ b/docker_files/cadi.properties
@@ -1,24 +1,4 @@
-#This properties file is used for defining AAF properties related to the CADI framework. This file is used for running the AAF framework
-#using the ajsc-cadi plugin. For more information on the ajsc-cadi plugin, please go to the wiki link:
-#http://wiki.web.att.com/display/ajsc/CADI-AJSC-Plugin
-#For more information on utilizing the AAF framework, please go to the wiki link:
-#AAF wiki link: http://wiki.web.att.com/display/aaf/AAF+Documentation
-
-#In order to test functionality of cadi-ajsc-plugin locally (pertaining to GLO (AT&T Global Login)), you will need an AT&T cross
-#domain cookie. Cadi "should" find your hostname for you. However, we have seen some situations where this fails. A Local testing
-#modification can include modifying your hosts file so that you can use "mywebserver.att.com" for your localhost in order
-#to test/verify GLO functionality locally. If you are on a Windows machine, you will already have a machine name associated with
-#it that will utilize an AT&T domain such as "sbc.com". You may need to add your AT&T domain to this as a comma separated list depending
-#upon your particular machine domain. This property is commented out as cadi SHOULD find your machine name. With version 1.2.1 of cadi,
-#it appears to resolve Mac machine names as well now, but this can be somewhat inconsistent depending on your specific working environment.
-#hostname=mywebserver.att.com
-
-#Setting csp_domain to PROD will allow for testing using your attuid and password through GLO.
-#csp_domain=PROD
-#csp_devl_localhost=true
-
-basic_realm=csp.att.com
-#basic_realm=aaf.att.com
+basic_realm=openecomp.org
basic_warn=TRUE
cadi_loglevel=DEBUG
@@ -27,24 +7,14 @@ cadi_keyfile=/appl/dmaapMR1/etc/keyfile
# Configure AAF
aaf_url=https://DME2RESOLVE/service=com.att.authz.AuthorizationService/version=2.0/envContext=DEV/routeOffer=BAU_SE
-# lji: according to this web page: https://wiki.web.att.com/display/soacloud/DME2+-+URI+Patterns, DME2 supports a direct URL mode:
-# Format 3: DIRECT: http://host:port/service=?/version=?/envContext=?/routeOffer=?
-# Point to a specific host:port and call the service there. No lookup/resolution by DME2 client.
-#aaf_url=http://10.0.17.1:8101/service=com.att.authz.AuthorizationService/version=2.0/envContext=TEST/routeOffer=BAU_SE
-
-#AJSC - MECHID
aaf_id=dgl@openecomp.org
aaf_password=enc:f2u5br1mh29M02-
-
-#aaf_id=m93659@ajsc.att.com
-#aaf_password=enc:NP_WI3mH4YPdWSrY4iLcbhRc4mQY
aaf_timeout=5000
aaf_clean_interval=1200000
aaf_user_expires=60000
aaf_high_count=1000000
-# Some Libs need System Property Sets (i.e. AT&T Discovery)
# The following properties are being set by the AJSC Container and should NOT need to be set here.
AFT_LATITUDE=33.823589
AFT_LONGITUDE=-84.366982
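After this change, cadi.properties keeps the generic realm (basic_realm=openecomp.org), the CADI log level and keyfile, and the AAF connection settings (aaf_url, aaf_id, aaf_password, plus the timeout/cache values). A minimal sketch of a deployment sanity check that an installed file still defines those keys; the key names come from the hunk above, the container path matches the cadi.properties mount in the removed docker-compose.yml, and the script itself is only illustrative:

    # Sanity check that a deployed cadi.properties still defines the CADI/AAF
    # keys kept by this change. Key names are taken from the hunk above; the
    # path matches the mount point used by the (now removed) docker-compose.yml.
    REQUIRED_KEYS = [
        "basic_realm", "cadi_loglevel", "cadi_keyfile",
        "aaf_url", "aaf_id", "aaf_password", "aaf_timeout",
    ]

    def load_properties(path):
        props = {}
        with open(path) as fh:
            for raw in fh:
                line = raw.strip()
                if not line or line.startswith("#") or "=" not in line:
                    continue
                key, _, value = line.partition("=")
                props[key.strip()] = value.strip()
        return props

    props = load_properties("/appl/dmaapMR1/etc/cadi.properties")
    missing = [k for k in REQUIRED_KEYS if k not in props]
    if missing:
        raise SystemExit("cadi.properties is missing: " + ", ".join(missing))
    print("all expected CADI/AAF keys are present")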
diff --git a/docker_files/docker-compose.yml b/docker_files/docker-compose.yml
deleted file mode 100644
index dddf153..0000000
--- a/docker_files/docker-compose.yml
+++ /dev/null
@@ -1,50 +0,0 @@
-version: '2'
-services:
- zookeeper:
- image: wurstmeister/zookeeper
- ports:
- - "2181:2181"
- volumes:
- - ./data-zookeeper:/opt/zookeeper-3.4.9/data
- logging:
- driver: "json-file"
- options:
- max-size: "30m"
- max-file: "5"
-
- kafka:
- build: .
- ports:
- - "9092:9092"
- environment:
- #KAFKA_ADVERTISED_HOST_NAME: 172.17.0.1
- KAFKA_ADVERTISED_HOST_NAME: kafka
- KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
- KAFKA_BROKER_ID: 1
- volumes:
- - /var/run/docker.sock:/var/run/docker.sock
- - ./data-kafka:/kafka
- - ./start-kafka.sh:/start-kafka.sh
- logging:
- driver: "json-file"
- options:
- max-size: "30m"
- max-file: "5"
-
- dmaap:
- image: attos/dmaap
- ports:
- - "3904:3904"
- - "3905:3905"
- volumes:
- - ./MsgRtrApi.properties:/appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties
- - ./cadi.properties:/appl/dmaapMR1/etc/cadi.properties
- - ./mykey:/appl/dmaapMR1/etc/keyfile
- depends_on:
- - zookeeper
- - kafka
- logging:
- driver: "json-file"
- options:
- max-size: "30m"
- max-file: "5"
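The removed compose file wired ZooKeeper, Kafka, and the attos/dmaap message router together and published the router on ports 3904/3905. A small smoke test against a router started from this setup, assuming the standard DMaaP/Cambria /events endpoints; the host, topic, and consumer group/id below are placeholders, not values from this repo:

    # Smoke test for a message router listening on the published port 3904.
    # Assumes the usual Cambria REST endpoints:
    #   POST /events/<topic>                              publish
    #   GET  /events/<topic>/<consumerGroup>/<consumerId> consume
    import requests

    BASE = "http://localhost:3904"
    TOPIC = "test.smoke.topic"

    # A brand-new consumer group typically starts at the latest offset, so
    # register it with an initial (likely empty) read before publishing.
    requests.get(f"{BASE}/events/{TOPIC}/smokeGroup/1", timeout=30).raise_for_status()

    # Publish one JSON message.
    requests.post(f"{BASE}/events/{TOPIC}", json={"hello": "dmaap"}, timeout=10).raise_for_status()

    # Read it back, long-polling up to 15s (Cambria 'timeout' query parameter, in ms).
    resp = requests.get(f"{BASE}/events/{TOPIC}/smokeGroup/1",
                        params={"timeout": 15000}, timeout=30)
    resp.raise_for_status()
    print(resp.json())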