Diffstat (limited to 'bootstrap/vagrant-onap/lib')
-rwxr-xr-x  bootstrap/vagrant-onap/lib/_composed_functions  35
-rwxr-xr-x  bootstrap/vagrant-onap/lib/_onap_functions  107
-rwxr-xr-x  bootstrap/vagrant-onap/lib/aai  147
-rwxr-xr-x  bootstrap/vagrant-onap/lib/appc  43
-rwxr-xr-x  bootstrap/vagrant-onap/lib/ccsdk  36
-rwxr-xr-x  bootstrap/vagrant-onap/lib/commons  119
-rwxr-xr-x  bootstrap/vagrant-onap/lib/config/env-vars  79
-rwxr-xr-x  bootstrap/vagrant-onap/lib/dcae  92
-rw-r--r--  bootstrap/vagrant-onap/lib/files/aai.pem  102
-rw-r--r--  bootstrap/vagrant-onap/lib/files/all-in-one  585
-rw-r--r--  bootstrap/vagrant-onap/lib/files/globals.yml  2
-rw-r--r--  bootstrap/vagrant-onap/lib/files/haproxy.cfg  120
-rw-r--r--  bootstrap/vagrant-onap/lib/files/kolla-build.conf  5
-rw-r--r--  bootstrap/vagrant-onap/lib/files/kubectl_config_generator.py  40
-rw-r--r--  bootstrap/vagrant-onap/lib/files/passwords.yml  216
-rw-r--r--  bootstrap/vagrant-onap/lib/files/settings.xml  369
-rwxr-xr-x  bootstrap/vagrant-onap/lib/functions  450
-rwxr-xr-x  bootstrap/vagrant-onap/lib/mr  31
-rwxr-xr-x  bootstrap/vagrant-onap/lib/msb  50
-rwxr-xr-x  bootstrap/vagrant-onap/lib/mso  94
-rwxr-xr-x  bootstrap/vagrant-onap/lib/multicloud  51
-rwxr-xr-x  bootstrap/vagrant-onap/lib/oom  207
-rwxr-xr-x  bootstrap/vagrant-onap/lib/openstack  75
-rwxr-xr-x  bootstrap/vagrant-onap/lib/policy  53
-rwxr-xr-x  bootstrap/vagrant-onap/lib/portal  98
-rwxr-xr-x  bootstrap/vagrant-onap/lib/robot  45
-rwxr-xr-x  bootstrap/vagrant-onap/lib/sdc  88
-rwxr-xr-x  bootstrap/vagrant-onap/lib/sdnc  64
-rwxr-xr-x  bootstrap/vagrant-onap/lib/vfc  96
-rwxr-xr-x  bootstrap/vagrant-onap/lib/vid  49
-rwxr-xr-x  bootstrap/vagrant-onap/lib/vnfsdk  47
-rwxr-xr-x  bootstrap/vagrant-onap/lib/vvp  40
32 files changed, 0 insertions, 3635 deletions
diff --git a/bootstrap/vagrant-onap/lib/_composed_functions b/bootstrap/vagrant-onap/lib/_composed_functions
deleted file mode 100755
index 9f2d0a1d8..000000000
--- a/bootstrap/vagrant-onap/lib/_composed_functions
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/bash
-
-# build_docker_image() - Build Docker container image from source code
-function build_docker_image {
- local src_folder=$1
- local profile=$2
- install_maven
- install_docker
- pushd $src_folder
-
- # Cleanup external repo
- sed -i 's|${docker.push.registry}/||g' pom.xml
- local mvn_docker="mvn clean package docker:build"
- if [ $profile ]; then
- mvn_docker+=" -P $profile"
- fi
- if [ $http_proxy ]; then
- if ! grep -ql "docker.buildArg.http_proxy" pom.xml ; then
- mvn_docker+=" -Ddocker.buildArg.http_proxy=$http_proxy"
- fi
- if ! grep -ql "docker.buildArg.HTTP_PROXY" pom.xml ; then
- mvn_docker+=" -Ddocker.buildArg.HTTP_PROXY=$http_proxy"
- fi
- fi
- if [ $https_proxy ]; then
- if ! grep -ql "docker.buildArg.https_proxy" pom.xml ; then
- mvn_docker+=" -Ddocker.buildArg.https_proxy=$https_proxy"
- fi
- if ! grep -ql "docker.buildArg.HTTPS_PROXY" pom.xml ; then
- mvn_docker+=" -Ddocker.buildArg.HTTPS_PROXY=$https_proxy"
- fi
- fi
- eval $mvn_docker
- popd
-}
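
For reference, a minimal sketch of how build_docker_image is invoked by the per-component scripts later in this library; the calls with and without a Maven profile both appear below, and sourcing /var/onap/functions to pull in this helper is an assumption about how the Vagrant image mounts lib/ at /var/onap:

    source /var/onap/functions    # assumed to load _composed_functions as part of the common library

    # Build an image straight from a component checkout, no Maven profile
    build_docker_image ${src_folders[aai]}/sparky-be

    # Same helper with the optional second argument activating the "docker" profile
    build_docker_image ${src_folders[appc]}/deployment/installation/appc docker
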
diff --git a/bootstrap/vagrant-onap/lib/_onap_functions b/bootstrap/vagrant-onap/lib/_onap_functions
deleted file mode 100755
index c65e58958..000000000
--- a/bootstrap/vagrant-onap/lib/_onap_functions
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/bin/bash
-
-# create_configuration_files() - Store credentials in files
-function create_configuration_files {
- local onap_config_folder="/opt/config"
-
- mkdir -p $onap_config_folder
- pushd $onap_config_folder
- echo $nexus_docker_repo > nexus_docker_repo.txt
- echo $nexus_username > nexus_username.txt
- echo $nexus_password > nexus_password.txt
- echo $openstack_username > openstack_username.txt
- echo $openstack_tenant_id > tenant_id.txt
- echo $dmaap_topic > dmaap_topic.txt
- echo $docker_version > docker_version.txt
- popd
-}
-
-# docker_openecomp_login() - Log in to the OpenECOMP/ONAP Nexus Docker registry
-function docker_openecomp_login {
- install_docker
- docker login -u ${nexus_username:-docker} -p ${nexus_password:-docker} ${nexus_docker_repo:-nexus3.onap.org:10001}
-}
-
-# pull_openecomp_image() - Pull a Docker image from the openecomp namespace of the Nexus registry
-function pull_openecomp_image {
- local image=$1
- local tag=$2
- docker_openecomp_login
- pull_docker_image ${nexus_docker_repo:-nexus3.onap.org:10001}/openecomp/${image}:${docker_version:-latest} $tag
- docker logout
-}
-
-# pull_onap_image() - Pull a Docker image from the onap namespace of the Nexus registry
-function pull_onap_image {
- local image=$1
- local tag=$2
- docker_openecomp_login
- pull_docker_image ${nexus_docker_repo:-nexus3.onap.org:10001}/onap/${image}:${docker_version:-latest} $tag
- docker logout
-}
-
-# configure_bind() - Configure the bind9 DNS service
-function configure_bind {
- _install_bind
- mkdir /etc/bind/zones
-
- curl -k $nexus_repo/org.openecomp.demo/boot/$artifacts_version/db_simpledemo_openecomp_org -o /etc/bind/zones/db.simpledemo.openecomp.org
- curl -k $nexus_repo/org.openecomp.demo/boot/$artifacts_version/named.conf.options -o /etc/bind/named.conf.options
- curl -k $nexus_repo/org.openecomp.demo/boot/$artifacts_version/named.conf.local -o /etc/bind/named.conf.local
-
- modprobe ip_gre
- sed -i "s/OPTIONS=.*/OPTIONS=\"-4 -u bind\"/g" /etc/default/bind9
- service bind9 restart
-}
-
-# _configure_maven() - Create a Maven configuration file if one does not already exist
-function _configure_maven {
- local proxies_start=" <!--"
- local proxies=" \|"
- local proxies_end=" \|-->"
- local mvn_http=""
- local mvn_https=""
-
- if [ $http_proxy ] || [ $https_proxy ]; then
- proxies_start=" <proxies>"
- proxies=" "
- proxies_end=" <\/proxies>"
- if [ $http_proxy ]; then
- proxy_domain=`echo $http_proxy | awk -F/ '{print $3}' | awk -F: '{print $1}'`
- proxy_port=`echo $http_proxy | awk -F/ '{print $3}' | awk -F: '{print $2}'`
- mvn_http="<proxy>\n <id>http</id>\n <active>true</active>\n <protocol>http</protocol>\n <host>$proxy_domain</host>\n <port>$proxy_port</port>\n <nonProxyHosts>${no_proxy}</nonProxyHosts>\n </proxy>"
- fi
- if [ $https_proxy ]; then
- proxy_domain=`echo $https_proxy | awk -F/ '{print $3}' | awk -F: '{print $1}'`
- proxy_port=`echo $https_proxy | awk -F/ '{print $3}' | awk -F: '{print $2}'`
- mvn_https="<proxy>\n <id>https</id>\n <active>true</active>\n <protocol>https</protocol>\n <host>$proxy_domain</host>\n <port>$proxy_port</port>\n <nonProxyHosts>${no_proxy}</nonProxyHosts>\n </proxy>"
- fi
- fi
-
- mkdir -p $(dirname $mvn_conf_file)
- if [ ! -f $mvn_conf_file ]; then
- if [[ "$enable_oparent" == "True" ]]; then
- clone_repo oparent
- cp $git_src_folder/oparent/settings.xml $mvn_conf_file
- sed -i "s|<\/profiles>|<\/profiles>\n%PROXIES_START%\n%PROXIES% %HTTP_PROXY%\n%PROXIES% %HTTPS_PROXY%\n%PROXIES_END%|g" $mvn_conf_file
- else
- cp /var/onap/files/settings.xml $mvn_conf_file
- fi
-
- sed -e "
- s|%PROXIES_START%|$proxies_start|g;
- s|%PROXIES%|$proxies|g;
- s|%HTTP_PROXY%|$mvn_http|g;
- s|%HTTPS_PROXY%|$mvn_https|g;
- s|%PROXIES_END%|$proxies_end|g
- " -i $mvn_conf_file
- fi
-}
-
-# configure_service() - Download a service init script and register it with update-rc.d
-function configure_service {
- local service_script=$1
- curl -k $nexus_repo/org.openecomp.demo/boot/$artifacts_version/$service_script -o /etc/init.d/$service_script
- chmod +x /etc/init.d/$service_script
- update-rc.d $service_script defaults
-}
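
A hedged usage sketch for these helpers, based on how the component scripts below call them; the init-script name in the last line is hypothetical:

    docker_openecomp_login                                          # log in to the Nexus registry
    pull_onap_image sparky-be                                       # pulls onap/sparky-be:$docker_version
    pull_openecomp_image appc-image openecomp/appc-image:latest     # pull and retag locally

    configure_service my_service.sh    # hypothetical name; fetched from $nexus_repo and registered with update-rc.d
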
diff --git a/bootstrap/vagrant-onap/lib/aai b/bootstrap/vagrant-onap/lib/aai
deleted file mode 100755
index 1ce3485c6..000000000
--- a/bootstrap/vagrant-onap/lib/aai
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-hbase_version=1.2.0
-
-# install_hadoop() - Function that installs the Titan graph database (Hadoop 1 distribution)
-function install_hadoop {
- local release=titan
- local version=1.0.0
- local filename=$release-$version-hadoop1
- local dest_folder=/opt/hadoop/current
-
- if [ ! -d $dest_folder ]; then
- curl http://s3.thinkaurelius.com/downloads/$release/$filename.zip -o /tmp/${filename}.zip
- install_package unzip
- mkdir -p $dest_folder
- unzip /tmp/${filename}.zip -d $dest_folder
- fi
-
- pushd $dest_folder/${filename}
- # Change commitlog_directory and data_file_directories values (https://stackoverflow.com/a/26856246/1707651)
- sed -i "s|db/cassandra/data|/tmp/data|g" conf/cassandra/cassandra.yaml
- sed -i "s|db/cassandra/commitlog|/tmp/commitlog|g" conf/cassandra/cassandra.yaml
-
- install_java
- ./bin/titan.sh start
- popd
-}
-
-# install_haproxy() - Function that installs HAProxy
-function install_haproxy {
- if is_package_installed haproxy; then
- return
- fi
- install_package software-properties-common
- add-apt-repository -y ppa:vbernat/haproxy-1.7
- update_repos
- install_package haproxy
- cp /var/onap/files/haproxy.cfg /etc/haproxy/
- cp /var/onap/files/aai.pem /etc/ssl/private/
- chmod 640 /etc/ssl/private/aai.pem
- chown root:ssl-cert /etc/ssl/private/aai.pem
- mkdir -p /usr/local/etc/haproxy
- #echo "127.0.0.1 localhost aai-traversal.api.simpledemo.openecomp.org aai-resources.api.simpledemo.openecomp.org" >> /etc/hosts
-
- service haproxy restart
-}
-
-# compile_aai_repos() - Function that compiles the AAI source repositories
-function compile_aai_repos {
- local repos="aai/aai-common aai/resources aai/logging-service aai/traversal"
- if [[ "$compile_repo" == "True" ]]; then
- repos="${repos[aai]}"
- fi
-
- for repo in ${repos[@]}; do
- compile_src ${src_folders[aai]}${repo#*aai}
- done
-}
-
-# setup_titan() - Function that configures AAI services to connect to Hadoop Titan
-function setup_titan {
- local subdirectory="bundleconfig-local/etc/appprops"
- install_python_package crudini
-
- for dirc in resources/aai-resources traversal/aai-traversal; do
- for file in titan-cached.properties titan-realtime.properties; do
- crudini --set "${src_folders[aai]}/$dirc/$subdirectory/$file" "" "storage.backend" "cassandra"
- crudini --set "${src_folders[aai]}/$dirc/$subdirectory/$file" "" "storage.hostname" "localhost"
- done
- done
-
- # Add the schema to the local instance
- compile_src ${src_folders[aai]}/resources/aai-resources/
- uninstall_packages default-jre openjdk-7-jdk openjdk-7-jre openjdk-7-jre-headless
- pushd ${src_folders[aai]}
- java -DAJSC_HOME=${src_folders[aai]}/resources/aai-resources -DBUNDLECONFIG_DIR="bundleconfig-local" -cp aai-common/aai-core/target/aai-core-*.jar:resources/aai-resources/target/aai-resources.jar:resources/aai-resources/target/userjars/* org.onap.aai.dbgen.GenTester
- popd
-}
-
-# _start_data_managment() - Function that starts a data management service
-function _start_data_managment {
- local service=$1
- local debug_port=$2
-
- install_maven
- pushd ${src_folders[aai]}/$service
- export MAVEN_OPTS="-Xms1024m -Xmx5120m -XX:PermSize=2024m -Xdebug -Xnoagent -Djava.compiler=NONE -Xrunjdwp:transport=dt_socket,address=$debug_port,server=y,suspend=n"
- mvn -P runAjsc &
- popd
-}
-
-# start_aai_microservices() - Function that starts AAI microservices
-function start_aai_microservices {
- _start_data_managment resources 9446
- sleep 360
- _start_data_managment traversal 9447
-}
-
-# install_aai() - Install AAI Services
-function install_aai {
- install_docker_compose
- pushd ${src_folders[aai]}/test-config
- ./deploy_vm2.sh
- ./deploy_vm1.sh
- popd
-}
-
-# get_aai_images() - Function that pulls or creates AAI docker images
-function get_aai_images {
- pull_docker_image elasticsearch:2.4.1
- docker_openecomp_login
- pull_docker_image ${nexus_docker_repo:-nexus3.onap.org:10001}/aaionap/hbase:${hbase_version}
-
- if [[ "$build_image" == "True" ]]; then
- unset MAVEN_OPTS
- for project in resources/aai-resources traversal/aai-traversal search-data-service data-router model-loader sparky-be; do
- build_docker_image ${src_folders[aai]}/$project
- done
- else
- for image in aai-resources aai-traversal search-data-service data-router model-loader sparky-be; do
- pull_onap_image $image
- done
- fi
-}
-
-# init_aai() - Function that initializes AAI services
-function init_aai {
- install_hadoop
- install_haproxy
-
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "aai"
- fi
- compile_aai_repos
-
- setup_titan
- #start_aai_microservices
-
- if [[ "$skip_get_images" == "False" ]]; then
- get_aai_images
- fi
- if [[ "$skip_install" == "False" ]]; then
- install_aai
- fi
-}
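
The init_aai entry point is driven entirely by the True/False flags referenced above; a sketch of a full from-source run, assuming those flags are plain shell variables visible to this script:

    export clone_repo=True compile_repo=True build_image=True
    export skip_get_images=False skip_install=False
    source /var/onap/aai
    init_aai
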
diff --git a/bootstrap/vagrant-onap/lib/appc b/bootstrap/vagrant-onap/lib/appc
deleted file mode 100755
index 95654fc10..000000000
--- a/bootstrap/vagrant-onap/lib/appc
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/bin/bash
-
-source /var/onap/sdnc
-source /var/onap/functions
-
-# _build_appc_images() - Function that creates APPC images from source code.
-function _build_appc_images {
- get_sdnc_images
- build_docker_image ${src_folders[appc]}/deployment/installation/appc docker
-}
-
-# get_appc_images() - Function that pulls or builds APPC Docker images
-function get_appc_images {
- if [[ "$build_image" == "True" ]]; then
- _build_appc_images
- else
- for image in appc-image dgbuilder-sdnc-image; do
- pull_openecomp_image $image openecomp/$image:latest
- done
- fi
-}
-
-# install_appc() - Function that installs the APPC services with docker-compose
-function install_appc {
- run_docker_compose ${src_folders[appc]}/deployment/docker-compose
-}
-
-# init_appc() - Function that initializes APPC services
-function init_appc {
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "appc"
- if [[ "$compile_repo" == "True" ]]; then
- compile_repos "appc"
- fi
- fi
-
- if [[ "$skip_get_images" == "False" ]]; then
- get_appc_images
- if [[ "$skip_install" == "False" ]]; then
- install_appc
- fi
- fi
-}
diff --git a/bootstrap/vagrant-onap/lib/ccsdk b/bootstrap/vagrant-onap/lib/ccsdk
deleted file mode 100755
index 93ee0c85d..000000000
--- a/bootstrap/vagrant-onap/lib/ccsdk
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-# _build_ccsdk_images() - Build CCSDK Docker images from source code
-function _build_ccsdk_images {
- install_package unzip
- compile_src ${src_folders[ccsdk]}/distribution
- for image in ubuntu opendaylight odlsli dgbuilder-docker; do
- build_docker_image ${src_folders[ccsdk]}/distribution/$image docker
- done
-}
-
-# get_ccsdk_images() - Get CCSDK Docker images
-function get_ccsdk_images {
- if [[ "$build_image" == "True" ]]; then
- _build_ccsdk_images
- else
- for image in ubuntu odl dgbuilder; do
- pull_onap_image ccsdk-$image-image
- done
- fi
-}
-
-# init_ccsdk() - Function that initializes CCSDK services
-function init_ccsdk {
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "ccsdk"
- if [[ "$compile_repo" == "True" ]]; then
- compile_repos "ccsdk"
- fi
- fi
- if [[ "$skip_get_images" == "False" ]]; then
- get_ccsdk_images
- fi
-}
diff --git a/bootstrap/vagrant-onap/lib/commons b/bootstrap/vagrant-onap/lib/commons
deleted file mode 100755
index 90f73d230..000000000
--- a/bootstrap/vagrant-onap/lib/commons
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/bin/bash
-
-# update_repos() - Function that updates the Linux package repositories
-function update_repos {
- echo "Updating repositories list..."
- if [ -f /var/onap/files/sources.list ]; then
- cp /var/onap/files/sources.list /etc/apt/sources.list
- fi
- source /etc/os-release || source /usr/lib/os-release
- case ${ID,,} in
- *suse)
- zypper -n ref
- ;;
- ubuntu|debian)
- if [[ "$debug" == "False" ]]; then
- apt-get update > /dev/null
- else
- apt-get update
- fi
- ;;
- rhel|centos|fedora)
- yum updateinfo
- ;;
- esac
-}
-
-# is_package_installed() - Function to tell if a package is installed
-function is_package_installed {
- if [[ -z "$@" ]]; then
- return 1
- fi
- source /etc/os-release || source /usr/lib/os-release
- case ${ID,,} in
- *suse)
- CHECK_CMD="zypper search --match-exact --installed"
- ;;
- ubuntu|debian)
- CHECK_CMD="dpkg -l"
- ;;
- rhel|centos|fedora)
- CHECK_CMD="rpm -q"
- ;;
- esac
- ${CHECK_CMD} "$@" &> /dev/null
-}
-
-# install_packages() - Install a list of packages
-function install_packages {
- local package=$@
- source /etc/os-release || source /usr/lib/os-release
- case ${ID,,} in
- *suse)
- ;;
- ubuntu|debian)
- apt-get install -y -qq $package
- ;;
- rhel|centos|fedora)
- ;;
- esac
-}
-
-# install_package() - Install a specific package if it is not already installed
-function install_package {
- local package=$1
-
- if ! is_package_installed $package; then
- echo "Installing $package..."
-
- source /etc/os-release || source /usr/lib/os-release
- case ${ID,,} in
- *suse)
- zypper install -y $package
- ;;
- ubuntu|debian)
- if [[ "$debug" == "False" ]]; then
- apt-get install -y -qq -o=Dpkg::Use-Pty=0 $package
- else
- apt-get install -y $package
- fi
- ;;
- rhel|centos|fedora)
- PKG_MANAGER=$(which dnf || which yum)
- ${PKG_MANAGER} -y install $package
- ;;
- esac
- fi
-}
-
-# uninstall_packages() - Uninstall a list of packages
-function uninstall_packages {
- local packages=$@
- source /etc/os-release || source /usr/lib/os-release
- case ${ID,,} in
- *suse)
- ;;
- ubuntu|debian)
- apt-get purge -y -qq $packages
- ;;
- rhel|centos|fedora)
- ;;
- esac
-}
-
-# uninstall_package() - Uninstall a specific package if it is installed
-function uninstall_package {
- local package=$1
- if is_package_installed $package; then
- source /etc/os-release || source /usr/lib/os-release
- case ${ID,,} in
- *suse)
- ;;
- ubuntu|debian)
- apt-get purge -y -qq $package
- ;;
- rhel|centos|fedora)
- ;;
- esac
- fi
-}
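
A short usage sketch for these distro-agnostic helpers (package names are illustrative):

    install_package curl                  # installs only if missing, on SUSE, Debian/Ubuntu or RHEL-family hosts
    install_packages git make unzip       # unconditional batch install
    if ! is_package_installed docker-ce; then
        echo "docker-ce is not installed"
    fi
    uninstall_package unzip               # removed only if currently installed
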
diff --git a/bootstrap/vagrant-onap/lib/config/env-vars b/bootstrap/vagrant-onap/lib/config/env-vars
deleted file mode 100755
index 7712de88e..000000000
--- a/bootstrap/vagrant-onap/lib/config/env-vars
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/bin/bash
-
-# Source code destination folder
-git_src_folder=/opt/onap
-
-declare -A src_folders
-src_folders=(
-["aai"]="$git_src_folder/aai"
-["appc"]="$git_src_folder/appc"
-["ccsdk"]="$git_src_folder/ccsdk"
-["dcae"]="$git_src_folder/dcae"
-["mr"]="$git_src_folder/dcae/message-router"
-["msb"]="$git_src_folder/msb"
-["mso"]="$git_src_folder/mso"
-["multicloud"]="$git_src_folder/multicloud"
-["oom"]="$git_src_folder/oom"
-["policy"]="$git_src_folder/policy"
-["portal"]="$git_src_folder/portal"
-["robot"]="$git_src_folder/testsuite"
-["sdc"]="$git_src_folder/sdc"
-["sdnc"]="$git_src_folder/openecomp/sdnc"
-["vfc"]="$git_src_folder/vfc"
-["vid"]="$git_src_folder/vid"
-["vnfsdk"]="$git_src_folder/vnfsdk"
-["vvp"]="$git_src_folder/vvp"
-)
-
-# Repositories list
-declare -A repos
-repos=(
-["aai"]="aai/aai-common aai/aai-config aai/aai-data aai/aai-service \
-aai/babel aai/champ aai/data-router aai/esr-gui aai/esr-server aai/gizmo \
-aai/logging-service aai/model-loader aai/resources aai/rest-client \
-aai/router-core aai/search-data-service aai/test-config aai/traversal \
-aai/sparky-fe aai/sparky-be"
-["appc"]="appc appc/deployment"
-["ccsdk"]="ccsdk ccsdk/dashboard ccsdk/distribution ccsdk/parent \
-ccsdk/platform/blueprints ccsdk/platform/nbapi \
-ccsdk/platform/plugins ccsdk/sli ccsdk/sli/adaptors ccsdk/sli/core \
-ccsdk/sli/northbound ccsdk/sli/plugins ccsdk/storage \
-ccsdk/storage/esaas ccsdk/storage/pgaas ccsdk/utils"
-["dcae"]="dcae dcae/apod dcae/apod/analytics dcae/apod/buildtools \
-dcae/apod/cdap dcae/collectors dcae/collectors/ves dcae/controller \
-dcae/controller/analytics dcae/dcae-inventory dcae/demo \
-dcae/demo/startup dcae/demo/startup/aaf dcae/demo/startup/controller \
-dcae/demo/startup/message-router dcae/dmaapbc dcae/operation \
-dcae/operation/utils dcae/orch-dispatcher dcae/pgaas dcae/utils \
-dcae/utils/buildtools"
-["msb"]="msb/apigateway msb/discovery msb/java-sdk msb/swagger-sdk"
-["mso"]="mso mso/chef-repo mso/docker-config mso/libs mso/mso-config"
-["multicloud"]="multicloud multicloud/framework multicloud/openstack \
-multicloud/openstack/vmware multicloud/openstack/windriver \
-multicloud/azure"
-["oom"]="oom oom/registrator"
-["policy"]="policy/api policy/common policy/docker \
-policy/drools-applications policy/drools-pdp policy/engine \
-policy/gui policy/pap policy/pdp"
-["portal"]="portal portal/sdk ecompsdkos ui/dmaapbc"
-["robot"]="testsuite testsuite/heatbridge testsuite/properties \
-testsuite/python-testing-utils"
-["sdc"]="sdc sdc/jtosca sdc/sdc-distribution-client \
-sdc/sdc-docker-base sdc/sdc-titan-cassandra sdc/sdc-tosca \
-sdc/sdc-vnfdesign sdc/sdc-workflow-designer sdc/sdc_common"
-["sdnc"]="sdnc/adaptors sdnc/architecture sdnc/core sdnc/features \
-sdnc/northbound sdnc/oam sdnc/parent sdnc/plugins"
-["vfc"]="vfc/gvnfm vfc/gvnfm/vnflcm vfc/gvnfm/vnfmgr \
-vfc/gvnfm/vnfres vfc/nfvo vfc/nfvo/catalog vfc/nfvo/driver \
-vfc/nfvo/driver/ems vfc/nfvo/driver/sfc vfc/nfvo/driver/vnfm \
-vfc/nfvo/driver/vnfm/gvnfm vfc/nfvo/driver/vnfm/svnfm vfc/nfvo/lcm \
-vfc/nfvo/resmanagement vfc/nfvo/wfengine"
-["vid"]="vid vid/asdcclient"
-["vnfsdk"]="vnfsdk/compliance vnfsdk/functest vnfsdk/lctest \
-vnfsdk/model vnfsdk/pkgtools vnfsdk/refrepo vnfsdk/validation"
-["vvp"]="vvp/ansible-ice-bootstrap vvp/cms vvp/devkit \
-vvp/documentation vvp/engagementmgr vvp/gitlab vvp/image-scanner \
-vvp/jenkins vvp/portal vvp/postgresql vvp/test-engine \
-vvp/validation-scripts"
-)
-
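
A sketch of how these maps are consumed by the other scripts in lib/; the /var/onap mount point is an assumption based on the source lines used throughout this library:

    source /var/onap/config/env-vars

    echo "${src_folders[sdnc]}"           # -> /opt/onap/openecomp/sdnc

    # Iterate over every repository belonging to one project
    for repo in ${repos[policy]}; do
        echo "$repo"
    done
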
diff --git a/bootstrap/vagrant-onap/lib/dcae b/bootstrap/vagrant-onap/lib/dcae
deleted file mode 100755
index 25efddddc..000000000
--- a/bootstrap/vagrant-onap/lib/dcae
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-# _create_config_file() - Creates a configuration yaml file for the controller
-function _create_config_file {
- cat > ${src_folders[dcae]}/controller/config.yaml << EOL
-ZONE: $dcae_zone
-STATE: $dcae_state
-DCAE-VERSION: $artifacts_version
-HORIZON-URL: https://mycloud.rackspace.com/cloud/$tenant_id
-KEYSTONE-URL: https://identity.api.rackspacecloud.com/v2.0
-OPENSTACK-TENANT-ID: $tenant_id
-OPENSTACK-TENANT-NAME: OPEN-ECOMP
-OPENSTACK-REGION: $openstack_region
-OPENSTACK-PRIVATE-NETWORK: $openstack_private_network_name
-OPENSTACK-USER: $openstack_user
-OPENSTACK-PASSWORD: $openstack_password
-OPENSTACK-KEYNAME: ${key_name}${rand_str}_dcae
-OPENSTACK-PUBKEY: $pub_key
-
-NEXUS-URL-ROOT: $nexus_repo_root
-NEXUS-USER: $nexus_username
-NEXUS-PASSWORD: $nexus_password
-NEXUS-URL-SNAPSHOTS: $nexus_url_snapshots
-NEXUS-RAWURL: $nexus_repo
-
-DOCKER-REGISTRY: $nexus_docker_repo
-
-GIT-MR-REPO: http://gerrit.onap.org/r/dcae/demo/startup/message-router.git
-EOL
-}
-
-# _build_dcae_images() - Function that builds DCAE Docker images from source code.
-function _build_dcae_images {
- if [[ "$compile_repo" != "True" ]]; then
- compile_repos "dcae"
- fi
- build_docker_image ${src_folders[dcae]}/dmaapbc openecomp/dcae-dmaapbc
- build_docker_image ${src_folders[dcae]}/orch-dispatcher dcae/orch-dispatcher
-
- pushd ${src_folders[dcae]}/demo
- bash dcae-demo-controller/src/main/docker-build/build.sh
- popd
-
- build_docker_image ${src_folders[dcae]}/dcae-inventory
-}
-
-# get_dcae_images() - Function that retrieves or builds DCAE docker images.
-function get_dcae_images {
- if [[ "$build_image" == "True" ]]; then
- _build_dcae_images
- else
- pull_openecomp_image dcae-dmaapbc openecomp/dcae-dmaapbc
- pull_openecomp_image dcae-controller
- fi
-}
-
-# install_dcae() - Function that installs the DCAE controller services from source code
-function install_dcae {
- pushd ${src_folders[dcae]}/demo/startup/controller
- if [[ "$build_image" == "True" ]]; then
- dcae_image=`docker images | grep dcae-controller | awk '{print $1 ":" $2}'`
- sed -i "s|DOCKER-REGISTRY/openecomp/dcae-controller:DCAE-VERSION|$dcae_image|g" docker-compose.yml
- sed -i "s|MTU|$MTU|g" docker-compose.yml
- run_docker_compose .
- else
- bash init.sh
- install_package make
- make up
- fi
- popd
- # run_docker_image -p 8080:8080 -d -v <some local directory>/config.yml:/opt/config.yml --name dcae-inventory <docker registry>/dcae-inventory:<version>
-}
-
-# init_dcae() - Function that initializes DCAE Controller services
-function init_dcae {
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "dcae"
- if [[ "$compile_repo" == "True" ]]; then
- compile_repos "dcae"
- fi
- fi
-
- _create_config_file
- if [[ "$skip_get_images" == "False" ]]; then
- get_dcae_images
- if [[ "$skip_install" == "False" ]]; then
- install_dcae
- fi
- fi
-}
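
init_dcae assumes a sizeable set of OpenStack and Nexus variables are already defined for _create_config_file; an illustrative subset with placeholder values (not working credentials):

    export dcae_zone=iad4 dcae_state=vm artifacts_version=1.1.0
    export tenant_id=0123456789abcdef openstack_region=RegionOne
    export openstack_user=demo openstack_password=secret
    export nexus_repo=https://nexus.onap.org/content/sites/raw
    init_dcae
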
diff --git a/bootstrap/vagrant-onap/lib/files/aai.pem b/bootstrap/vagrant-onap/lib/files/aai.pem
deleted file mode 100644
index d446705c7..000000000
--- a/bootstrap/vagrant-onap/lib/files/aai.pem
+++ /dev/null
@@ -1,102 +0,0 @@
-Bag Attributes
- friendlyName: aaiopenecomp
- localKeyID: 54 69 6D 65 20 31 34 39 35 35 31 32 38 30 33 36 34 39
-subject=/C=US/ST=NJ/L=Bedminster/O=OpenECOMP/OU=SimpleDemo/CN=aai.api.simpledemo.openecomp.org/emailAddress=aai-host@api.simpledemo.openecomp.org
-issuer=/C=US/ST=NJ/L=Bedminster/O=OpenECOMP/OU=simpledemo/CN=OpenECOMP simpledemo Server CA X1/emailAddress=simpledemo@openecomp.org
------BEGIN CERTIFICATE-----
-MIIEiTCCA3GgAwIBAgIJAIPKfDLcn3MpMA0GCSqGSIb3DQEBCwUAMIGtMQswCQYD
-VQQGEwJVUzELMAkGA1UECAwCTkoxEzARBgNVBAcMCkJlZG1pbnN0ZXIxEjAQBgNV
-BAoMCU9wZW5FQ09NUDETMBEGA1UECwwKc2ltcGxlZGVtbzEqMCgGA1UEAwwhT3Bl
-bkVDT01QIHNpbXBsZWRlbW8gU2VydmVyIENBIFgxMScwJQYJKoZIhvcNAQkBFhhz
-aW1wbGVkZW1vQG9wZW5lY29tcC5vcmcwHhcNMTYxMTMwMTUzODM5WhcNMTcxMTMw
-MTUzODM5WjCBuTELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAk5KMRMwEQYDVQQHDApC
-ZWRtaW5zdGVyMRIwEAYDVQQKDAlPcGVuRUNPTVAxEzARBgNVBAsMClNpbXBsZURl
-bW8xKTAnBgNVBAMMIGFhaS5hcGkuc2ltcGxlZGVtby5vcGVuZWNvbXAub3JnMTQw
-MgYJKoZIhvcNAQkBFiVhYWktaG9zdEBhcGkuc2ltcGxlZGVtby5vcGVuZWNvbXAu
-b3JnMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwQrQl8A0rT0Jjlos
-Mr/7LEhT5UOif4GGPOk+3NCIxT3lOqAbUf+d9ZXyT2jWFRiKWua03vQ+Dxc8c2h2
-RRuH8LwEiOiWqPjWRxNqsARzZMI3ryHFCFBZh0FcpjH9kEeKVlLDYuV68k+ZucKd
-NiqUNn61lD7kbmEGwvzKwf91FrJ09+CBMx1OnWKm3gCNKDqAEFMZCOdn2MgesJYB
-/03lzPBS1jDfBXImXRcTBzpgA+wdCLn0cIQ1eLWUwS5tUqUJNh36nHdVyJ0P2Yjd
-JLuxhFcmBKOz1ShyyO+BBtKBO8EGbU6qKflOiwOw0Fsn8LjKcrHQ58NPui5y04BU
-Rypf3QIDAQABo4GdMIGaMAwGA1UdEwEB/wQCMAAwDgYDVR0PAQH/BAQDAgO4MB0G
-A1UdDgQWBBQyMUOsE2J+CKzK0qd8KFBD2gaWyjBbBgNVHSAEVDBSMFAGBFUdIAAw
-SDBGBggrBgEFBQcCAjA6GjhLZWVwIGF3YXkgZnJvbSBjaGlsZHJlbi4gIFRoaXMg
-Y2VydGlmaWNhdGUgaXMgbm90IGEgdG95LjANBgkqhkiG9w0BAQsFAAOCAQEAnkoy
-2tWJOyyyIQwtVojUxv1GWQPnw3WCUcKpuX4CJhHXLxNErW1fBg7bmo08BNmBPPpq
-WrJsy5lbBgUo9kgpViux5Stfy1rRIRsRLfl/icgCvJmUAxkmRCZL7yUvwG4K7s+8
-DwT+nW/XuWNP6Hd/qHccexB6COJ8KwvTdVoxAkCdX8qw4MCb/f7Kb1yle/vwBM5Q
-UUONCJ4bEns1vnb9DGlNDUJNwCfwORAaVJpVS38Mv4UnSTmb2KMePtCWcx/dNsYR
-2XrSGqLDnTvHwOpyhbfFTmackysGoSuDytORXy8YbwEiF13BwEK8i3rgNN0Z2ojf
-cpmE2xxmaa+A2uuN6g==
------END CERTIFICATE-----
-Bag Attributes
- friendlyName: root
- 2.16.840.1.113894.746875.1.1: <Unsupported tag 6>
-subject=/C=US/ST=NJ/L=Bedminster/O=OpenECOMP/OU=simpledemo/CN=OpenECOMP simpledemo Server CA X1/emailAddress=simpledemo@openecomp.org
-issuer=/C=US/ST=NJ/L=Bedminster/O=OpenECOMP/OU=simpledemo/CN=OpenECOMP simpledemo Root Certification Authority/emailAddress=simpledemo@openecomp.org
------BEGIN CERTIFICATE-----
-MIIFpTCCA42gAwIBAgIJAJqx8dKnCZZoMA0GCSqGSIb3DQEBCwUAMIG9MQswCQYD
-VQQGEwJVUzELMAkGA1UECAwCTkoxEzARBgNVBAcMCkJlZG1pbnN0ZXIxEjAQBgNV
-BAoMCU9wZW5FQ09NUDETMBEGA1UECwwKc2ltcGxlZGVtbzE6MDgGA1UEAwwxT3Bl
-bkVDT01QIHNpbXBsZWRlbW8gUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEn
-MCUGCSqGSIb3DQEJARYYc2ltcGxlZGVtb0BvcGVuZWNvbXAub3JnMB4XDTE2MTEy
-ODIxMTQyNloXDTIxMTEyNzIxMTQyNlowga0xCzAJBgNVBAYTAlVTMQswCQYDVQQI
-DAJOSjETMBEGA1UEBwwKQmVkbWluc3RlcjESMBAGA1UECgwJT3BlbkVDT01QMRMw
-EQYDVQQLDApzaW1wbGVkZW1vMSowKAYDVQQDDCFPcGVuRUNPTVAgc2ltcGxlZGVt
-byBTZXJ2ZXIgQ0EgWDExJzAlBgkqhkiG9w0BCQEWGHNpbXBsZWRlbW9Ab3BlbmVj
-b21wLm9yZzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALr4rivKQuRk
-YNf5Ig40e1nqj6s6LB1vgMOYbKfRziOFpPcUpsHPOhusHowiUsrU1vdFSzPz6Ej7
-PjlmNSg2Qka8YCn9kd6QgM7U0KcPJvIucBp+qjifH3EvP0jgDPhDeVRYxzV454dv
-5kQ9uCpswJP7YAnX51dkWeH8nwPUoagt31bOl9LXENSrgxEThxdLYMJnQJWk2CmV
-otXM4tT1dxyJxFUrZ6uJCEAYw5VtlplqihHf8lHy+sWQavtsLz/4dc+sGeXSTfoI
-voKvoh3uZ5gEhGV8yfJxk1veX5y5/AxP80vQ+smWYjTnQL5QQ57y4bciez4XVBmQ
-SWimWtOi4e8CAwEAAaOBtTCBsjAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
-AwIBhjAdBgNVHQ4EFgQUTqdsYgGNGubdJHq9tsaJhM9HE5wwcAYDVR0gBGkwZzBl
-BgRVHSAAMF0wWwYIKwYBBQUHAgIwTxpNSWYgeW91IHRydXN0IHRoaXMgY2VydCB0
-aGVuIHdlIGhhdmUgYSBicmlkZ2UgdGhhdCB5b3UgbWlnaHQgYmUgaW50ZXJlc3Rl
-ZCBpbi4wDQYJKoZIhvcNAQELBQADggIBAKNNlRqFuE/JgV1BHyYK0xoSXH4aZP/7
-IoHtDVcSaZAOOuFOUrwVMUbzRBebbb6RpFwt/X+NLFUGysd+XNLF7W7lzxKtmFNX
-n4OpNkBe0y5O7yurus8rERHzu3jiOSgVo+WzDlGpYSRnG3hI2qPWqD+Puzx/WwI8
-XUTuzEQQ3gUSyVFfXHpay3VpYmLZiLJ9WKY5SDw7Ie6Sxrju4Qm1HwnFY8wHZGcs
-2KMQzorJ1ZNQf523yUTghbT0rKaSFaD8zugPtI2ONfFG/QgrkQXo78opzPsHnHwa
-SxGSiAgeLbwAUCvPNl27zr6k6+7TcNjV0VUivAs0OG3VEAdgi7UWYB+30KfWwHwE
-zGmvd4IAGqIqlqLcSVArN5z8JK1B5nfjQn5UrclU1vK+dnuiKE2X4rKuBTRYRFR/
-km+mj4koYFPKFHndmJl1uv2OCJK9l5CSIuKWeI1qv8BASKqgNdoT/SKBXqxgYlCb
-o+j4IDjxrxChRO+e5vl9lA7INfRrbljCkUjfLRa+v2q9tWQ3+EQUwwnSrSfihh2T
-j0Tksr6b8dDsvMlCdOKG1B+JPcEXORSFKNXVTEfjqpJG8s16kFAocWt3S6xO0k1t
-qbQp+3tWQgW2TGnX0rMZzB6NGRNfWhlYmq2zHgXkiCIZ26Ztgt/LNbwEvN3+VlLo
-z/Rd+SKtlrfb
------END CERTIFICATE-----
-Bag Attributes
- friendlyName: aaiopenecomp
- localKeyID: 54 69 6D 65 20 31 34 39 35 35 31 32 38 30 33 36 34 39
-Key Attributes: <No Attributes>
------BEGIN PRIVATE KEY-----
-MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBCtCXwDStPQmO
-Wiwyv/ssSFPlQ6J/gYY86T7c0IjFPeU6oBtR/531lfJPaNYVGIpa5rTe9D4PFzxz
-aHZFG4fwvASI6Jao+NZHE2qwBHNkwjevIcUIUFmHQVymMf2QR4pWUsNi5XryT5m5
-wp02KpQ2frWUPuRuYQbC/MrB/3UWsnT34IEzHU6dYqbeAI0oOoAQUxkI52fYyB6w
-lgH/TeXM8FLWMN8FciZdFxMHOmAD7B0IufRwhDV4tZTBLm1SpQk2Hfqcd1XInQ/Z
-iN0ku7GEVyYEo7PVKHLI74EG0oE7wQZtTqop+U6LA7DQWyfwuMpysdDnw0+6LnLT
-gFRHKl/dAgMBAAECggEBAJko2HkeIW01mUhdWOXnFgR7WjzzXZEmlffr41lVBr7f
-rejGsQZs9cms73R7rCdOsi8PDoA6bqaQfADg571K659fvYVWbHqh+3im+iWvUlKm
-GYIVG/vNrEq43CZsUU7Qw/xba/QiOFraNxCATTV1sORPwgddouXEi5XW9ZPX9/FJ
-wORx4L/K0DfHX1rr+rtOoHCJdZYhn3Ij87kmR8Mwg0fNeWhHqtxUEyM/itRjCvOe
-mgt2V8DORhmq12L4+5QJctBrkBVRp9Rh6YSZZBGnKbTSgf4q648BdkJDLSK4cguT
-D6BAw3gxj5V4wt5W0wn2JpjadFwnixrTzvMP/yAqfK0CgYEA93nBAoUPw8nzQkwk
-8iWBjfJ999Rw92hnnvk3xbcQcGfgUYuB4dxwe6FQTmFIVylt81er1YUvMb3ao7fo
-5ZcGnI5p1idjsd27kbZJLxb5Oh919hKu5IfkfYsVgnC0UdKCTgH5CaH0U4ATuXwt
-RL6qm0XcLALs5y2OO6z3s+mYhisCgYEAx7EQ8MA45bkXnRQiHBhGcIIcr2sRDfVJ
-OhHmGxx3EcYgtsIYKTqtQOyIt/nQxo6iyNL9bzfzBTybFJLuj63ZG1Ef4LosJedl
-eAU2NsKv5MlKYDSdNbLAJ0Op9I2Xu/pXQecPwY/3MkIQArdQCLevMLEGywCsuJTn
-BjkJNDkb9hcCgYAhoFiaiAwJVYKJSqFmibQd3opBR4uGApi54DE021gPff3b9rHS
-R8q88cFgtRVISqfW/d2qaKtt/dcckdvCfo/2a99zqux/+ZoIBZXSITQCMs4rfoRn
-JxPj/ycQD1JhH9J22QvGxEvXoLqNZJMeGS5DZO2yyT75dpYyA6Gwv5fq+wKBgQC5
-AhV917lfLELyZurLOLpaFlHZO8rLMcusH1UfHRo7v2IjsGOOHyzRD9QDD1IcA55R
-jRj8Z8uhuGq9fvvC5gBVTK3KGPI6E85wifOWfH1V7CAaTeSxEXDxb8EQL/a6U89v
-4VE5tdYCHC6VNZzS1staw0hV38QmJt57Z3Bdj+OV9QKBgE/b9fxpo+SVQ37BzNNY
-SEKTTijaddz8fdomApg6a2eFJL93Ej/op7N7gnHtPWMivPnRRza9ZjfnG+aZ7n2J
-sWyBiZK9xliS2TsF3l3q9Z0Vaq3i1nOlV7Bd20ZS8KjQjDtKnIRfLkQDkvmXbU5L
-emwkdsQZbpPFJch3mCGtI7JW
------END PRIVATE KEY-----
diff --git a/bootstrap/vagrant-onap/lib/files/all-in-one b/bootstrap/vagrant-onap/lib/files/all-in-one
deleted file mode 100644
index efdb2bfce..000000000
--- a/bootstrap/vagrant-onap/lib/files/all-in-one
+++ /dev/null
@@ -1,585 +0,0 @@
-# These initial groups are the only groups required to be modified. The
-# additional groups are for more control of the environment.
-[control]
-localhost ansible_connection=local
-
-[network]
-localhost ansible_connection=local
-
-[compute]
-localhost ansible_connection=local
-
-[storage]
-localhost ansible_connection=local
-
-[monitoring]
-localhost ansible_connection=local
-
-[deployment]
-localhost ansible_connection=local
-
-# You can explicitly specify which hosts run each project by updating the
-# groups in the sections below. Common services are grouped together.
-[chrony-server:children]
-haproxy
-
-[chrony:children]
-network
-compute
-storage
-monitoring
-
-[collectd:children]
-compute
-
-[baremetal:children]
-control
-
-[grafana:children]
-monitoring
-
-[etcd:children]
-control
-compute
-
-[karbor:children]
-control
-
-[kibana:children]
-control
-
-[telegraf:children]
-compute
-control
-monitoring
-network
-storage
-
-[elasticsearch:children]
-control
-
-[haproxy:children]
-network
-
-[hyperv]
-#hyperv_host
-
-[hyperv:vars]
-#ansible_user=user
-#ansible_password=password
-#ansible_port=5986
-#ansible_connection=winrm
-#ansible_winrm_server_cert_validation=ignore
-
-[mariadb:children]
-control
-
-[rabbitmq:children]
-control
-
-[outward-rabbitmq:children]
-control
-
-[qdrouterd:children]
-control
-
-[mongodb:children]
-control
-
-[keystone:children]
-control
-
-[glance:children]
-control
-
-[nova:children]
-control
-
-[neutron:children]
-network
-
-[openvswitch:children]
-network
-compute
-manila-share
-
-[opendaylight:children]
-network
-
-[cinder:children]
-control
-
-[cloudkitty:children]
-control
-
-[freezer:children]
-control
-
-[memcached:children]
-control
-
-[horizon:children]
-control
-
-[swift:children]
-control
-
-[barbican:children]
-control
-
-[heat:children]
-control
-
-[murano:children]
-control
-
-[ceph:children]
-control
-
-[ironic:children]
-control
-
-[influxdb:children]
-monitoring
-
-[magnum:children]
-control
-
-[sahara:children]
-control
-
-[solum:children]
-control
-
-[mistral:children]
-control
-
-[manila:children]
-control
-
-[panko:children]
-control
-
-[gnocchi:children]
-control
-
-[ceilometer:children]
-control
-
-[aodh:children]
-control
-
-[congress:children]
-control
-
-[tacker:children]
-control
-
-# Tempest
-[tempest:children]
-control
-
-[senlin:children]
-control
-
-[vmtp:children]
-control
-
-[trove:children]
-control
-
-[watcher:children]
-control
-
-[rally:children]
-control
-
-[searchlight:children]
-control
-
-[octavia:children]
-control
-
-[designate:children]
-control
-
-[placement:children]
-control
-
-[bifrost:children]
-deployment
-
-[zun:children]
-control
-
-[skydive:children]
-monitoring
-
-[redis:children]
-control
-
-# Additional control implemented here. These groups allow you to control which
-# services run on which hosts at a per-service level.
-#
-# Word of caution: Some services are required to run on the same host to
-# function appropriately. For example, neutron-metadata-agent must run on the
-# same host as the l3-agent and (depending on configuration) the dhcp-agent.
-
-# Glance
-[glance-api:children]
-glance
-
-[glance-registry:children]
-glance
-
-# Nova
-[nova-api:children]
-nova
-
-[nova-conductor:children]
-nova
-
-[nova-consoleauth:children]
-nova
-
-[nova-novncproxy:children]
-nova
-
-[nova-scheduler:children]
-nova
-
-[nova-spicehtml5proxy:children]
-nova
-
-[nova-compute-ironic:children]
-nova
-
-[nova-serialproxy:children]
-nova
-
-# Neutron
-[neutron-server:children]
-control
-
-[neutron-dhcp-agent:children]
-neutron
-
-[neutron-l3-agent:children]
-neutron
-
-[neutron-lbaas-agent:children]
-neutron
-
-[neutron-metadata-agent:children]
-neutron
-
-[neutron-vpnaas-agent:children]
-neutron
-
-[neutron-bgp-dragent:children]
-neutron
-
-# Ceph
-[ceph-mon:children]
-ceph
-
-[ceph-rgw:children]
-ceph
-
-[ceph-osd:children]
-storage
-
-# Cinder
-[cinder-api:children]
-cinder
-
-[cinder-backup:children]
-storage
-
-[cinder-scheduler:children]
-cinder
-
-[cinder-volume:children]
-storage
-
-# Cloudkitty
-[cloudkitty-api:children]
-cloudkitty
-
-[cloudkitty-processor:children]
-cloudkitty
-
-# Freezer
-[freezer-api:children]
-freezer
-
-# iSCSI
-[iscsid:children]
-compute
-storage
-ironic-conductor
-
-[tgtd:children]
-storage
-
-# Karbor
-[karbor-api:children]
-karbor
-
-[karbor-protection:children]
-karbor
-
-[karbor-operationengine:children]
-karbor
-
-# Manila
-[manila-api:children]
-manila
-
-[manila-scheduler:children]
-manila
-
-[manila-share:children]
-network
-
-[manila-data:children]
-manila
-
-# Swift
-[swift-proxy-server:children]
-swift
-
-[swift-account-server:children]
-storage
-
-[swift-container-server:children]
-storage
-
-[swift-object-server:children]
-storage
-
-# Barbican
-[barbican-api:children]
-barbican
-
-[barbican-keystone-listener:children]
-barbican
-
-[barbican-worker:children]
-barbican
-
-# Trove
-[trove-api:children]
-trove
-
-[trove-conductor:children]
-trove
-
-[trove-taskmanager:children]
-trove
-
-# Heat
-[heat-api:children]
-heat
-
-[heat-api-cfn:children]
-heat
-
-[heat-engine:children]
-heat
-
-# Murano
-[murano-api:children]
-murano
-
-[murano-engine:children]
-murano
-
-# Ironic
-[ironic-api:children]
-ironic
-
-[ironic-conductor:children]
-ironic
-
-[ironic-inspector:children]
-ironic
-
-[ironic-pxe:children]
-ironic
-
-# Magnum
-[magnum-api:children]
-magnum
-
-[magnum-conductor:children]
-magnum
-
-# Solum
-[solum-api:children]
-solum
-
-[solum-worker:children]
-solum
-
-[solum-deployer:children]
-solum
-
-[solum-conductor:children]
-solum
-
-# Mistral
-[mistral-api:children]
-mistral
-
-[mistral-executor:children]
-mistral
-
-[mistral-engine:children]
-mistral
-
-# Aodh
-[aodh-api:children]
-aodh
-
-[aodh-evaluator:children]
-aodh
-
-[aodh-listener:children]
-aodh
-
-[aodh-notifier:children]
-aodh
-
-# Panko
-[panko-api:children]
-panko
-
-# Gnocchi
-[gnocchi-api:children]
-gnocchi
-
-[gnocchi-statsd:children]
-gnocchi
-
-[gnocchi-metricd:children]
-gnocchi
-
-# Sahara
-[sahara-api:children]
-sahara
-
-[sahara-engine:children]
-sahara
-
-# Ceilometer
-[ceilometer-api:children]
-ceilometer
-
-[ceilometer-central:children]
-ceilometer
-
-[ceilometer-notification:children]
-ceilometer
-
-[ceilometer-collector:children]
-ceilometer
-
-[ceilometer-compute:children]
-compute
-
-# Congress
-[congress-api:children]
-congress
-
-[congress-datasource:children]
-congress
-
-[congress-policy-engine:children]
-congress
-
-# Multipathd
-[multipathd:children]
-compute
-
-# Watcher
-[watcher-api:children]
-watcher
-
-[watcher-engine:children]
-watcher
-
-[watcher-applier:children]
-watcher
-
-# Senlin
-[senlin-api:children]
-senlin
-
-[senlin-engine:children]
-senlin
-
-# Searchlight
-[searchlight-api:children]
-searchlight
-
-[searchlight-listener:children]
-searchlight
-
-# Octavia
-[octavia-api:children]
-octavia
-
-[octavia-health-manager:children]
-octavia
-
-[octavia-housekeeping:children]
-octavia
-
-[octavia-worker:children]
-octavia
-
-# Designate
-[designate-api:children]
-designate
-
-[designate-central:children]
-designate
-
-[designate-mdns:children]
-network
-
-[designate-worker:children]
-designate
-
-[designate-sink:children]
-designate
-
-[designate-backend-bind9:children]
-designate
-
-# Placement
-[placement-api:children]
-placement
-
-# Zun
-[zun-api:children]
-zun
-
-[zun-compute:children]
-compute
-
-# Skydive
-[skydive-analyzer:children]
-skydive
-
-[skydive-agent:children]
-compute
-network
-
-# Tacker
-[tacker-server:children]
-tacker
-
-[tacker-conductor:children]
-tacker
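
This inventory mirrors Kolla-Ansible's stock all-in-one file; assuming kolla-ansible is installed and /etc/kolla is populated, it would be consumed roughly as:

    kolla-ansible -i all-in-one bootstrap-servers
    kolla-ansible -i all-in-one prechecks
    kolla-ansible -i all-in-one deploy
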
diff --git a/bootstrap/vagrant-onap/lib/files/globals.yml b/bootstrap/vagrant-onap/lib/files/globals.yml
deleted file mode 100644
index d10cc3d83..000000000
--- a/bootstrap/vagrant-onap/lib/files/globals.yml
+++ /dev/null
@@ -1,2 +0,0 @@
----
-openstack_release: "master"
diff --git a/bootstrap/vagrant-onap/lib/files/haproxy.cfg b/bootstrap/vagrant-onap/lib/files/haproxy.cfg
deleted file mode 100644
index ac4b75498..000000000
--- a/bootstrap/vagrant-onap/lib/files/haproxy.cfg
+++ /dev/null
@@ -1,120 +0,0 @@
-global
- log /dev/log local0
- stats socket /usr/local/etc/haproxy/haproxy.socket mode 660 level admin
- stats timeout 30s
- user root
- group root
- daemon
- #################################
- # Default SSL material locations#
- #################################
- ca-base /etc/ssl/certs
- crt-base /etc/ssl/private
-
- # Default ciphers to use on SSL-enabled listening sockets.
- # For more information, see ciphers(1SSL). This list is from:
- # https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
- # An alternative list with additional directives can be obtained from
- # https://mozilla.github.io/server-side-tls/ssl-config-generator/?server=haproxy
- tune.ssl.default-dh-param 2048
-
-defaults
- log global
- mode http
- option httplog
-# option dontlognull
-# errorfile 400 /etc/haproxy/errors/400.http
-# errorfile 403 /etc/haproxy/errors/403.http
-# errorfile 408 /etc/haproxy/errors/408.http
-# errorfile 500 /etc/haproxy/errors/500.http
-# errorfile 502 /etc/haproxy/errors/502.http
-# errorfile 503 /etc/haproxy/errors/503.http
-# errorfile 504 /etc/haproxy/errors/504.http
-
- option http-server-close
- option forwardfor except 127.0.0.1
- retries 6
- option redispatch
- maxconn 50000
- timeout connect 50000
- timeout client 480000
- timeout server 480000
- timeout http-keep-alive 30000
-
-
-frontend IST_8443
- mode http
- bind 0.0.0.0:8443 name https ssl crt /etc/ssl/private/aai.pem
-# log-format %ci:%cp\ [%t]\ %ft\ %b/%s\ %Tq/%Tw/%Tc/%Tr/%Tt\ %ST\ %B\ %CC\ %CS\ %tsc\ %ac/%fc/%bc/%sc/%rc\ %sq/%bq\ %hr\ %hs\ {%[ssl_c_verify],%{+Q}[ssl_c_s_dn],%{+Q}[ssl_c_i_dn]}\ %{+Q}r
- log-format "%ci:%cp [%tr] %ft %b/%s %TR/%Tw/%Tc/%Tr/%Ta %ST %B %CC \ %CS %tsc %ac/%fc/%bc/%sc/%rc %sq/%bq %hr %hs %{+Q}r"
- option httplog
- log global
- option logasap
- option forwardfor
- capture request header Host len 100
- capture response header Host len 100
- option log-separate-errors
- option forwardfor
- http-request set-header X-Forwarded-Proto https if { ssl_fc }
- http-request set-header X-AAI-Client-SSL TRUE if { ssl_c_used }
- http-request set-header X-AAI-SSL %[ssl_fc]
- http-request set-header X-AAI-SSL-Client-Verify %[ssl_c_verify]
- http-request set-header X-AAI-SSL-Client-DN %{+Q}[ssl_c_s_dn]
- http-request set-header X-AAI-SSL-Client-CN %{+Q}[ssl_c_s_dn(cn)]
- http-request set-header X-AAI-SSL-Issuer %{+Q}[ssl_c_i_dn]
- http-request set-header X-AAI-SSL-Client-NotBefore %{+Q}[ssl_c_notbefore]
- http-request set-header X-AAI-SSL-Client-NotAfter %{+Q}[ssl_c_notafter]
- http-request set-header X-AAI-SSL-ClientCert-Base64 %{+Q}[ssl_c_der,base64]
- http-request set-header X-AAI-SSL-Client-OU %{+Q}[ssl_c_s_dn(OU)]
- http-request set-header X-AAI-SSL-Client-L %{+Q}[ssl_c_s_dn(L)]
- http-request set-header X-AAI-SSL-Client-ST %{+Q}[ssl_c_s_dn(ST)]
- http-request set-header X-AAI-SSL-Client-C %{+Q}[ssl_c_s_dn(C)]
- http-request set-header X-AAI-SSL-Client-O %{+Q}[ssl_c_s_dn(O)]
- reqadd X-Forwarded-Proto:\ https
- reqadd X-Forwarded-Port:\ 8443
-
-#######################
-#ACLS FOR PORT 8446####
-#######################
-
- acl is_Port_8446_generic path_reg -i ^/aai/v[0-9]+/search/generic-query$
- acl is_Port_8446_nodes path_reg -i ^/aai/v[0-9]+/search/nodes-query$
- acl is_Port_8446_version path_reg -i ^/aai/v[0-9]+/query$
- acl is_named-query path_beg -i /aai/search/named-query
- acl is_search-model path_beg -i /aai/search/model
- use_backend IST_AAI_8446 if is_Port_8446_generic or is_Port_8446_nodes or is_Port_8446_version or is_named-query or is_search-model
-
- default_backend IST_Default_8447
-
-
-#######################
-#DEFAULT BACKEND 8447##
-#######################
-
-backend IST_Default_8447
- balance roundrobin
- http-request set-header X-Forwarded-Port %[src_port]
- http-response set-header Strict-Transport-Security max-age=16000000;\ includeSubDomains;\ preload;
- server aai aai:8447 port 8447 ssl verify none
-
-#######################
-# BACKEND 8446#########
-#######################
-
-backend IST_AAI_8446
- balance roundrobin
- http-request set-header X-Forwarded-Port %[src_port]
- http-response set-header Strict-Transport-Security max-age=16000000;\ includeSubDomains;\ preload;
- server aai aai:8446 port 8446 ssl verify none
-
-listen IST_AAI_STATS
- mode http
- bind *:8080
- stats uri /stats
- stats enable
- stats refresh 30s
- stats hide-version
- stats auth admin:admin
- stats show-legends
- stats show-desc IST AAI APPLICATION NODES
- stats admin if TRUE
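
A quick way to exercise this configuration once the proxy is running; the hostname, API version, and published ports are illustrative:

    # TLS frontend on 8443; this path matches the is_Port_8446_generic ACL
    curl -k https://aai.api.simpledemo.openecomp.org:8443/aai/v11/search/generic-query

    # Statistics listener defined above
    curl -u admin:admin http://localhost:8080/stats
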
diff --git a/bootstrap/vagrant-onap/lib/files/kolla-build.conf b/bootstrap/vagrant-onap/lib/files/kolla-build.conf
deleted file mode 100644
index 8dd14e6c6..000000000
--- a/bootstrap/vagrant-onap/lib/files/kolla-build.conf
+++ /dev/null
@@ -1,5 +0,0 @@
-[DEFAULT]
-base = ubuntu
-profile = main
-
-[profiles]
diff --git a/bootstrap/vagrant-onap/lib/files/kubectl_config_generator.py b/bootstrap/vagrant-onap/lib/files/kubectl_config_generator.py
deleted file mode 100644
index 6b5a6e9f6..000000000
--- a/bootstrap/vagrant-onap/lib/files/kubectl_config_generator.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import requests
-import os
-import base64
-
-RANCHER_URL = str(os.environ['RANCHER_URL'])
-RANCHER_ENVIRONMENT_ID = str(os.environ['RANCHER_ENVIRONMENT'])
-data = requests.post(RANCHER_URL + '/v1/projects/' + RANCHER_ENVIRONMENT_ID + '/apikeys',
- {"accountId": RANCHER_ENVIRONMENT_ID,
- "description": "ONAP on Kubernetes",
- "name": "ONAP on Kubernetes",
- "publicValue": "string",
- "secretValue": "password"})
-json_dct = data.json()
-access_key = json_dct['publicValue']
-secret_key = json_dct['secretValue']
-auth_header = 'Basic ' + base64.b64encode(access_key + ':' + secret_key)
-token = "\"" + str(base64.b64encode(auth_header)) + "\""
-dct = \
-"""
-apiVersion: v1
-kind: Config
-clusters:
-- cluster:
- api-version: v1
- insecure-skip-tls-verify: true
- server: "{}/r/projects/{}/kubernetes:6443"
- name: "onap_on_kubernetes"
-contexts:
-- context:
- cluster: "onap_on_kubernetes"
- user: "onap_on_kubernetes"
- name: "onap_on_kubernetes"
-current-context: "onap_on_kubernetes"
-users:
-- name: "onap_on_kubernetes"
- user:
- token: {}
-""".format(RANCHER_URL, RANCHER_ENVIRONMENT_ID, token)
-with open("config", "w") as file:
- file.write(dct)
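
A hedged invocation example for this generator; the Rancher URL and environment ID are placeholders, and the script is Python 2 era code:

    export RANCHER_URL=http://10.0.0.10:8080
    export RANCHER_ENVIRONMENT=1a7
    python kubectl_config_generator.py       # writes ./config
    mkdir -p ~/.kube && cp config ~/.kube/config
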
diff --git a/bootstrap/vagrant-onap/lib/files/passwords.yml b/bootstrap/vagrant-onap/lib/files/passwords.yml
deleted file mode 100644
index f376e31f0..000000000
--- a/bootstrap/vagrant-onap/lib/files/passwords.yml
+++ /dev/null
@@ -1,216 +0,0 @@
----
-###################
-# Ceph options
-####################
-# These options must be UUID4 values in string format
-# XXXXXXXX-XXXX-4XXX-XXXX-XXXXXXXXXXXX
-ceph_cluster_fsid:
-ceph_rgw_keystone_password:
-# for backward compatible consideration, rbd_secret_uuid is only used for nova,
-# cinder_rbd_secret_uuid is used for cinder
-rbd_secret_uuid:
-cinder_rbd_secret_uuid:
-
-###################
-# Database options
-####################
-database_password:
-
-####################
-# Docker options
-####################
-# This should only be set if you require a password for your Docker registry
-docker_registry_password:
-
-######################
-# OpenDaylight options
-######################
-opendaylight_password:
-
-####################
-# OpenStack options
-####################
-aodh_database_password:
-aodh_keystone_password:
-
-barbican_database_password:
-barbican_keystone_password:
-barbican_p11_password:
-barbican_crypto_key:
-
-keystone_admin_password:
-keystone_database_password:
-
-grafana_database_password:
-grafana_admin_password:
-
-glance_database_password:
-glance_keystone_password:
-
-gnocchi_database_password:
-gnocchi_keystone_password:
-
-karbor_database_password:
-karbor_keystone_password:
-karbor_openstack_infra_id:
-
-kuryr_keystone_password:
-
-nova_database_password:
-nova_api_database_password:
-nova_keystone_password:
-
-placement_keystone_password:
-
-neutron_database_password:
-neutron_keystone_password:
-metadata_secret:
-
-cinder_database_password:
-cinder_keystone_password:
-
-cloudkitty_database_password:
-cloudkitty_keystone_password:
-
-panko_database_password:
-panko_keystone_password:
-
-freezer_database_password:
-freezer_keystone_password:
-
-sahara_database_password:
-sahara_keystone_password:
-
-designate_database_password:
-designate_pool_manager_database_password:
-designate_keystone_password:
-# This option must be UUID4 value in string format
-designate_pool_id:
-# This option must be HMAC-MD5 value in string format
-designate_rndc_key:
-
-swift_keystone_password:
-swift_hash_path_suffix:
-swift_hash_path_prefix:
-
-heat_database_password:
-heat_keystone_password:
-heat_domain_admin_password:
-
-murano_database_password:
-murano_keystone_password:
-murano_agent_rabbitmq_password:
-
-ironic_database_password:
-ironic_keystone_password:
-
-ironic_inspector_database_password:
-ironic_inspector_keystone_password:
-
-magnum_database_password:
-magnum_keystone_password:
-
-mistral_database_password:
-mistral_keystone_password:
-
-trove_database_password:
-trove_keystone_password:
-
-ceilometer_database_password:
-ceilometer_keystone_password:
-
-watcher_database_password:
-watcher_keystone_password:
-
-congress_database_password:
-congress_keystone_password:
-
-rally_database_password:
-
-senlin_database_password:
-senlin_keystone_password:
-
-solum_database_password:
-solum_keystone_password:
-
-horizon_secret_key:
-horizon_database_password:
-
-telemetry_secret_key:
-
-manila_database_password:
-manila_keystone_password:
-
-octavia_database_password:
-octavia_keystone_password:
-octavia_ca_password:
-
-searchlight_keystone_password:
-
-tacker_database_password:
-tacker_keystone_password:
-
-zun_database_password:
-zun_keystone_password:
-
-memcache_secret_key:
-
-#HMAC secret key
-osprofiler_secret:
-
-nova_ssh_key:
- private_key:
- public_key:
-
-kolla_ssh_key:
- private_key:
- public_key:
-
-keystone_ssh_key:
- private_key:
- public_key:
-
-bifrost_ssh_key:
- private_key:
- public_key:
-
-####################
-# Gnocchi options
-####################
-gnocchi_project_id:
-gnocchi_resource_id:
-gnocchi_user_id:
-
-####################
-# Qdrouterd options
-####################
-qdrouterd_password:
-
-####################
-# RabbitMQ options
-####################
-rabbitmq_password:
-rabbitmq_cluster_cookie:
-outward_rabbitmq_password:
-outward_rabbitmq_cluster_cookie:
-
-####################
-# HAProxy options
-####################
-haproxy_password:
-keepalived_password:
-
-####################
-# Kibana options
-####################
-kibana_password:
-
-####################
-# etcd options
-####################
-etcd_cluster_token:
-
-####################
-# redis options
-####################
-redis_master_password:
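
These keys ship empty on purpose; with Kolla-Ansible the usual way to fill them is the kolla-genpwd helper, shown here as an assumption about the surrounding tooling rather than something this repository invokes:

    cp passwords.yml /etc/kolla/passwords.yml
    kolla-genpwd            # populates every empty value in /etc/kolla/passwords.yml
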
diff --git a/bootstrap/vagrant-onap/lib/files/settings.xml b/bootstrap/vagrant-onap/lib/files/settings.xml
deleted file mode 100644
index 862a3e865..000000000
--- a/bootstrap/vagrant-onap/lib/files/settings.xml
+++ /dev/null
@@ -1,369 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">
- <!-- offline | Determines whether maven should attempt to connect to the
- network when executing a build. | This will have an effect on artifact downloads,
- artifact deployment, and others. | | Default: false <offline>false</offline> -->
- <!-- proxies | This is a list of proxies which can be used on this machine
- to connect to the network. | Unless otherwise specified (by system property
- or command-line switch), the first proxy | specification in this list marked
- as active will be used. | -->
-%PROXIES_START%
-%PROXIES% %HTTP_PROXY%
-%PROXIES% %HTTPS_PROXY%
-%PROXIES_END%
- <!-- mirrors | This is a list of mirrors to be used in downloading artifacts
- from remote repositories. | | It works like this: a POM may declare a repository
- to use in resolving certain artifacts. | However, this repository may have
- problems with heavy traffic at times, so people have mirrored | it to several
- places. | | That repository definition will have a unique id, so we can create
- a mirror reference for that | repository, to be used as an alternate download
- site. The mirror site will be the preferred | server for that repository.
- | -->
- <!-- profiles | This is a list of profiles which can be activated in a variety
- of ways, and which can modify | the build process. Profiles provided in the
- settings.xml are intended to provide local machine- | specific paths and
- repository locations which allow the build to work in the local environment.
- | | For example, if you have an integration testing plugin - like cactus
- - that needs to know where | your Tomcat instance is installed, you can provide
- a variable here such that the variable is | dereferenced during the build
- process to configure the cactus plugin. | | As noted above, profiles can
- be activated in a variety of ways. One way - the activeProfiles | section
- of this document (settings.xml) - will be discussed later. Another way essentially
- | relies on the detection of a system property, either matching a particular
- value for the property, | or merely testing its existence. Profiles can also
- be activated by JDK version prefix, where a | value of '1.4' might activate
- a profile when the build is executed on a JDK version of '1.4.2_07'. | Finally,
- the list of active profiles can be specified directly from the command line.
- | | NOTE: For profiles defined in the settings.xml, you are restricted to
- specifying only artifact | repositories, plugin repositories, and free-form
- properties to be used as configuration | variables for plugins in the POM.
- | | -->
- <profiles>
- <profile>
- <id>00_maven</id>
- <repositories>
- <repository>
- <id>00_maven</id>
- <url>https://maven.restlet.com</url>
- </repository>
- </repositories>
- </profile>
- <profile>
- <id>10_nexus</id>
- <repositories>
- <repository>
- <id>10_nexus</id>
- <url>http://repo.maven.apache.org/maven2/</url>
- <releases>
- <enabled>true</enabled>
- </releases>
- <snapshots>
- <enabled>true</enabled>
- </snapshots>
- </repository>
- </repositories>
- <pluginRepositories>
- <pluginRepository>
- <id>10_nexus</id>
- <url>http://repo.maven.apache.org/maven2/</url>
- <releases>
- <enabled>true</enabled>
- </releases>
- <snapshots>
- <enabled>true</enabled>
- </snapshots>
- </pluginRepository>
- </pluginRepositories>
- </profile>
- <profile>
- <id>20_openecomp-public</id>
- <repositories>
- <repository>
- <id>20_openecomp-public</id>
- <name>20_openecomp-public</name>
- <url>https://nexus.onap.org/content/repositories/public/</url>
- <releases>
- <enabled>true</enabled>
- <updatePolicy>daily</updatePolicy>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </repository>
- </repositories>
- <pluginRepositories>
- <pluginRepository>
- <id>20_openecomp-public</id>
- <name>20_openecomp-public</name>
- <url>https://nexus.onap.org/content/repositories/public/</url>
- <releases>
- <enabled>true</enabled>
- <updatePolicy>daily</updatePolicy>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </pluginRepository>
- </pluginRepositories>
- </profile>
- <profile>
- <id>30_openecomp-staging</id>
- <repositories>
- <repository>
- <id>30_openecomp-staging</id>
- <name>30_openecomp-staging</name>
- <url>https://nexus.onap.org/content/repositories/staging/</url>
- <releases>
- <enabled>true</enabled>
- <updatePolicy>daily</updatePolicy>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </repository>
- </repositories>
- <pluginRepositories>
- <pluginRepository>
- <id>30_openecomp-staging</id>
- <name>30_openecomp-staging</name>
- <url>https://nexus.onap.org/content/repositories/staging/</url>
- <releases>
- <enabled>true</enabled>
- <updatePolicy>daily</updatePolicy>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </pluginRepository>
- </pluginRepositories>
- </profile>
- <profile>
- <id>40_openecomp-release</id>
- <repositories>
- <repository>
- <id>40_openecomp-release</id>
- <name>40_openecomp-release</name>
- <url>https://nexus.onap.org/content/repositories/releases/</url>
- <releases>
- <enabled>true</enabled>
- <updatePolicy>daily</updatePolicy>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </repository>
- </repositories>
- <pluginRepositories>
- <pluginRepository>
- <id>40_openecomp-release</id>
- <name>40_openecomp-release</name>
- <url>https://nexus.onap.org/content/repositories/releases/</url>
- <releases>
- <enabled>true</enabled>
- <updatePolicy>daily</updatePolicy>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </pluginRepository>
- </pluginRepositories>
- </profile>
- <profile>
- <id>50_openecomp-snapshots</id>
- <repositories>
- <repository>
- <id>50_openecomp-snapshot</id>
- <name>50_openecomp-snapshot</name>
- <url>https://nexus.onap.org/content/repositories/snapshots/</url>
- <releases>
- <enabled>false</enabled>
- </releases>
- <snapshots>
- <enabled>true</enabled>
- </snapshots>
- </repository>
- </repositories>
- <pluginRepositories>
- <pluginRepository>
- <id>50_openecomp-snapshot</id>
- <name>50_openecomp-snapshot</name>
- <url>https://nexus.onap.org/content/repositories/snapshots/</url>
- <releases>
- <enabled>false</enabled>
- </releases>
- <snapshots>
- <enabled>true</enabled>
- </snapshots>
- </pluginRepository>
- </pluginRepositories>
- </profile>
- <profile>
- <id>60_opendaylight-release</id>
- <repositories>
- <repository>
- <id>60_opendaylight-mirror</id>
- <name>60_opendaylight-mirror</name>
- <url>https://nexus.opendaylight.org/content/repositories/public/</url>
- <releases>
- <enabled>true</enabled>
- <updatePolicy>daily</updatePolicy>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </repository>
- </repositories>
- <pluginRepositories>
- <pluginRepository>
- <id>60_opendaylight-mirror</id>
- <name>60_opendaylight-mirror</name>
- <url>https://nexus.opendaylight.org/content/repositories/public/</url>
- <releases>
- <enabled>true</enabled>
- <updatePolicy>daily</updatePolicy>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </pluginRepository>
- </pluginRepositories>
- </profile>
- <profile>
- <id>70_opendaylight-snapshots</id>
- <repositories>
- <repository>
- <id>70_opendaylight-snapshot</id>
- <name>70_opendaylight-snapshot</name>
- <url>https://nexus.opendaylight.org/content/repositories/opendaylight.snapshot/</url>
- <releases>
- <enabled>false</enabled>
- </releases>
- <snapshots>
- <enabled>true</enabled>
- </snapshots>
- </repository>
- </repositories>
- <pluginRepositories>
- <pluginRepository>
- <id>70_opendaylight-snapshot</id>
- <name>70_opendaylight-snapshot</name>
- <url>https://nexus.opendaylight.org/content/repositories/opendaylight.snapshot/</url>
- <releases>
- <enabled>false</enabled>
- </releases>
- <snapshots>
- <enabled>true</enabled>
- </snapshots>
- </pluginRepository>
- </pluginRepositories>
- </profile>
- <profile>
- <id>80_onap</id>
- <repositories>
- <repository>
- <id>onap-snapshots</id>
- <name>onap-snapshots</name>
- <url>https://nexus.onap.org/content/repositories/snapshots/</url>
- <releases>
- <enabled>false</enabled>
- </releases>
- <snapshots>
- <enabled>true</enabled>
- </snapshots>
- </repository>
- <repository>
- <id>onap-staging</id>
- <name>onap-staging</name>
- <url>https://nexus.onap.org/content/repositories/staging/</url>
- <releases>
- <enabled>true</enabled>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </repository>
- <repository>
- <id>onap-releases</id>
- <name>onap-releases</name>
- <url>https://nexus.onap.org/content/repositories/releases/</url>
- <releases>
- <enabled>true</enabled>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </repository>
- <repository>
- <id>onap-public</id>
- <name>onap-public</name>
- <url>https://nexus.onap.org/content/repositories/public/</url>
- <releases>
- <enabled>true</enabled>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </repository>
- </repositories>
- <pluginRepositories>
- <pluginRepository>
- <id>onap-snapshots</id>
- <name>onap-snapshots</name>
- <url>https://nexus.onap.org/content/repositories/snapshots/</url>
- <releases>
- <enabled>false</enabled>
- </releases>
- <snapshots>
- <enabled>true</enabled>
- </snapshots>
- </pluginRepository>
- <pluginRepository>
- <id>onap-staging</id>
- <name>onap-staging</name>
- <url>https://nexus.onap.org/content/repositories/staging/</url>
- <releases>
- <enabled>true</enabled>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </pluginRepository>
- <pluginRepository>
- <id>onap-releases</id>
- <name>onap-releases</name>
- <url>https://nexus.onap.org/content/repositories/releases/</url>
- <releases>
- <enabled>true</enabled>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </pluginRepository>
- <pluginRepository>
- <id>onap-public</id>
- <name>onap-public</name>
- <url>https://nexus.onap.org/content/repositories/public/</url>
- <releases>
- <enabled>true</enabled>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </pluginRepository>
- </pluginRepositories>
- </profile>
- </profiles>
- <activeProfiles>
- <activeProfile>00_maven</activeProfile>
- <activeProfile>10_nexus</activeProfile>
- <activeProfile>20_openecomp-public</activeProfile>
- <activeProfile>30_openecomp-staging</activeProfile>
- <activeProfile>40_openecomp-release</activeProfile>
- <activeProfile>50_openecomp-snapshots</activeProfile>
- <activeProfile>60_opendaylight-release</activeProfile>
- <activeProfile>70_opendaylight-snapshots</activeProfile>
- <activeProfile>80_onap</activeProfile>
- </activeProfiles>
-</settings>
diff --git a/bootstrap/vagrant-onap/lib/functions b/bootstrap/vagrant-onap/lib/functions
deleted file mode 100755
index f40761f59..000000000
--- a/bootstrap/vagrant-onap/lib/functions
+++ /dev/null
@@ -1,450 +0,0 @@
-#!/bin/bash
-
-source /var/onap/commons
-source /var/onap/config/env-vars
-source /var/onap/_composed_functions
-source /var/onap/_onap_functions
-
-export MTU=$(/sbin/ifconfig | grep MTU | sed 's/.*MTU://' | sed 's/ .*//' |sort -n | head -1)
-export NIC=$(ip route get 8.8.8.8 | awk '{ print $5; exit }')
-export IP_ADDRESS=$(ifconfig $NIC | grep "inet addr" | tr -s ' ' | cut -d' ' -f3 | cut -d':' -f2)
-
-mvn_conf_file=/root/.m2/settings.xml
-
-# configure_dns() - DNS/GW IP address configuration
-function configure_dns {
- echo "nameserver 10.0.0.1" >> /etc/resolvconf/resolv.conf.d/head
- resolvconf -u
-}
-
-# get_next_ip() - Function that returns the next IP address
-function get_next_ip {
- local ip=${1:-$IP_ADDRESS}
- ip_hex=$(printf '%.2X%.2X%.2X%.2X\n' `echo $ip | sed -e 's/\./ /g'`)
- next_ip_hex=$(printf %.8X `echo $(( 0x$ip_hex + 1 ))`)
- echo $(printf '%d.%d.%d.%d\n' `echo $next_ip_hex | sed -r 's/(..)/0x\1 /g'`)
-}
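-# Illustrative example (not part of the original script): get_next_ip 192.168.1.10
-# converts the address to hex (C0A8010A), adds one (C0A8010B) and prints the
-# result back in dotted-quad form, i.e. 192.168.1.11.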
-
-# _git_timed() - git can sometimes get itself infinitely stuck with transient network
-# errors or other issues with the remote end. This wraps git in a
-# timeout/retry loop and is intended to watch over non-local git
-# processes that might hang.
-function _git_timed {
- local count=0
- local timeout=0
-
- install_package git
- until timeout -s SIGINT ${timeout} git "$@"; do
- # 124 is timeout(1)'s special return code when it reached the
- # timeout; otherwise assume fatal failure
- if [[ $? -ne 124 ]]; then
- exit 1
- fi
-
- count=$(($count + 1))
- if [ $count -eq 3 ]; then
- exit 1
- fi
- sleep 5
- done
-}
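-# Note: with timeout=0 the timeout(1) wrapper effectively waits forever; the
-# loop still retries up to three times (sleeping 5s between attempts) and
-# aborts on any git failure other than exit code 124.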
-
-# clone_repo() - Clone a Git repository into a specific folder
-function clone_repo {
- local repo_url=${3:-"https://git.onap.org/"}
- local repo=$1
- local dest_folder=${2:-$git_src_folder/$repo}
- if [ ! -d $dest_folder ]; then
- if [[ "$debug" == "False" ]]; then
- _git_timed clone --quiet ${repo_url}${repo} $dest_folder
- else
- _git_timed clone ${repo_url}${repo} $dest_folder
- fi
- fi
-}
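-# Usage sketch (mirrors the call in lib/mr below):
-#   clone_repo dcae/demo/startup/message-router ${src_folders[mr]}
-# clones https://git.onap.org/dcae/demo/startup/message-router into the
-# Message Router source folder, quietly unless debug is enabled.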
-
-# clone_repos() - Function that clones source repositories for a given project
-function clone_repos {
- local project=$1
- local repo_name=${2:-$project}
-
- for repo in ${repos[$project]}; do
- clone_repo $repo ${src_folders[$project]}${repo#*$repo_name}
- done
-}
-
-# _install_bind() - Install bind utils
-function _install_bind {
- install_packages bind9 bind9utils
-}
-
-# install_java() - Install java binaries
-function install_java {
- if is_package_installed openjdk-8-jdk; then
- return
- fi
- source /etc/os-release || source /usr/lib/os-release
- case ${ID,,} in
- *suse)
- ;;
- ubuntu|debian)
- install_package software-properties-common
- add-apt-repository -y ppa:openjdk-r/ppa
- ;;
- rhel|centos|fedora)
- ;;
- esac
- update_repos
-
- # Remove Java 7
- uninstall_packages default-jre openjdk-7-jdk openjdk-7-jre openjdk-7-jre-headless
-
- install_package openjdk-8-jdk
- # ca-certificates-java is not a dependency in the Oracle JDK/JRE so this must be explicitly installed.
- /var/lib/dpkg/info/ca-certificates-java.postinst configure
-}
-
-# install_maven() - Install maven binaries
-function install_maven {
- if is_package_installed maven3; then
- return
- fi
- install_java
- source /etc/os-release || source /usr/lib/os-release
- case ${ID,,} in
- *suse)
- ;;
- ubuntu|debian)
- install_package software-properties-common
- add-apt-repository -y ppa:andrei-pozolotin/maven3
- ;;
- rhel|centos|fedora)
- ;;
- esac
- update_repos
- install_package maven3
-
- # Remove Java 7
- uninstall_package openjdk-7-jdk
-
- _configure_maven
-}
-
-# _configure_docker_settings() - Configures Docker settings
-function _configure_docker_settings {
- local docker_conf_backup=/tmp/docker.backup
- local docker_conf=/etc/default/docker
- local chameleonsocks_filename=chameleonsocks.sh
- local max_concurrent_downloads=${1:-3}
-
- cp ${docker_conf} ${docker_conf_backup}
- if [ $http_proxy ]; then
- echo "export http_proxy=$http_proxy" >> $docker_conf
- fi
- if [ $https_proxy ]; then
- echo "export https_proxy=$https_proxy" >> $docker_conf
- #If you have a socks proxy, then use that to connect to the nexus repo
- #via a redsocks container
- if [ $socks_proxy ]; then
- wget https://raw.githubusercontent.com/crops/chameleonsocks/master/$chameleonsocks_filename
- chmod 755 $chameleonsocks_filename
- socks=$(echo $socks_proxy | sed -e "s/^.*\///" | sed -e "s/:.*$//")
- port=$(echo $socks_proxy | sed -e "s/^.*://")
- PROXY=$socks PORT=$port ./$chameleonsocks_filename --install
- rm $chameleonsocks_filename
- cp ${docker_conf_backup} ${docker_conf}
- fi
- fi
- rm ${docker_conf_backup}
-
- echo "DOCKER_OPTS=\"-H tcp://0.0.0.0:2375 -H unix:///var/run/docker.sock --max-concurrent-downloads $max_concurrent_downloads \"" >> $docker_conf
- usermod -aG docker $USER
-
- source /etc/os-release || source /usr/lib/os-release
- case ${ID,,} in
- *suse)
- ;;
- ubuntu|debian)
- service docker restart
- sleep 10
- ;;
- rhel|centos|fedora)
- ;;
- esac
-}
-
-# install_nodejs() - Download and install NodeJS
-function install_nodejs {
- if is_package_installed nodejs; then
- return
- fi
- curl -sL https://deb.nodesource.com/setup_6.x | sudo -E bash -
- install_package nodejs
-
- # Update NPM to latest version
- npm install npm -g
-}
-
-# install_python() - Install Python 2.7 and other tools necessary for development.
-function install_python {
- install_packages python2.7 python-dev
-}
-
-# _install_pip() - Install Python Package Manager
-function _install_pip {
- install_python
- if ! which pip; then
- curl -sL https://bootstrap.pypa.io/get-pip.py | python
- fi
-}
-
-# install_python_package() - Install python modules
-function install_python_package {
- local python_packages=$@
-
- _install_pip
- pip install $python_packages
-}
-
-# install_python_requirements() - Install the python modules defined in a project's requirements.txt file
-function install_python_requirements {
- local python_project_path=$1
-
- _install_pip
- pushd $python_project_path
- pip install -r requirements.txt
- popd
-}
-
-# install_docker() - Download and install docker-engine
-function install_docker {
- if $(docker version &>/dev/null); then
- return
- fi
- source /etc/os-release || source /usr/lib/os-release
- case ${ID,,} in
- *suse)
- ;;
- ubuntu|debian)
- install_packages software-properties-common linux-image-extra-$(uname -r) linux-image-extra-virtual apt-transport-https ca-certificates curl
- curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
- add-apt-repository \
- "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
- $(lsb_release -cs) stable"
- ;;
- rhel|centos|fedora)
- ;;
- esac
- update_repos
-
- install_package docker-ce
- _configure_docker_settings
-}
-
-# pull_docker_image() - Pull Docker container image from the Public Docker Registry Hub
-function pull_docker_image {
- install_docker
- local image=$1
- local tag=$2
- docker pull ${image}
- if [ ${tag} ]; then
- docker tag ${image} $tag
- fi
-}
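-# Usage sketch: pull_docker_image mysql/mysql-server:5.6 (as in lib/sdnc below)
-# pulls the image; an optional second argument re-tags the pulled image locally.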
-
-# wait_docker_pull() - Function that waits for all docker pull processes
-function wait_docker_pull {
- local counter=60
- local delay=${1:-60}
-
- sleep $delay
- while [ $(ps -ef | grep "docker pull" | wc -l) -gt 1 ]; do
- sleep $delay
- counter=$((counter - 1))
- if [ "$counter" -eq 0 ]; then
- break
- fi
- done
-}
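-# Note: the loop polls in $delay-second intervals (60s by default) until no
-# "docker pull" process remains, giving up after 60 iterations.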
-
-# run_docker_image() - Starts a Docker instance
-function run_docker_image {
- install_docker
- docker run $@
-}
-
-# run_docker_compose() - Ensures that docker-compose is installed and runs it in the background
-function run_docker_compose {
- local folder=$1
-
- install_docker_compose
- pushd $folder
- /opt/docker/docker-compose up -d
- popd
-}
-
-# install_docker_compose() - Download and install docker-compose
-function install_docker_compose {
- local docker_compose_version=${1:-1.12.0}
- if [ ! -d /opt/docker ]; then
- mkdir /opt/docker
- curl -L https://github.com/docker/compose/releases/download/$docker_compose_version/docker-compose-`uname -s`-`uname -m` > /opt/docker/docker-compose
- chmod +x /opt/docker/docker-compose
- fi
-}
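-# Note: docker-compose is installed under /opt/docker/docker-compose rather
-# than on the PATH, which is why run_docker_compose() above invokes it from
-# that location.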
-
-# install_chefdk() - Install ChefDK package
-function install_chefdk {
- local chefdk_version="2.4.17"
-
- if is_package_installed chefdk; then
- return
- fi
- pushd $(mktemp -d)
- source /etc/os-release || source /usr/lib/os-release
- case ${ID,,} in
- *suse)
- ;;
- ubuntu|debian)
- chefdk_pkg="chefdk_$chefdk_version-1_amd64.deb"
- chefdk_url="https://packages.chef.io/files/stable/chefdk/$chefdk_version/ubuntu/$VERSION_ID/$chefdk_pkg"
-
- wget $chefdk_url
- dpkg -i $chefdk_pkg
- apt-get install -f -y
- ;;
- rhel|centos|fedora)
- rpm -Uvh "https://packages.chef.io/files/stable/chefdk/$chefdk_version/el/7/chefdk-$chefdk_version-1.el7.x86_64.rpm"
- ;;
- esac
- popd
-}
-
-# _install_ODL() - Download and install the OpenDaylight SDN controller
-function _install_ODL {
- if [ ! -d /opt/opendaylight/current ]; then
- mkdir -p /opt/opendaylight/
- wget "https://nexus.opendaylight.org/content/repositories/public/org/opendaylight/integration/distribution-karaf/"$odl_version"/distribution-karaf-"$odl_version".tar.gz" -P /opt/
- tar xvf "/opt/distribution-karaf-"$odl_version".tar.gz" -C /tmp/
- mv "/tmp/distribution-karaf-"$odl_version /opt/opendaylight/current
- rm -rf "/opt/distribution-karaf-"$odl_version".tar.gz"
- fi
-}
-
-# start_ODL() - Start the OpenDaylight SDN controller
-function start_ODL {
- _install_ODL
- if [ -d /opt/opendaylight ]; then
- export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/jre
- /opt/opendaylight/current/bin/start
- sleep 180
- /opt/opendaylight/current/bin/client feature:install odl-dlux-all
- fi
-}
-
-# compile_src() - Function that compiles the Java source code through Maven
-function compile_src {
- local src_folder=$1
- pushd $src_folder
- local mvn_build='mvn clean install -DskipTests=true -Dmaven.test.skip=true -Dmaven.javadoc.skip=true -Dadditionalparam=-Xdoclint:none'
- if [[ "$debug" == "False" ]]; then
- mvn_build+=" -q"
- fi
- if [ -f pom.xml ]; then
- install_maven
- echo "Compiling $src_folder folder..."
- eval $mvn_build
- fi
- popd
-}
-
-# compile_repos() - Function that compiles source repositories for a given project
-function compile_repos {
- local project=$1
-
- for repo in ${repos[$project]}; do
- compile_src ${src_folders[$project]}${repo#*$project}
- done
-}
-
-# build_docker_image() - Build Docker container image from source code
-function build_docker_image {
- local src_folder=$1
- local profile=$2
- install_docker
- pushd $src_folder
-
- if [ -f pom.xml ]; then
- install_maven
- # Cleanup external repo
- sed -i 's|${docker.push.registry}/||g' pom.xml
- local docker_build="mvn clean package docker:build -DskipTests=true -Dmaven.test.skip=true -Dmaven.javadoc.skip=true"
- if [ $profile ]; then
- docker_build+=" -P $profile"
- fi
- if [[ "$debug" == "False" ]]; then
- docker_build+=" -q"
- fi
- if [ $http_proxy ]; then
- if ! grep -ql "docker.buildArg.http_proxy" pom.xml ; then
- docker_build+=" -Ddocker.buildArg.http_proxy=$http_proxy"
- fi
- if ! grep -ql "docker.buildArg.HTTP_PROXY" pom.xml ; then
- docker_build+=" -Ddocker.buildArg.HTTP_PROXY=$http_proxy"
- fi
- fi
- if [ $https_proxy ]; then
- if ! grep -ql "docker.buildArg.https_proxy" pom.xml ; then
- docker_build+=" -Ddocker.buildArg.https_proxy=$https_proxy"
- fi
- if ! grep -ql "docker.buildArg.HTTPS_PROXY" pom.xml ; then
- docker_build+=" -Ddocker.buildArg.HTTPS_PROXY=$https_proxy"
- fi
- fi
- elif [ -f Dockerfile ]; then
-        # NOTE: Workaround for dmaapbc images
- sed -i '/LocalKey/d' Dockerfile
- sed -i "s/nexus3.onap.org\:10003\///g" Dockerfile
- local docker_build="docker build -t $profile -f ./Dockerfile ."
- if [ $http_proxy ]; then
- docker_build+=" --build-arg http_proxy=$http_proxy"
- docker_build+=" --build-arg HTTP_PROXY=$http_proxy"
- fi
- if [ $https_proxy ]; then
- docker_build+=" --build-arg https_proxy=$https_proxy"
- docker_build+=" --build-arg HTTPS_PROXY=$https_proxy"
- fi
- fi
- echo $docker_build
- eval $docker_build
- popd
-}
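-# Usage sketch (mirrors the call in lib/sdc below):
-#   build_docker_image ${src_folders[sdc]}/sdc-os-chef docker
-# runs "mvn clean package docker:build -P docker" in that folder when a
-# pom.xml is present, or a plain "docker build" when only a Dockerfile exists,
-# passing any configured proxies as build arguments.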
-
-# mount_external_partition() - Create partition and mount the external volume
-function mount_external_partition {
- local dev_name="/dev/$1"
- local mount_dir=$2
-
- sfdisk $dev_name << EOF
-;
-EOF
- mkfs -t ext4 ${dev_name}1
- mkdir -p $mount_dir
- mount ${dev_name}1 $mount_dir
- echo "${dev_name}1 $mount_dir ext4 errors=remount-ro,noatime,barrier=0 0 1" >> /etc/fstab
-}
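-# Note: the single ";" fed to sfdisk creates one partition spanning the whole
-# device, e.g. mount_external_partition sdb /data/ (as in lib/sdc below)
-# formats /dev/sdb1 as ext4 and mounts it on /data/.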
-
-# add_no_proxy_value() - Add no_proxy values into environment file, used for internal IPs generated at deploy time
-function add_no_proxy_value {
- if [[ `grep "no_proxy" /etc/environment` ]]; then
- sed -i.bak "s/^no_proxy.*$/&,$1/" /etc/environment
- else
- echo "no_proxy=$1" >> /etc/environment
- fi
- if [[ `grep "NO_PROXY" /etc/environment` ]]; then
- sed -i.bak "s/^NO_PROXY.*$/&,$1/" /etc/environment
- else
- echo "NO_PROXY=$1" >> /etc/environment
- fi
-}
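-# Illustrative example (hypothetical address): add_no_proxy_value 192.168.53.3
-# appends ",192.168.53.3" to existing no_proxy/NO_PROXY entries in
-# /etc/environment, or creates those entries when they are absent.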
-
diff --git a/bootstrap/vagrant-onap/lib/mr b/bootstrap/vagrant-onap/lib/mr
deleted file mode 100755
index bba748618..000000000
--- a/bootstrap/vagrant-onap/lib/mr
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-# get_mr_images() - Function that retrieves the Message Router Docker images
-function get_mr_images {
- pull_docker_image attos/dmaap
- pull_docker_image wurstmeister/zookeeper
-}
-
-# install_message_router() - Downloads and configures the Message Router source code
-function install_message_router {
- install_docker_compose
-
- pushd ${src_folders[mr]}
- bash deploy.sh
- popd
-}
-
-# init_mr() - Function that initializes Message Router services
-function init_mr {
- if [[ "$clone_repo" == "True" ]]; then
- clone_repo dcae/demo/startup/message-router ${src_folders[mr]}
- fi
- if [[ "$skip_get_images" == "False" ]]; then
- get_mr_images
- if [[ "$skip_install" == "False" ]]; then
- install_message_router
- fi
- fi
-}
diff --git a/bootstrap/vagrant-onap/lib/msb b/bootstrap/vagrant-onap/lib/msb
deleted file mode 100755
index bcf27fe75..000000000
--- a/bootstrap/vagrant-onap/lib/msb
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-# _build_msb_images() - Function that creates the Microservices Bus Docker images from source code
-function _build_msb_images {
- if [[ "$compile_repo" != "True" ]]; then
- compile_repos "msb"
- fi
-
- build_docker_image ${src_folders[msb]}/apigateway/distributions/msb-apigateway/src/main/basedocker onap/msb/msb_base
- build_docker_image ${src_folders[msb]}/apigateway/distributions/msb-apigateway/src/main/docker onap/msb/msb_apigateway
- build_docker_image ${src_folders[msb]}/discovery/distributions/msb-discovery/src/main/docker onap/msb/msb_discovery
-}
-
-# get_msb_images() - Function that retrieves the Microservices Bus images
-function get_msb_images {
- pull_docker_image "consul:0.9.3"
- if [[ "$build_image" == "True" ]]; then
- _build_msb_images
- else
- unset docker_version
- for image in base apigateway discovery; do
- pull_onap_image msb/msb_$image
- done
- fi
-}
-
-# install_msb() - Downloads and configures Microservices Bus source code
-function install_msb {
- run_docker_image -d --net=host --name msb_consul consul:0.9.3
- run_docker_image -d --net=host --name msb_discovery nexus3.onap.org:10001/onap/msb/msb_discovery
- run_docker_image -d --net=host -e "ROUTE_LABELS=visualRange:1" --name msb_internal_apigateway nexus3.onap.org:10001/onap/msb/msb_apigateway
-}
-
-# init_msb() - Function that initializes Microservices Bus services
-function init_msb {
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "msb"
- if [[ "$compile_repo" == "True" ]]; then
- compile_repos "msb"
- fi
- fi
- if [[ "$skip_get_images" == "False" ]]; then
- get_msb_images
- if [[ "$skip_install" == "False" ]]; then
- install_msb
- fi
- fi
-}
diff --git a/bootstrap/vagrant-onap/lib/mso b/bootstrap/vagrant-onap/lib/mso
deleted file mode 100755
index 6dd0676eb..000000000
--- a/bootstrap/vagrant-onap/lib/mso
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-# get_mso_images() - Function that retrieves or creates MSO Docker images
-function get_mso_images {
- if [[ "$build_image" == "True" ]]; then
- export GIT_NO_PROJECT=/opt/
- compile_src ${src_folders[mso]}
- build_docker_image ${src_folders[mso]}/packages/docker docker
- fi
-}
-
-# install_mso() - Install MSO Docker configuration project
-function install_mso {
- MSO_ENCRYPTION_KEY=$(cat /opt/mso/docker-config/encryption.key)
- echo -n "$openstack_api_key" | openssl aes-128-ecb -e -K $MSO_ENCRYPTION_KEY -nosalt | xxd -c 256 -p > /opt/config/api_key.txt
-
- # Deployments in OpenStack require a keystone file
- if [ -e /opt/config/keystone.txt ]; then
- KEYSTONE_URL=$(cat /opt/config/keystone.txt)
- DCP_CLLI="DEFAULT_KEYSTONE"
- AUTH_TYPE="USERNAME_PASSWORD"
- else
- KEYSTONE_URL="https://identity.api.rackspacecloud.com/v2.0"
- DCP_CLLI="RAX_KEYSTONE"
- AUTH_TYPE="RACKSPACE_APIKEY"
- fi
-
- # Update the MSO configuration file.
- read -d '' MSO_CONFIG_UPDATES <<-EOF
-{
-"default_attributes":
- {
- "asdc-connections":
- {
- "asdc-controller1":
- {
- "environmentName": "$dmaap_topic"
- }
- },
- "mso-po-adapter-config":
- {
- "identity_services":
- [
- {
- "dcp_clli": "$DCP_CLLI",
- "identity_url": "$KEYSTONE_URL",
- "mso_id": "$openstack_username",
- "mso_pass": "$openstack_password",
- "admin_tenant": "service",
- "member_role": "admin",
- "tenant_metadata": "true",
- "identity_server_type": "KEYSTONE",
- "identity_authentication_type": "$AUTH_TYPE"
- }
- ]
- }
- }
-}
-EOF
- export MSO_CONFIG_UPDATES
- export MSO_DOCKER_IMAGE_VERSION=$docker_version
-
- install_docker
- install_docker_compose
- # Deploy the environment
- pushd ${src_folders[mso]}/docker-config
- chmod +x deploy.sh
- if [[ "$build_image" == "True" ]]; then
- bash deploy.sh
- else
-        # This script takes as input two Nexus repos (the first one for the MSO image, the second one for MariaDB)
- bash deploy.sh $nexus_docker_repo $nexus_username $nexus_password $nexus_docker_repo $nexus_username $nexus_password
- fi
- popd
-}
-
-# init_mso() - Function that initializes MSO services
-function init_mso {
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "mso"
- if [[ "$compile_repo" == "True" ]]; then
- compile_repos "mso"
- fi
- fi
-
- if [[ "$skip_get_images" == "False" ]]; then
- get_mso_images
- if [[ "$skip_install" == "False" ]]; then
- install_mso
- fi
- fi
-}
diff --git a/bootstrap/vagrant-onap/lib/multicloud b/bootstrap/vagrant-onap/lib/multicloud
deleted file mode 100755
index ff6f9708c..000000000
--- a/bootstrap/vagrant-onap/lib/multicloud
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-openstack_release="newton"
-
-# _build_multicloud_images() - Function that builds docker images from source code
-function _build_multicloud_images {
- install_docker
- pushd ${src_folders[multicloud]}/openstack/$openstack_release
- install_python_requirements .
- python setup.py develop
- #bash build_image.sh
- popd
-}
-
-# get_multicloud_images() - Function that retrieves or builds the Multi Cloud Docker images
-function get_multicloud_images {
- if [[ "$build_image" == "True" ]]; then
- _build_multicloud_images
- else
- pull_onap_image multicloud/openstack-$openstack_release
- fi
-}
-
-# install_multicloud() - Function that starts the Multi Cloud OpenStack service
-function install_multicloud {
- #run_docker_compose ${src_folders[multicloud]}/openstack/$openstack_release
- if [[ "$build_image" == "True" ]]; then
- multicloud-api --port 9003 --host 0.0.0.0 &
- else
- docker_id=`docker images | grep onap/multicloud/openstack-$openstack_release | grep latest | awk '{print $3; exit}'`
- docker run -d -p 0.0.0.0:9003:9003 $docker_id
- fi
-}
-
-# init_multicloud() - Function that initializes Multi Cloud services
-function init_multicloud {
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "multicloud"
- if [[ "$compile_repo" == "True" ]]; then
- compile_repos "multicloud"
- fi
- fi
- if [[ "$skip_get_images" == "False" ]]; then
- get_multicloud_images
- if [[ "$skip_install" == "False" ]]; then
- install_multicloud
- fi
- fi
-}
diff --git a/bootstrap/vagrant-onap/lib/oom b/bootstrap/vagrant-onap/lib/oom
deleted file mode 100755
index d52c029e4..000000000
--- a/bootstrap/vagrant-onap/lib/oom
+++ /dev/null
@@ -1,207 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-RANCHER_PORT=8880
-oom_delay=30
-export RANCHER_URL=http://localhost:$RANCHER_PORT
-export RANCHER_ACCESS_KEY='access_key'
-export RANCHER_SECRET_KEY='secret_key'
-
-# _install_docker() - Function that installs the Docker version required by Rancher
-function _install_docker {
- if ! $(docker version &>/dev/null); then
- curl https://releases.rancher.com/install-docker/1.12.sh | sh
- _configure_docker_settings 15
- fi
-}
-
-# _pull_rancher_images() - Function that retrieves Rancher images required for k8s
-function _pull_rancher_images {
- for image in "net:v0.13.5" "k8s:v1.8.5-rancher3" \
-"lb-service-rancher:v0.7.17" "network-manager:v0.7.18" "metadata:v0.9.5" \
-"kubectld:v0.8.5" "kubernetes-agent:v0.6.6" "dns:v0.15.3" \
-"kubernetes-auth:v0.0.8" "healthcheck:v0.3.3" "etcd:v2.3.7-13" \
-"etc-host-updater:v0.0.3" "net:holder"; do
- pull_docker_image rancher/$image &
- done
-}
-
-# _pull_k8s_images() - Function that retrieves Google k8s images
-function _pull_k8s_images {
- for image in "kubernetes-dashboard-amd64:v1.7.1" \
-"k8s-dns-sidecar-amd64:1.14.5" "k8s-dns-kube-dns-amd64:1.14.5" \
-"k8s-dns-dnsmasq-nanny-amd64:1.14.5" "heapster-influxdb-amd64:v1.3.3" \
-"heapster-grafana-amd64:v4.4.3" "heapster-amd64:v1.4.0" "pause-amd64:3.0"; do
- pull_docker_image gcr.io/google_containers/$image &
- done
-}
-
-# _install_rancher() - Function that installs Rancher CLI and container
-function _install_rancher {
- local rancher_version=v0.6.5
- local rancher_server_version=v1.6.10
- local rancher_server=rancher/server:$rancher_server_version
-
- if [ ! -d /opt/rancher/current ]; then
- mkdir -p /opt/rancher/current
- wget https://github.com/rancher/cli/releases/download/$rancher_version/rancher-linux-amd64-$rancher_version.tar.gz
- tar -xzf rancher-linux-amd64-$rancher_version.tar.gz -C /tmp
- mv /tmp/rancher-$rancher_version/rancher /opt/rancher/current/
- fi
-
- _install_docker
- pull_docker_image $rancher_server
- run_docker_image -d --restart=unless-stopped -p $RANCHER_PORT:8080 $rancher_server
- while true; do
- if curl --fail -X GET $RANCHER_URL; then
- break
- fi
-        echo "waiting for rancher"
- sleep $oom_delay
- done
-}
-
-# _install_kubernetes() - Function that deploys Kubernetes via Rancher host registration
-function _install_kubernetes {
- local rancher_agent_version=v1.2.7
- local rancher_agent=rancher/agent:$rancher_agent_version
-
- _install_rancher
-
- _pull_rancher_images
- _pull_k8s_images
- pull_docker_image $rancher_agent
- wait_docker_pull
-
- pushd /opt/rancher/current/
- export RANCHER_ENVIRONMENT=`./rancher env create -t kubernetes onap_on_kubernetes`
- popd
-
- install_python_package rancher-agent-registration
- export no_proxy=$no_proxy,$IP_ADDRESS
- rancher-agent-registration --host-ip $IP_ADDRESS --url http://$IP_ADDRESS:$RANCHER_PORT --environment $RANCHER_ENVIRONMENT --key $RANCHER_ACCESS_KEY --secret $RANCHER_SECRET_KEY
-}
-
-# _install_kubectl() - Function that installs kubectl as the Kubernetes client
-function _install_kubectl {
- if ! $(kubectl version &>/dev/null); then
- rm -rf ~/.kube
- curl -LO https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl
- chmod +x ./kubectl
- mv ./kubectl /usr/local/bin/kubectl
- mkdir ~/.kube
- pushd ~/.kube
- python /var/onap/files/kubectl_config_generator.py
- popd
- fi
-}
-
-# _install_helm() - Function that installs Helm, the Kubernetes package manager
-function _install_helm {
- local helm_version=v2.3.0
-
- if ! $(helm version &>/dev/null); then
- wget http://storage.googleapis.com/kubernetes-helm/helm-${helm_version}-linux-amd64.tar.gz
- tar -zxvf helm-${helm_version}-linux-amd64.tar.gz -C /tmp
- mv /tmp/linux-amd64/helm /usr/local/bin/helm
- helm init
- fi
-}
-
-# _pull_images_from_yaml_file() - Function that parses a YAML values file and pulls the images it references
-function _pull_images_from_yaml_file {
- local values_file=$1
- local prefix=$2
- local s='[[:space:]]*'
- local w='[a-zA-Z0-9_]*'
- fs=`echo @|tr @ '\034'`
-
- for line in $(sed -ne "s|^\($s\):|\1|" \
--e "s|^\($s\)\($w\)$s:$s[\"']\(.*\)[\"']$s\$|\1$fs\2$fs\3|p" \
--e "s|^\($s\)\($w\)$s:$s\(.*\)$s\$|\1$fs\2$fs\3|p" $values_file |
-awk -F$fs '{
-indent = length($1)/2;
-vname[indent] = $2;
-for (i in vname) {
- if (i > indent) {
- delete vname[i]}
- }
- if (length($3) > 0) {
- vn=""; for (i=0; i<indent; i++) {vn=(vn)(vname[i])(".")}
- printf("%s%s%s=%s\n", "'$prefix'",vn, $2, $3);
- }
-}' | grep image); do
- echo $line
- if echo $line | grep -q Version ; then
- pull_docker_image "$image_name:$(echo $line | awk -F "=" '{print $2}')" &
- else
- image_name=`echo ${line#*=}`
- if [[ ${image_name#*${nexus_docker_repo:-nexus3.onap.org:10001}} == *:* ]]; then
- pull_docker_image $image_name &
- else
- pull_docker_image $image_name:latest
- fi
- fi
- done
-}
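-# The sed/awk pipeline above flattens the nested YAML keys into dotted
-# "key=value" lines; every line containing "image" is then pulled in the
-# background, with *Version keys combined with the previously seen image name.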
-
-# get_oom_images() - Function that retrieves ONAP images from official hub
-function get_oom_images {
- if [[ "$build_image" == "True" ]]; then
- # TODO(electrocucaracha): Create a function for calling the build docker function of every ONAP project
- echo "Not Implemented"
- else
- if [[ "$clone_repo" != "True" ]]; then
- clone_repos "oom"
- fi
-
- docker_openecomp_login
- for values_file in `find ${src_folders[oom]}/kubernetes -name values.yaml -type f`; do
- _pull_images_from_yaml_file $values_file
- done
- docker logout
- wait_docker_pull
- fi
-}
-
-# install_oom() - Function that clones OOM and deploys ONAP
-function install_oom {
- if [[ "$clone_repo" != "True" ]]; then
- clone_repos "oom"
- fi
- pushd ${src_folders[oom]}/kubernetes/oneclick
- source setenv.bash
-
- pushd ${src_folders[oom]}/kubernetes/config
- cp onap-parameters-sample.yaml onap-parameters.yaml
- ./createConfig.sh -n onap
- popd
-
- for app in consul msb mso message-router sdnc vid robot portal policy appc aai sdc dcaegen2 log cli multicloud clamp vnfsdk uui aaf vfc kube2msb; do
- ./createAll.bash -n onap -a $app
- done
- popd
-}
-
-# init_oom() - Function that deploys ONAP using OOM
-function init_oom {
- mount_external_partition sda /var/lib/docker/
- _install_kubernetes
- _install_kubectl
- _install_helm
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "oom"
- fi
-
- if [[ "$skip_get_images" == "False" ]]; then
- get_oom_images
- if [[ "$skip_install" == "False" ]]; then
- until kubectl cluster-info; do
- echo "waiting for kubernetes host"
- sleep $oom_delay
- done
- install_oom
- fi
- fi
-}
diff --git a/bootstrap/vagrant-onap/lib/openstack b/bootstrap/vagrant-onap/lib/openstack
deleted file mode 100755
index 5e5189086..000000000
--- a/bootstrap/vagrant-onap/lib/openstack
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-kolla_config=/etc/kolla
-kolla_build=$kolla_config/kolla-build.conf
-kolla_passwords=$kolla_config/passwords.yml
-kolla_globals=$kolla_config/globals.yml
-kolla_inventory=/var/onap/files/all-in-one
-
-# install_dependencies() - Function that installs Kolla-Ansible requirements
-function install_dependencies {
- install_docker
-
- mkdir -p /etc/systemd/system/docker.service.d
- tee /etc/systemd/system/docker.service.d/kolla.conf <<-'EOF'
-[Service]
-MountFlags=shared
-EOF
- systemctl daemon-reload
- systemctl restart docker
-
- install_python_package ansible docker kolla-ansible python-openstackclient
-}
-
-# configure_deploy() - Function that modifies configuration files
-function configure_deploy {
- local network_id=$1
- local enable_opendaylight=${2-False}
-    local openstack_services="main = ceilometer,cinder,glance,heat,horizon,iscsi,keystone,neutron,nova-,swift"
- nic=$(ip route get $network_id | awk '{ print $4; exit }')
- ip_address=$(ip route get $network_id | awk '{ print $6; exit }')
- internal_vip_address=$(get_next_ip $ip_address)
-
- if [[ `env | grep -i "proxy"` ]]; then
- add_no_proxy_value $internal_vip_address
- fi
-
- mkdir -p $kolla_config
- cp /var/onap/files/globals.yml $kolla_globals
- cp /var/onap/files/passwords.yml $kolla_passwords
- cp /var/onap/files/kolla-build.conf $kolla_build
- kolla-genpwd
- echo "network_interface: \"$nic\"" >> $kolla_globals
- echo "kolla_internal_vip_address: \"$internal_vip_address\"" >> $kolla_globals
- echo "api_interface: \"{{ network_interface }}\"" >> $kolla_globals
- if [[ $enable_opendaylight == True ]]; then
- echo "enable_opendaylight: \"yes\"" >> $kolla_globals
- openstack_services+=",opendaylight"
- fi
- echo $openstack_services >> $kolla_build
-
- echo "$ip_address $(hostname)" >> /etc/hosts
-}
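-# Usage sketch: deploy_openstack below calls configure_deploy 192.168.53.0 "True",
-# which derives the NIC and host IP from the route to that network, sets the
-# Kolla internal VIP to the next IP address and enables the OpenDaylight role.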
-
-# get_openstack_images() - Function that retrieves or builds docker images
-function get_openstack_images {
- if [[ "$build_image" == "True" ]]; then
- install_python_package kolla
- kolla-build --config-file $kolla_build
- else
- kolla-ansible pull -i $kolla_inventory
- fi
-}
-
-# deploy_openstack() - Function that provisions an OpenStack deployment
-function deploy_openstack {
- install_dependencies
- configure_deploy ${1:-"192.168.53.0"} "True"
-
- get_openstack_images
- kolla-ansible deploy -i $kolla_inventory
- kolla-ansible post-deploy
- echo "source /etc/kolla/admin-openrc.sh" >> ${HOME}/.bashrc
-}
diff --git a/bootstrap/vagrant-onap/lib/policy b/bootstrap/vagrant-onap/lib/policy
deleted file mode 100755
index 1e633bef1..000000000
--- a/bootstrap/vagrant-onap/lib/policy
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-# _build_policy_images() - Function that builds Policy docker images from source code
-function _build_policy_images {
- compile_src ${src_folders[policy]}/docker
- pushd ${src_folders[policy]}/docker
- install_maven
- mvn prepare-package
- cp -r target/policy-pe/* policy-pe/
- cp -r target/policy-drools/* policy-drools
- install_docker
- bash docker_verify.sh
- popd
-}
-
-# get_policy_images() - Function that retrieves Policy docker images
-function get_policy_images {
- if [[ "$build_image" == "True" ]]; then
- _build_policy_images
- else
- for image in db pe drools nexus; do
- pull_onap_image policy/policy-$image onap/policy/policy-$image:latest
- done
- fi
-}
-
-# install_policy() - Function that clones and installs the Policy services from source code
-function install_policy {
- pushd ${src_folders[policy]}/docker
- chmod +x config/drools/drools-tweaks.sh
- echo $IP_ADDRESS > config/pe/ip_addr.txt
- run_docker_compose .
- popd
-}
-
-# init_policy() - Function that initializes Policy services
-function init_policy {
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "policy"
- if [[ "$compile_repo" == "True" ]]; then
- compile_repos "policy"
- fi
- fi
-
- if [[ "$skip_get_images" == "False" ]]; then
- get_policy_images
- if [[ "$skip_install" == "False" ]]; then
- install_policy
- fi
- fi
-}
diff --git a/bootstrap/vagrant-onap/lib/portal b/bootstrap/vagrant-onap/lib/portal
deleted file mode 100755
index fe5469822..000000000
--- a/bootstrap/vagrant-onap/lib/portal
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-# clone_all_portal_repos() - Function that clones the Portal source repos.
-function clone_all_portal_repos {
- for repo in ${repos[portal]}; do
- if [[ "$repo" == "ui/dmaapbc" ]];then
- prefix="ui"
- else
- prefix="portal"
- fi
- clone_repo $repo ${src_folders[portal]}/${repo#*$prefix}
- done
-}
-
-# compile_all_portal_repos() - Function that compiles the Portal source repos.
-function compile_all_portal_repos {
- for repo in ${repos[portal]}; do
- if [[ "$repo" == "ui/dmaapbc" ]];then
- prefix="ui"
- else
- prefix="portal"
- fi
- compile_src ${src_folders[portal]}/${repo#*$prefix}
- done
-}
-
-# _build_portal_images() - Function that builds Portal Docker images from source code
-function _build_portal_images {
- install_maven
-
- pushd ${src_folders[portal]}/deliveries
- chmod +x *.sh
- export MVN=$(which mvn)
- export GLOBAL_SETTINGS_FILE=/usr/share/maven3/conf/settings.xml
- export SETTINGS_FILE=$HOME/.m2/settings.xml
- bash build_portalapps_dockers.sh
- popd
-}
-
-# get_portal_images() - Function to get Portal images.
-function get_portal_images {
- if [[ "$build_image" == "True" ]]; then
- _build_portal_images
- else
- pull_openecomp_image portaldb ecompdb:portal
- pull_openecomp_image portalapps ep:1610-1
- fi
- pull_docker_image mariadb
-}
-
-# _install_mariadb() - Create a MariaDB data volume container
-function _install_mariadb {
- docker create --name data_vol_portal -v /var/lib/mysql mariadb
-}
-
-# install_portal() - Function that installs the source code of Portal
-function install_portal {
- install_docker
- docker rm -f ecompdb_portal
- docker rm -f 1610-1
-
- pushd ${src_folders[portal]}/deliveries
- mkdir -p /PROJECT/OpenSource/UbuntuEP/logs
- install_package unzip
- unzip -o etc.zip -d /PROJECT/OpenSource/UbuntuEP/
-
- _install_mariadb
- install_docker_compose
- bash portal_vm_init.sh
-
- sleep 180
-
- if [ ! -e /opt/config/boot.txt ]; then
- install_package mysql-client
- mysql -u root -p'Aa123456' -h $IP_ADDRESS < Apps_Users_OnBoarding_Script.sql
- echo "yes" > /opt/config/boot.txt
- fi
- popd
-}
-
-# init_portal() - Function that initializes Portal services
-function init_portal {
- if [[ "$clone_repo" == "True" ]]; then
- clone_all_portal_repos
- if [[ "$compile_repo" == "True" ]]; then
- compile_all_portal_repos
- fi
- fi
-
- if [[ "$skip_get_images" == "False" ]]; then
- get_portal_images
- if [[ "$skip_install" == "False" ]]; then
- install_portal
- fi
- fi
-}
diff --git a/bootstrap/vagrant-onap/lib/robot b/bootstrap/vagrant-onap/lib/robot
deleted file mode 100755
index 70f8cf704..000000000
--- a/bootstrap/vagrant-onap/lib/robot
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-# _setup_ete_folder() - Create and copy ete folder structure
-function _setup_ete_folder {
- mkdir -p /opt/eteshare/config
-
- cp ${src_folders[robot]}/integration_* /opt/eteshare/config
- cp ${src_folders[robot]}/vm_config2robot.sh /opt/eteshare/config
- cp ${src_folders[robot]}/ete.sh /opt
- cp ${src_folders[robot]}/demo.sh /opt
-
- chmod +x /opt/ete.sh
- chmod +x /opt/demo.sh
-}
-
-# get_robot_images() - Pull or build the Robot Docker images
-function get_robot_images {
- pull_openecomp_image testsuite
-}
-
-# install_robot() - Run Robot services
-function install_robot {
- docker rm -f openecompete_container
- run_docker_image -d --name openecompete_container -v /opt/eteshare:/share -p 88:88 $nexus_docker_repo/openecomp/testsuite:$docker_version
-}
-
-# init_robot() - Function that initializes Robot services
-function init_robot {
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "robot" "testsuite"
- _setup_ete_folder
- if [[ "$compile_repo" == "True" ]]; then
- compile_repos "robot"
- fi
- fi
-
- if [[ "$skip_get_images" == "False" ]]; then
- get_robot_images
- if [[ "$skip_install" == "False" ]]; then
- install_robot
- fi
- fi
-}
diff --git a/bootstrap/vagrant-onap/lib/sdc b/bootstrap/vagrant-onap/lib/sdc
deleted file mode 100755
index 71a5fea86..000000000
--- a/bootstrap/vagrant-onap/lib/sdc
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-# _init_data_folders() - Function that initializes the data folders
-function _init_data_folders {
- mkdir -p /data/environments
- mkdir -p /data/scripts
- mkdir -p /data/logs/BE
- mkdir -p /data/logs/FE
- chmod 777 /data
- chmod 777 /data/logs
-}
-
-# _setup_docker_aliases() - Function that sets up the aliases required by the SDC scripts
-function _setup_docker_aliases {
- cat <<EOL > /root/.bash_aliases
-alias dcls='/data/scripts/docker_clean.sh \$1'
-alias dlog='/data/scripts/docker_login.sh \$1'
-alias drun='/data/scripts/docker_run.sh'
-alias health='/data/scripts/docker_health.sh'
-EOL
-}
-
-# get_sdc_images() - Function that retrieves the SDC docker images
-function get_sdc_images {
- build_docker_image ${src_folders[sdc]}/sdc-docker-base
- build_docker_image ${src_folders[sdc]}/utils/webseal-simulator docker
- if [[ "$build_image" == "True" ]]; then
- compile_src ${src_folders[sdc]}
- for project in catalog-fe test-apis-ci; do
- compile_src ${src_folders[sdc]}/$project
- done
- build_docker_image ${src_folders[sdc]}/sdc-os-chef docker
- else
- for image in elasticsearch init-elasticsearch cassandra kibana backend frontend sanity; do
- pull_onap_image sdc-$image &
- done
- wait_docker_pull
- fi
-}
-
-# install_sdc() - Function that prepares the environment templates and runs the SDC containers
-function install_sdc {
- local ENV_NAME=$dmaap_topic
- local MR_IP_ADDR='10.0.11.1'
-
- pushd ${src_folders[sdc]}/utils/webseal-simulator
- bash scripts/simulator_docker_run.sh
- popd
-
- _init_data_folders
-
- cp ${src_folders[sdc]}/sdc-os-chef/scripts/{docker_run.sh,docker_health.sh,docker_login.sh,docker_clean.sh,simulator_docker_run.sh} /data/scripts
- chmod +x /data/scripts/*.sh
-
- cat ${src_folders[sdc]}/sdc-os-chef/environments/Template.json | sed "s/yyy/"$IP_ADDRESS"/g" > /data/environments/$ENV_NAME.json
- sed -i "s/xxx/"$ENV_NAME"/g" /data/environments/$ENV_NAME.json
- sed -i "s/\"ueb_url_list\":.*/\"ueb_url_list\": \""$MR_IP_ADDR","$MR_IP_ADDR"\",/g" /data/environments/$ENV_NAME.json
- sed -i "s/\"fqdn\":.*/\"fqdn\": [\""$MR_IP_ADDR"\", \""$MR_IP_ADDR"\"]/g" /data/environments/$ENV_NAME.json
-
- install_docker
- if [[ "$skip_get_images" == "False" ]]; then
- bash /data/scripts/docker_run.sh -e $ENV_NAME -l
- else
- bash /data/scripts/docker_run.sh -e $ENV_NAME -r $docker_version -p $(echo $nexus_docker_repo | cut -d':' -f2)
- fi
- install_chefdk
-}
-
-# init_sdc() - Function that initializes SDC services
-function init_sdc {
- mount_external_partition sdb /data/
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "sdc"
- if [[ "$compile_repo" == "True" ]]; then
- compile_repos "sdc"
- fi
- fi
-
- if [[ "$skip_get_images" == "False" ]]; then
- get_sdc_images
- fi
- if [[ "$skip_install" == "False" ]]; then
- install_sdc
- fi
- _setup_docker_aliases
-}
diff --git a/bootstrap/vagrant-onap/lib/sdnc b/bootstrap/vagrant-onap/lib/sdnc
deleted file mode 100755
index 8dacf9e16..000000000
--- a/bootstrap/vagrant-onap/lib/sdnc
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-source /var/onap/ccsdk
-
-# compile_all_sdnc_repos() - Function that compiles the SDNC source repos.
-function compile_all_sdnc_repos {
- for repo in ${repos[sdnc]}; do
- if [[ "$repo" == "sdnc/core" ]]; then
- compile_src ${src_folders[sdnc]}/core/rootpom
- fi
- compile_src ${src_folders[sdnc]}${repo#*sdnc}
- done
-}
-
-# _build_sdnc_images() - Builds SDNC images from source code
-function _build_sdnc_images {
- local folder=${src_folders[sdnc]}/oam
-
- get_ccsdk_images
- install_package unzip
- # The OAM code depends on all the SDNC repos which should be downloaded and compiled first
- if [[ "$compile_repo" != "True" ]]; then
- compile_src $folder
- fi
- for dirc in ubuntu sdnc admportal dgbuilder; do
- build_docker_image $folder/installation/$dirc
- done
-}
-
-# get_sdnc_images() - Build or retrieve necessary images
-function get_sdnc_images {
- if [[ "$build_image" == "True" ]]; then
- _build_sdnc_images
- else
- for image in sdnc-image admportal-sdnc-image dgbuilder-sdnc-image; do
- pull_openecomp_image $image openecomp/$image:latest
- done
- fi
- pull_docker_image mysql/mysql-server:5.6
-}
-
-# install_sdnc() - Download and install SDNC services from source code
-function install_sdnc {
- run_docker_compose ${src_folders[sdnc]}/oam/installation/src/main/yaml
-}
-
-# init_sdnc() - Function that initializes SDNC services
-function init_sdnc {
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "sdnc"
- if [[ "$compile_repo" == "True" ]]; then
- compile_all_sdnc_repos
- fi
- fi
-
- if [[ "$skip_get_images" == "False" ]]; then
- get_sdnc_images
- if [[ "$skip_install" == "False" ]]; then
- start_ODL
- install_sdnc
- fi
- fi
-}
diff --git a/bootstrap/vagrant-onap/lib/vfc b/bootstrap/vagrant-onap/lib/vfc
deleted file mode 100755
index 64f7df00e..000000000
--- a/bootstrap/vagrant-onap/lib/vfc
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-# compile_all_vfc_repos() - Function that compiles the VF-C source repos.
-function compile_all_vfc_repos {
- install_python_package tox
-
- tox_repos=("gvnfm/vnflcm/lcm" "gvnfm/vnfmgr/mgr" "gvnfm/vnfres/res" "nfvo/lcm" \
- "nfvo/driver/vnfm/gvnfm/gvnfmadapter" "nfvo/driver/vnfm/svnfm/zte/vmanager")
- for dirc in ${tox_repos[@]}; do
- pushd ${src_folders[vfc]}/$dirc
- tox -e py27
- popd
- done
-
- # TODO(sshank): Add compile for other vfc_repos. (Java based.)
-
- # Java based:
- # nfvo/catalog
- # nfvo/driver/ems/ems/sems/boco/ems-driver
- # nfvo/driver/sfc/zte/sfc-driver
- # nfvo/driver/vnfm/gvnfm/juju/juju-vnfmadapter
- # nfvo/driver/vnfm/svnfm/huawei/vnfmadapter
- # nfvo/resmanagement
- # nfvo/wfengine
-}
-
-# _build_vfc_image() - Build VFC docker image
-function _build_vfc_image {
- pushd ${src_folders[vfc]}/$1/docker
- sed -i "s/^push_image/#push_image/g" build_image.sh
- sed -i 's|IMAGE_NAME="${DOCKER_REPOSITORY}/${ORG}/${PROJECT}/${IMAGE}"|IMAGE_NAME=${ORG}/${IMAGE}|g' build_image.sh
- ./build_image.sh
- popd
-}
-
-# get_vfc_images() - Build or pull VFC docker images
-function get_vfc_images {
- if [[ "$build_image" == "True" ]]; then
- install_docker
- # Separate methods are required since the image build process will change.
- _build_vfc_image gvnfm/vnflcm/lcm onap/nslcm
- _build_vfc_image gvnfm/vnfmgr/mgr onap/gvnfmdriver
- _build_vfc_image gvnfm/vnfres/res onap/vnfres
- _build_vfc_image nfvo/lcm onap/vnflcm
- _build_vfc_image nfvo/driver/vnfm/gvnfm/gvnfmadapter
-
- build_gvnfm_lcm_image
- build_gvnfm_vnfmgr_image
- build_gvnfm_vnfres_image
- build_nfvo_lcm_image
- build_nfvo_vnfm_gvnfmadapter_image
- # TODO(sshank): Add other VFC component docker image builds.
- else
- for image in gvnfm/vnflcm/lcm gvnfm/vnfmgr/mgr gvnfm/vnfres/res nfvo/lcm nfvo/driver/vnfm/gvnfm/gvnfmadapter; do
- pull_onap_image vfc/$image
- done
- fi
-}
-
-# install_vfc() - Download and install VF-C services from source code
-function install_vfc {
- nslcm_image=`docker images | grep nslcm | grep latest| awk '{print $1 ":" $2}'`
- vnflcm_image=`docker images | grep vnflcm | grep latest| awk '{print $1 ":" $2}'`
- vnfmgr_image=`docker images | grep vnfmgr | grep latest| awk '{print $1 ":" $2}'`
- vnfres_image=`docker images | grep vnfres | grep latest| awk '{print $1 ":" $2}'`
- gvnfmdriver_image=`docker images | grep gvnfmdriver | grep latest| awk '{print $1 ":" $2}'`
-
- run_docker_image -d --name vfc-nslcm -p 8403:8403 -e MSB_ADDR=127.0.0.1 $nslcm_image
- run_docker_image -d --name vfc-vnflcm -p 8801:8801 -e MSB_ADDR=127.0.0.1 $vnflcm_image
- run_docker_image -d --name vfc-vnfmgr -p 8803:8803 -e MSB_ADDR=127.0.0.1 $vnfmgr_image
- run_docker_image -d --name vfc-vnfres -p 8802:8802 -e MSB_ADDR=127.0.0.1 $vnfres_image
- run_docker_image -d --name vfc-gvnfmdriver -p 8484:8484 -e MSB_ADDR=127.0.0.1 $gvnfmdriver_image
-
- # TODO(sshank): Run other VFC component docker images.
-}
-
-# init_vfc() - Function that initializes VF-C services
-function init_vfc {
- install_package libmysqlclient-dev
-
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "vfc"
- if [[ "$compile_repo" == "True" ]]; then
- compile_all_vfc_repos
- fi
- fi
-
- if [[ "$skip_get_images" == "False" ]]; then
- get_vfc_images
- if [[ "$skip_install" == "False" ]]; then
- install_vfc
- fi
- fi
-}
diff --git a/bootstrap/vagrant-onap/lib/vid b/bootstrap/vagrant-onap/lib/vid
deleted file mode 100755
index 0c7ad8536..000000000
--- a/bootstrap/vagrant-onap/lib/vid
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-# _build_vid_images() - Function that builds VID docker images
-function _build_vid_images {
- if [[ "$compile_repo" != "True" ]]; then
- compile_src ${src_folders[vid]}
- fi
- build_docker_image ${src_folders[vid]}/deliveries
-}
-
-# get_vid_images() - Function that retrieves VID docker images
-function get_vid_images {
- if [[ "$build_image" == "True" ]]; then
- _build_vid_images
- else
- pull_openecomp_image vid
- fi
- pull_docker_image mariadb:10
-}
-
-# install_vid() - Download and configure VID source code
-function install_vid {
- vid_image=`docker images | grep vid | grep latest| awk '{print $1 ":" $2}'`
-
- docker rm -f vid-mariadb
- docker rm -f vid-server
-
- run_docker_image --name vid-mariadb -e MYSQL_DATABASE=vid_openecomp -e MYSQL_USER=vidadmin -e MYSQL_PASSWORD=Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U -e MYSQL_ROOT_PASSWORD=LF+tp_1WqgSY -v /opt/vid/lf_config/vid-my.cnf:/etc/mysql/my.cnf -v /opt/vid/lf_config/vid-pre-init.sql:/docker-entrypoint-initdb.d/vid-pre-init.sql -v /var/lib/mysql -d mariadb:10
- run_docker_image -e VID_MYSQL_DBNAME=vid_openecomp -e VID_MYSQL_PASS=Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U --name vid-server -p 8080:8080 --link vid-mariadb:vid-mariadb-docker-instance -d $vid_image
-}
-
-# init_vid() - Function that initializes VID services
-function init_vid {
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "vid"
- if [[ "$compile_repo" == "True" ]]; then
- compile_repos "vid"
- fi
- fi
-
- if [[ "$skip_get_images" == "False" ]]; then
- get_vid_images
- if [[ "$skip_install" == "False" ]]; then
- install_vid
- fi
- fi
-}
diff --git a/bootstrap/vagrant-onap/lib/vnfsdk b/bootstrap/vagrant-onap/lib/vnfsdk
deleted file mode 100755
index ea7fa3332..000000000
--- a/bootstrap/vagrant-onap/lib/vnfsdk
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-# _build_vnfsdk_images() - Builds VNFSDK images from source code
-function _build_vnfsdk_images {
- install_package unzip
- pushd ${src_folders[vnfsdk]}/refrepo/vnfmarket-be/deployment/docker/docker-refrepo
- build_docker_image .
- popd
-}
-
-# get_vnfsdk_images() - Function that retrieves or builds the VNFSDK Docker images
-function get_vnfsdk_images {
- if [[ "$build_image" == "True" ]]; then
- # TODO(sshank): Has errors building.
- _build_vnfsdk_images
- else
- pull_docker_image refrepo:1.0-STAGING-latest
- pull_docker_image refrepo:latest
- fi
-}
-
-# install_vnfsdk() - Function that installs and runs the VNFSDK Docker containers
-function install_vnfsdk {
- install_docker_compose
- pushd ${src_folders[vnfsdk]}/refrepo/vnfmarket-be/deployment/install
- /opt/docker/docker-compose up -d
- popd
-}
-
-# init_vnfsdk() - Init VNFSDK services
-function init_vnfsdk {
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "vnfsdk"
- if [[ "$compile_repo" == "True" ]]; then
- compile_repos "vnfsdk"
- fi
- fi
-
- if [[ "$skip_get_images" == "False" ]]; then
- get_vnfsdk_images
- if [[ "$skip_install" == "False" ]]; then
- install_vnfsdk
- fi
- fi
-}
diff --git a/bootstrap/vagrant-onap/lib/vvp b/bootstrap/vagrant-onap/lib/vvp
deleted file mode 100755
index f24431ee6..000000000
--- a/bootstrap/vagrant-onap/lib/vvp
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-source /var/onap/functions
-
-# _build_vvp_images() - Builds VVP images from source code
-function _build_vvp_images {
- echo "pass"
-}
-
-# get_vvp_images() - Function that retrieves or builds the VVP Docker images
-function get_vvp_images {
- if [[ "$build_image" == "True" ]]; then
- _build_vvp_images
- else
- pull_docker_image refrepo:1.0-STAGING-latest
- pull_docker_image refrepo:latest
- fi
-}
-
-# install_vvp() - Function that installs the VVP Docker images
-function install_vvp {
- echo "pass"
-}
-
-# init_vvp() - Init VVP services
-function init_vvp {
- if [[ "$clone_repo" == "True" ]]; then
- clone_repos "vvp"
- if [[ "$compile_repo" == "True" ]]; then
- compile_repos "vvp"
- fi
- fi
-
- if [[ "$skip_get_images" == "False" ]]; then
- get_vvp_images
- if [[ "$skip_install" == "False" ]]; then
- install_vvp
- fi
- fi
-}