-rw-r--r--  bootstrap/Dockerfile-template           |  19
-rw-r--r--  bootstrap/README-docker.md              | 114
-rwxr-xr-x  bootstrap/installer-docker.sh-template  | 320
-rw-r--r--  bootstrap/pom.xml                       | 173
-rwxr-xr-x  mvn-phase-script.sh                     | 372
-rw-r--r--  pom.xml                                 | 313
-rw-r--r--  scripts/pom.xml                         | 173
7 files changed, 1226 insertions, 258 deletions
diff --git a/bootstrap/Dockerfile-template b/bootstrap/Dockerfile-template
new file mode 100644
index 0000000..f15261b
--- /dev/null
+++ b/bootstrap/Dockerfile-template
@@ -0,0 +1,19 @@
+FROM ubuntu:16.04
+MAINTAINER maintainer
+ENV INSROOT /opt/app
+ENV APPUSER installer
+RUN apt-get update\
+ && apt-get install -y wget python-virtualenv python-pip ssh ed curl\
+ && apt-get clean\
+ && pip install --upgrade pip\
+ && mkdir -p ${INSROOT}/${APPUSER}/blueprints\
+ && useradd -d ${INSROOT}/${APPUSER} ${APPUSER}
+COPY installer-docker.sh ${INSROOT}/${APPUSER}/installer
+# COPY *.yaml ${INSROOT}/${APPUSER}/blueprints/
+RUN wget -P ${INSROOT}/${APPUSER}/blueprints/ {{ ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_platform_blueprints_releases }}/blueprints/centos_vm.yaml
+RUN wget -P ${INSROOT}/${APPUSER}/blueprints/ {{ ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_platform_blueprints_releases }}/blueprints/consul_cluster.yaml
+WORKDIR ${INSROOT}/${APPUSER}
+RUN chown -R ${APPUSER}:${APPUSER} ${INSROOT}/${APPUSER} && chmod +x ${INSROOT}/${APPUSER}/installer
+USER ${APPUSER}
+ENTRYPOINT exec "${INSROOT}/${APPUSER}/installer"
+
diff --git a/bootstrap/README-docker.md b/bootstrap/README-docker.md
new file mode 100644
index 0000000..af4edff
--- /dev/null
+++ b/bootstrap/README-docker.md
@@ -0,0 +1,114 @@
+## Dockerized bootstrap for Cloudify Manager and Consul cluster
+
+1. Preparations
+a) Add a public key to OpenStack and note its name (we will use KEYNAME as the example below). Save the private key (we will use KEYPATH as its example path) and make sure its permissions are globally readable.
+b) Load the following base VM images into OpenStack: a CentOS 7 base image and an Ubuntu 16.04 base image.
+c) Obtain from OpenStack the resource IDs/UUIDs for the resources needed by the inputs.yaml file, as explained below.
+d) DCAEGEN2 bootstrapping assumes that VMs are assigned private IP addresses from a network. Each VM can also be assigned a floating public IP address from another network.
+
+
+2. On the dev machine, edit an inputs.yaml file at INPUTSYAMLPATH
+```
+1 centos7image_id: '7c8d7524-de1f-490b-8418-db294bfa2d65'
+2 ubuntu1604image_id: '4b09c18b-d69e-4ba8-a1bd-562cab91ff20'
+3 flavor_id: '4'
+4 security_group: '55a11193-6559-4f6c-b2d2-0119a9817062'
+5 public_net: 'admin_floating_228_net'
+6 private_net: 'onap-f-net'
+7 openstack:
+8   username: 'MY_LOGIN'
+9   password: 'MY_PASSWORD'
+10   tenant_name: 'TENANT_NAME'
+11   auth_url: 'KEYSTONE_AUTH_URL'
+12   region: 'RegionOne'
+13 keypair: 'KEYNAME'
+14 key_filename: '/opt/dcae/key'
+15 location_prefix: 'onapr1'
+16 location_domain: 'onap-f.onap.homer.att.com'
+17 codesource_url: 'https://nexus01.research.att.com:8443/repository'
+18 codesource_version: 'solutioning01-mte2'
+```
+Here is a line-by-line explanation of the parameters:
+1 UUID of the OpenStack CentOS 7 VM image
+2 UUID of the OpenStack Ubuntu 16.04 VM image
+3 ID of the OpenStack VM flavor to be used by DCAEGEN2 VMs
+4 UUID of the OpenStack security group to be used for DCAEGEN2 VMs
+5 The name of the OpenStack network where public IP addresses are allocated from
+6 The name of the OpenStack network where private IP addresses are allocated from
+7 Group header for OpenStack Keystone parameters
+8 User name
+9 Password
+10 Name of the OpenStack tenant/project where DCAEGEN2 VMs are deployed
+11 OpenStack authentication API URL, for example 'https://horizon.playground.onap.org:5000/v2.0'
+12 Name of the OpenStack region where DCAEGEN2 VMs are deployed, for example 'RegionOne'
+13 Name of the public key uploaded to OpenStack in the Preparation step
+14 Path to the private key within the container (!! Do not change !!)
+15 Prefix (location code) of all DCAEGEN2 VMs
+16 Domain name of the OpenStack tenant, for example 'onapr1.playground.onap.org'
+17 Location of the raw artifact repo hosting additional boot scripts called by DCAEGEN2 VMs' cloud-init, for example:
+   'https://nexus.onap.org/service/local/repositories/raw/content'
+18 Path to the boot scripts within the raw artifact repo, for example: 'org.onap.dcaegen2.deployments.scripts/releases/'
+
+
+3. Pull and run the docker container
+```
+docker pull nexus3.onap.org:10003/onap/org.onap.dcaegen2.deployments.bootstrap:1.0
+
+
+docker run -d -v /home/ubuntu/JFLucasBootStrap/utils/platform_base_installation/key:/opt/app/installer/config/key -v /home/ubuntu/JFLucasBootStrap/utils/platform_base_installation/inputs.yaml:/opt/app/installer/config/inputs.yaml -e "LOCATION=dg2" bootstrap
+
+docker run -d -v KEYPATH:/opt/app/installer/config/key -v INPUTSYAMLPATH:/opt/app/installer/config/inputs.yaml -e "LOCATION=dg2" nexus3.onap.org:10003/onap/org.onap.dcaegen2.deployments.bootstrap:1.0
+
+```
+
+
+`expand.sh` expands the blueprints and the installer script so they
+point to the repo where the necessary artifacts (plugins, type files)
+are stored.
+
+`docker build -t bootstrap .` builds the image.
+
+`docker run -d -v /path/to/worldreadable_private_key:/opt/app/installer/config/key -v /path/to/inputs_file:/opt/app/installer/config/inputs.yaml -e "LOCATION=location_id_here" --name bsexec bootstrap` runs the container and (if you're lucky) does the deployment.
+
+(
+1. the private key is THE private key for the public key added to OpenStack
+2. the paths to the inputs file and the key file are FULL paths starting from /
+3. --name is optional; if omitted, the container name will be random
+)
+
+
+`example-inputs.yaml` is, as the name suggests, an example inputs file. The values in it work in the ONAP-Future environment, except for the user name and password.
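+
+Before launching the container, a quick sanity check of the two files you are about to mount can save a failed run. The check below is only a sketch (it assumes `python` with PyYAML is available on the dev machine and reuses the KEYPATH and INPUTSYAMLPATH placeholders from step 2):
+```
+# the key must be world-readable for the installer user inside the container
+chmod 644 KEYPATH
+ls -l KEYPATH
+
+# the inputs file must be valid YAML
+python -c "import yaml, sys; yaml.safe_load(open(sys.argv[1]))" INPUTSYAMLPATH \
+  && echo "inputs.yaml parses OK"
+```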
+
+To watch the action use
+`docker logs -f bsexec`
+
+The container stays up even after the installation is complete.
+To enter the running container:
+`docker exec -it bsexec /bin/bash`
+Once in the container, to uninstall CM and the host VM and its supporting entities:
+`source dcaeinstall/bin/activate`
+`cfy local uninstall`
+
+(But remember--before uninstalling CM, be sure to go to CM first and uninstall the Consul cluster.)
+
+
+#### TODOs:
+- Integrate with the maven-based template expansion.
+- Integrate with the maven-based Docker build and push to the LF Docker repo
+- Add the full list of plugins to be installed onto CM
+- Separate the Docker stuff from the non-Docker installation. (The blueprints are common to both methods.)
+- Get rid of any AT&T-isms
+- (Maybe) Move the installation of the Cloudify CLI and the sshkeyshare and dnsdesig plugins into the Dockerfile,
+so the image has everything set up and can just enter the venv and start the CentOS VM installation.
+- Figure out what (if anything) needs to change if the container is deployed by Kubernetes rather than vanilla Docker
+- Make sure the script never exits, even in the face of errors. We need the container to stay up so we can do uninstalls.
+- Figure out how to add in the deployments for the rest of the DCAE platform components. (If this container deploys all of DCAE,
+should it move out of the CCSDK project and into DCAE?)
+- Figure out the right way to get the Cloudify OpenStack plugins and the Cloudify Fabric plugins onto CM. Right now there are
+handbuilt wagons in the Nexus repo. (In theory, CM should be able to install these plugins whenever a blueprint calls for them. However,
+they require gcc, and we're not installing gcc on our CM host.)
+- Maybe look at using a different base image--the Ubuntu 16.04 image needs numerous extra packages installed.
+- The blueprint for Consul shows up in Cloudify Manager with the name 'blueprints'. I'll leave it as an exercise for the reader to figure out why
+and how to change it. (See ~ line 248 of installer-docker.sh-template.)
diff --git a/bootstrap/installer-docker.sh-template b/bootstrap/installer-docker.sh-template
new file mode 100755
index 0000000..672f397
--- /dev/null
+++ b/bootstrap/installer-docker.sh-template
@@ -0,0 +1,320 @@
+#!/bin/bash
+#
+# ============LICENSE_START==========================================
+# ===================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ===================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END============================================
+#
+# ECOMP and OpenECOMP are trademarks
+# and service marks of AT&T Intellectual Property.
+#
+
+# URLs for artifacts needed for installation
+DESIGTYPES={{ ONAPTEMPLATE_RAWREPOURL_org_onap_ccsdk_platform_plugins_releases }}/type_files/dnsdesig/dns_types.yaml
+DESIGPLUG={{ ONAPTEMPLATE_RAWREPOURL_org_onap_ccsdk_platform_plugins_releases }}/plugins/dnsdesig-1.0.0-py27-none-any.wgn
+SSHKEYTYPES={{ ONAPTEMPLATE_RAWREPOURL_org_onap_ccsdk_platform_plugins_releases }}/type_files/sshkeyshare/sshkey_types.yaml
+SSHKEYPLUG={{ ONAPTEMPLATE_RAWREPOURL_org_onap_ccsdk_platform_plugins_releases }}/plugins/sshkeyshare-1.0.0-py27-none-any.wgn
+OSPLUGINZIP=https://github.com/cloudify-cosmo/cloudify-openstack-plugin/archive/1.4.zip
+
+# Make sure ssh doesn't prompt for a new host or choke on a new host with an IP it's seen before
+SSHOPTS="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+STARTDIR=$(pwd)
+
+SSHUSER=centos
+PVTKEY=./config/key
+INPUTS=./config/inputs.yaml
+
+if [ "$LOCATION" = "" ]
+then
+    echo 'Environment variable LOCATION not set. Should be set to the location ID for this installation.'
+    exit 1
+fi
+
+set -e
+set -x
+
+# Docker workaround for SSH key
+# In order for the container to be able to access the key when it's mounted from the Docker host,
+# the key file has to be world-readable. But ssh itself will not work with a private key that's world-readable.
+# So we make a copy and change permissions on the copy.
+# NB -- the key on the Docker host has to be world-readable, which means that, from the host machine, you
+# can't use it with ssh. It needs to be a world-readable COPY.
+PVTKEY=./key600
+cp ./config/key ${PVTKEY}
+chmod 600 ${PVTKEY}
+
+# Create a virtual environment
+virtualenv dcaeinstall
+source dcaeinstall/bin/activate
+
+# Install Cloudify
+pip install cloudify==3.4.0
+
+# Install the Cloudify OpenStack plugin
+wget -qO- ${OSPLUGINZIP} > openstack.zip
+pip install openstack.zip
+
+# Spin up a VM
+
+# Get the Designate and SSH key type files and plugins
+mkdir types
+wget -qO- ${DESIGTYPES} > types/dns_types.yaml
+wget -qO- ${SSHKEYTYPES} > types/sshkey_types.yaml
+
+wget -O dnsdesig.wgn ${DESIGPLUG}
+wget -O sshkeyshare.wgn ${SSHKEYPLUG}
+
+wagon install -s dnsdesig.wgn
+wagon install -s sshkeyshare.wgn
+
+## Fix up the inputs file to get the private key locally
+sed -e "s#key_filename:.*#key_filename: $PVTKEY#" < ${INPUTS} > /tmp/local_inputs
+
+# Now install the VM
+# Don't exit on error after this point--keep the container running so we can do uninstalls after a failure
+set +e
+if wget -P ./blueprints/ {{ ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_platform_blueprints_releases }}/blueprints/centos_vm.yaml; then
+    echo "Succeeded in getting the newest centos_vm.yaml"
+else
+    echo "Failed to update centos_vm.yaml, using default version"
+fi
+set -e
+cfy local init --install-plugins -p ./blueprints/centos_vm.yaml -i /tmp/local_inputs -i "datacenter=$LOCATION"
+cfy local execute -w install --task-retries=10
+PUBIP=$(cfy local outputs | grep -Po '"public_ip": "\K.*?(?=")')
+
+
+## The VM is probably not completely ready when the installation finishes, so wait
+sleep 180
+
+echo "Installing Cloudify Manager on ${PUBIP}."
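+
+# Optional extra guard (a sketch): the fixed sleep above is only an estimate, so
+# poll sshd on the new VM, reusing the same ssh options, key, and user as the
+# commands below, before moving on.
+until ssh $SSHOPTS -o BatchMode=yes -o ConnectTimeout=5 -i "$PVTKEY" "$SSHUSER"@"$PUBIP" true
+do
+    echo "Waiting for SSH on ${PUBIP}"
+    sleep 15
+done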
+ +PVTIP=$(ssh $SSHOPTS -i "$PVTKEY" "$SSHUSER"@"$PUBIP" 'echo PVTIP=`curl --silent http://169.254.169.254/2009-04-04/meta-data/local-ipv4`' | grep PVTIP | sed 's/PVTIP=//') +if [ "$PVTIP" = "" ] +then + echo Cannot access specified machine at $PUBIP using supplied credentials + # Don't exit--keep the container up so we can uninstall the VM and supporting entities + while true + do + sleep 300 + done +fi + + +# Copy private key onto Cloudify Manager VM +PVTKEYPATH=$(cat ${INPUTS} | grep "key_filename" | cut -d "'" -f2) +PVTKEYNAME=$(basename $PVTKEYPATH) +PVTKEYDIR=$(dirname $PVTKEYPATH) +scp $SSHOPTS -i $PVTKEY $PVTKEY $SSHUSER@$PUBIP:/tmp/$PVTKEYNAME +ssh -t $SSHOPTS -i $PVTKEY $SSHUSER@$PUBIP sudo mkdir -p $PVTKEYDIR +ssh -t $SSHOPTS -i $PVTKEY $SSHUSER@$PUBIP sudo mv /tmp/$PVTKEYNAME $PVTKEYPATH + +ESMAGIC=$(uuidgen -r) +WORKDIR=$HOME/cmtmp +BSDIR=$WORKDIR/cmbootstrap +PVTKEY2=$BSDIR/id_rsa.cfybootstrap +TMPBASE=$WORKDIR/tmp +TMPDIR=$TMPBASE/lib +SRCS=$WORKDIR/srcs.tar +TOOL=$WORKDIR/tool.py +rm -rf $WORKDIR +mkdir -p $BSDIR $TMPDIR/cloudify/wheels $TMPDIR/cloudify/sources $TMPDIR/manager +chmod 700 $WORKDIR +cp "$PVTKEY" $PVTKEY2 +cat >$TOOL <<!EOF +#!/usr/local/bin/python +# +import yaml +import sys +bsdir = sys.argv[1] +with open(bsdir + '/simple-manager-blueprint-inputs.yaml', 'r') as f: + inpyaml = yaml.load(f) +with open(bsdir + '/simple-manager-blueprint.yaml', 'r') as f: + bpyaml = yaml.load(f) +for param, value in bpyaml['inputs'].items(): + if value.has_key('default') and not inpyaml.has_key(param): + inpyaml[param] = value['default'] +print inpyaml['manager_resources_package'] +!EOF + +# +# Try to disable attempt to download virtualenv when not needed +# +ssh $SSHOPTS -t -i $PVTKEY2 $SSHUSER@$PUBIP 'sudo bash -xc "echo y; mkdir -p /root/.virtualenv; echo '"'"'[virtualenv]'"'"' >/root/.virtualenv/virtualenv.ini; echo no-download=true >>/root/.virtualenv/virtualenv.ini"' + +# Gather installation artifacts +# from documentation, URL for manager blueprints archive +BSURL=https://github.com/cloudify-cosmo/cloudify-manager-blueprints/archive/3.4.tar.gz +BSFILE=$(basename $BSURL) + +umask 022 +wget -qO- $BSURL >$BSDIR/$BSFILE +cd $BSDIR +tar xzvf $BSFILE +MRPURL=$(python $TOOL $BSDIR/cloudify-manager-blueprints-3.4) +MRPFILE=$(basename $MRPURL) +wget -qO- $MRPURL >$TMPDIR/cloudify/sources/$MRPFILE + +tar cf $SRCS -C $TMPDIR cloudify +rm -rf $TMPBASE +# +# Load required package files onto VM +# +scp $SSHOPTS -i $PVTKEY2 $SRCS $SSHUSER@$PUBIP:/tmp/. +ssh -t $SSHOPTS -i $PVTKEY2 $SSHUSER@$PUBIP 'sudo bash -xc "cd /opt; tar xf /tmp/srcs.tar; chown -R root:root /opt/cloudify /opt/manager; rm -rf /tmp/srcs.tar"' +# +# Install config file -- was done by DCAE controller. What now? 
+# +ssh $SSHOPTS -t -i $PVTKEY2 $SSHUSER@$PUBIP 'sudo bash -xc '"'"'mkdir -p /opt/dcae; if [ -f /tmp/cfy-config.txt ]; then cp /tmp/cfy-config.txt /opt/dcae/config.txt && chmod 644 /opt/dcae/config.txt; fi'"'" +cd $WORKDIR + +# +# Check for and set up https certificate information +# +rm -f $BSDIR/cloudify-manager-blueprints-3.4/resources/ssl/server.key $BSDIR/cloudify-manager-blueprints-3.4/resources/ssl/server.crt +ssh -t $SSHOPTS -i $PVTKEY2 $SSHUSER@$PUBIP 'sudo bash -xc "openssl pkcs12 -in /opt/app/dcae-certificate/certificate.pkcs12 -passin file:/opt/app/dcae-certificate/.password -nodes -chain"' | awk 'BEGIN{x="/dev/null";}/-----BEGIN CERTIFICATE-----/{x="'$BSDIR'/cloudify-manager-blueprints-3.4/resources/ssl/server.crt";}/-----BEGIN PRIVATE KEY-----/{x="'$BSDIR'/cloudify-manager-blueprints-3.4/resources/ssl/server.key";}{print >x;}/-----END /{x="/dev/null";}' +USESSL=false +if [ -f $BSDIR/cloudify-manager-blueprints-3.4/resources/ssl/server.key -a -f $BSDIR/cloudify-manager-blueprints-3.4/resources/ssl/server.crt ] +then + USESSL=true +fi +# +# Set up configuration for the bootstrap +# +export CLOUDIFY_USERNAME=admin CLOUDIFY_PASSWORD=encc0fba9f6d618a1a51935b42342b17658 +cd $BSDIR/cloudify-manager-blueprints-3.4 +cp simple-manager-blueprint.yaml bootstrap-blueprint.yaml +ed bootstrap-blueprint.yaml <<'!EOF' +/^node_types:/-1a + plugin_resources: + description: > + Holds any archives that should be uploaded to the manager. + default: [] + dsl_resources: + description: > + Holds a set of dsl required resources + default: [] +. +/^ upload_resources:/a + plugin_resources: { get_input: plugin_resources } +. +w +q +!EOF + +sed <simple-manager-blueprint-inputs.yaml >bootstrap-inputs.yaml \ + -e "s;.*public_ip: .*;public_ip: '$PUBIP';" \ + -e "s;.*private_ip: .*;private_ip: '$PVTIP';" \ + -e "s;.*ssh_user: .*;ssh_user: '$SSHUSER';" \ + -e "s;.*ssh_key_filename: .*;ssh_key_filename: '$PVTKEY2';" \ + -e "s;.*elasticsearch_java_opts: .*;elasticsearch_java_opts: '-Des.cluster.name=$ESMAGIC';" \ + -e "/ssl_enabled: /s/.*/ssl_enabled: $USESSL/" \ + -e "/security_enabled: /s/.*/security_enabled: $USESSL/" \ + -e "/admin_password: /s/.*/admin_password: '$CLOUDIFY_PASSWORD'/" \ + -e "/admin_username: /s/.*/admin_username: '$CLOUDIFY_USERNAME'/" \ + -e "s;.*manager_resources_package: .*;manager_resources_package: 'http://169.254.169.254/nosuchthing/$MRPFILE';" \ + -e "s;.*ignore_bootstrap_validations: .*;ignore_bootstrap_validations: true;" \ + +# Add plugin resources +# TODO Add the other plugins when they're available +cat >>bootstrap-inputs.yaml <<'!EOF' +plugin_resources: + - 'http://repository.cloudifysource.org/org/cloudify3/wagons/cloudify-openstack-plugin/1.4/cloudify_openstack_plugin-1.4-py27-none-linux_x86_64-centos-Core.wgn' + - 'http://repository.cloudifysource.org/org/cloudify3/wagons/cloudify-fabric-plugin/1.4.1/cloudify_fabric_plugin-1.4.1-py27-none-linux_x86_64-centos-Core.wgn' + - '{{ ONAPTEMPLATE_RAWREPOURL_org_onap_ccsdk_platform_plugins_releases }}/plugins/dnsdesig-1.0.0-py27-none-any.wgn' + - '{{ ONAPTEMPLATE_RAWREPOURL_org_onap_ccsdk_platform_plugins_releases }}/plugins/sshkeyshare-1.0.0-py27-none-any.wgn' +!EOF +# +# And away we go +# +cfy init -r +cfy bootstrap --install-plugins -p bootstrap-blueprint.yaml -i bootstrap-inputs.yaml +rm -f resources/ssl/server.key + +# Install Consul VM via a blueprint +cd $STARTDIR +mkdir consul +cd consul +cfy init -r +cfy use -t ${PUBIP} +echo "Deploying Consul VM" + +set +e +if wget -P ../blueprints/ {{ 
ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_platform_blueprints_releases }}/blueprints/consul_cluster.yaml; then + echo "Succeeded in getting the newest consul_cluster.yaml" +else + echo "Failed to update consul_cluster.yaml, using default version" +fi +set -e +cfy install -p ../blueprints/consul_cluster.yaml -d consul -i ../${INPUTS} -i "datacenter=$LOCATION" + +# Get the floating IP for one member of the cluster +# Needed for instructing the Consul agent on CM host to join the cluster +CONSULIP=$(cfy deployments outputs -d consul | grep -Po 'Value: \K.*') +echo Consul deployed at $CONSULIP + +# Wait for Consul API to come up +until curl http://$CONSULIP:8500/v1/agent/services +do + echo Waiting for Consul API + sleep 60 +done + +# Wait for a leader to be elected +until [[ "$(curl -Ss http://$CONSULIP:8500/v1/status/leader)" != '""' ]] +do + echo Waiting for leader + sleep 30 +done + +# Instruct the client-mode Consul agent running on the CM to join the cluster +curl http://$PUBIP:8500/v1/agent/join/$CONSULIP + +# Register Cloudify Manager in Consul via the local agent on CM host + +REGREQ=" +{ + \"Name\" : \"cloudify_manager\", + \"ID\" : \"cloudify_manager\", + \"Tags\" : [\"http://${PUBIP}/api/v2.1\"], + \"Address\": \"${PUBIP}\", + \"Port\": 80, + \"Check\" : { + \"Name\" : \"cloudify_manager_health\", + \"Interval\" : \"300s\", + \"HTTP\" : \"http://${PUBIP}/api/v2.1/status\", + \"Status\" : \"passing\", + \"DeregisterCriticalServiceAfter\" : \"30m\" + } +} +" + +curl -X PUT -H 'Content-Type: application/json' --data-binary "$REGREQ" http://$PUBIP:8500/v1/agent/service/register +# Make Consul address available to plugins on Cloudify Manager +# TODO probably not necessary anymore +ENVINI=$(mktemp) +cat <<!EOF > $ENVINI +[$LOCATION] +CONSUL_HOST=$CONSULIP +CONFIG_BINDING_SERVICE=config_binding_service +!EOF +scp $SSHOPTS -i ../$PVTKEY $ENVINI $SSHUSER@$PUBIP:/tmp/env.ini +ssh -t $SSHOPTS -i ../$PVTKEY $SSHUSER@$PUBIP sudo mv /tmp/env.ini /opt/env.ini +rm $ENVINI + +while true +do + sleep 300 +done diff --git a/bootstrap/pom.xml b/bootstrap/pom.xml new file mode 100644 index 0000000..06f6977 --- /dev/null +++ b/bootstrap/pom.xml @@ -0,0 +1,173 @@ +<?xml version="1.0"?> +<!-- +================================================================================ +Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +================================================================================ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +============LICENSE_END========================================================= + +ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+--> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.onap.dcaegen2</groupId> + <artifactId>deployments</artifactId> + <version>1.0.0-SNAPSHOT</version> + </parent> + <groupId>org.onap.dcaegen2.deployments</groupId> + <artifactId>bootstrap</artifactId> + <name>bootstrap</name> + <version>1.0.0-SNAPSHOT</version> + <url>http://maven.apache.org</url> + <properties> + <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> + <sonar.skip>true</sonar.skip> + <sonar.sources>.</sonar.sources> + <!-- customize the SONARQUBE URL --> + <!-- sonar.host.url>http://localhost:9000</sonar.host.url --> + <!-- below are language dependent --> + <!-- for Python --> + <sonar.language>py</sonar.language> + <sonar.pluginName>Python</sonar.pluginName> + <sonar.inclusions>**/*.py</sonar.inclusions> + <!-- for JavaScaript --> + <!-- + <sonar.language>js</sonar.language> + <sonar.pluginName>JS</sonar.pluginName> + <sonar.inclusions>**/*.js</sonar.inclusions> + --> + </properties> + <build> + <finalName>${project.artifactId}-${project.version}</finalName> + <plugins> + <!-- plugin> + <artifactId>maven-assembly-plugin</artifactId> + <version>2.4.1</version> + <configuration> + <descriptors> + <descriptor>assembly/dep.xml</descriptor> + </descriptors> + </configuration> + <executions> + <execution> + <id>make-assembly</id> + <phase>package</phase> + <goals> + <goal>single</goal> + </goals> + </execution> + </executions> + </plugin --> + <!-- now we configure custom action (calling a script) at various lifecycle phases --> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>exec-maven-plugin</artifactId> + <version>1.2.1</version> + <executions> + <execution> + <id>clean phase script</id> + <phase>clean</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>clean</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>generate-sources script</id> + <phase>generate-sources</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>generate-sources</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>compile script</id> + <phase>compile</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>compile</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>package script</id> + <phase>package</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>package</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>test script</id> + <phase>test</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>test</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>install script</id> + <phase>install</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>install</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>deploy script</id> + <phase>deploy</phase> + 
<goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>deploy</argument> + </arguments> + </configuration> + </execution> + </executions> + </plugin> + </plugins> + </build> +</project> diff --git a/mvn-phase-script.sh b/mvn-phase-script.sh index 45e62fc..567850d 100755 --- a/mvn-phase-script.sh +++ b/mvn-phase-script.sh @@ -1,92 +1,388 @@ #!/bin/bash -# ============LICENSE_START==================================================== -# org.onap.dcae -# ============================================================================= + +# ================================================================================ # Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. -# ============================================================================= +# ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -# ============LICENSE_END====================================================== +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +set -ex + echo "running script: [$0] for module [$1] at stage [$2]" -echo "=> Prepare environment " +MVN_PROJECT_MODULEID="$1" +MVN_PHASE="$2" + + +FQDN="${MVN_PROJECT_GROUPID}.${MVN_PROJECT_ARTIFACTID}" +if [ "$MVN_PROJECT_MODULEID" == "__" ]; then + MVN_PROJECT_MODULEID="" +fi + +if [[ "$MVN_PROJECT_VERSION" == *SNAPSHOT ]]; then + echo "=> for SNAPSHOT artifact build" + MVN_DEPLOYMENT_TYPE='SNAPSHOT' +else + echo "=> for STAGING/RELEASE artifact build" + MVN_DEPLOYMENT_TYPE='STAGING' +fi +echo "MVN_DEPLOYMENT_TYPE is [$MVN_DEPLOYMENT_TYPE]" -# This is the base for where "deploy" will upload -# MVN_NEXUSPROXY is set in the pom.xml -REPO=$MVN_NEXUSPROXY/content/sites/raw TIMESTAMP=$(date +%C%y%m%dT%H%M%S) -export BUILD_NUMBER="${TIMESTAMP}" # expected environment variables if [ -z "${MVN_NEXUSPROXY}" ]; then echo "MVN_NEXUSPROXY environment variable not set. Cannot proceed" exit fi -MVN_NEXUSPROXY_HOST=$(echo $MVN_NEXUSPROXY |cut -f3 -d'/' | cut -f1 -d':') +MVN_NEXUSPROXY_HOST=$(echo "$MVN_NEXUSPROXY" |cut -f3 -d'/' | cut -f1 -d':') +echo "=> Nexus Proxy at $MVN_NEXUSPROXY_HOST, $MVN_NEXUSPROXY" +if [ -z "$WORKSPACE" ]; then + WORKSPACE=$(pwd) +fi + +if [ -z "$SETTINGS_FILE" ]; then + echo "SETTINGS_FILE environment variable not set. 
Cannot proceed" + exit +fi + -# use the version text detect which phase we are in in LF CICD process: verify, merge, or (daily) release # mvn phase in life cycle MVN_PHASE="$2" + +echo "MVN_PROJECT_MODULEID is [$MVN_PROJECT_MODULEID]" +echo "MVN_PHASE is [$MVN_PHASE]" +echo "MVN_PROJECT_GROUPID is [$MVN_PROJECT_GROUPID]" +echo "MVN_PROJECT_ARTIFACTID is [$MVN_PROJECT_ARTIFACTID]" +echo "MVN_PROJECT_VERSION is [$MVN_PROJECT_VERSION]" +echo "MVN_NEXUSPROXY is [$MVN_NEXUSPROXY]" +echo "MVN_RAWREPO_BASEURL_UPLOAD is [$MVN_RAWREPO_BASEURL_UPLOAD]" +echo "MVN_RAWREPO_BASEURL_DOWNLOAD is [$MVN_RAWREPO_BASEURL_DOWNLOAD]" +MVN_RAWREPO_HOST=$(echo "$MVN_RAWREPO_BASEURL_UPLOAD" | cut -f3 -d'/' |cut -f1 -d':') +echo "MVN_RAWREPO_HOST is [$MVN_RAWREPO_HOST]" +echo "MVN_RAWREPO_SERVERID is [$MVN_RAWREPO_SERVERID]" +echo "MVN_DOCKERREGISTRY_DAILY is [$MVN_DOCKERREGISTRY_DAILY]" +echo "MVN_DOCKERREGISTRY_RELEASE is [$MVN_DOCKERREGISTRY_RELEASE]" + +clean_templated_files() +{ + TEMPLATE_FILES=$(find . -name "*-template") + for F in $TEMPLATE_FILES; do + F2=$(echo "$F" | sed 's/-template$//') + rm -f "$F2" + done +} + + +expand_templates() +{ + # set up env variables, get ready for template resolution + # NOTE: CCSDK artifacts do not distinguish REALESE vs SNAPSHOTs + export ONAPTEMPLATE_RAWREPOURL_org_onap_ccsdk_platform_plugins_releases="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.ccsdk.platform.plugins" + export ONAPTEMPLATE_RAWREPOURL_org_onap_ccsdk_platform_plugins_snapshots="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.ccsdk.platform.plugins" + export ONAPTEMPLATE_RAWREPOURL_org_onap_ccsdk_platform_blueprints_releases="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.ccsdk.platform.blueprints" + export ONAPTEMPLATE_RAWREPOURL_org_onap_ccsdk_platform_blueprints_snapshots="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.ccsdk.platform.blueprints" + + export ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_releases="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.dcaegen2/releases" + export ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_snapshots="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.dcaegen2/snapshots" + export ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_platform_plugins_releases="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.dcaegen2.platform.plugins/releases" + export ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_platform_plugins_snapshots="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.dcaegen2.platform.plugins/snapshots" + export ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_platform_blueprints_releases="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.dcaegen2.platform.blueprints/releases" + export ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_platform_blueprints_snapshots="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.dcaegen2.platform.blueprints/snapshots" + + export ONAPTEMPLATE_PYPIURL_org_onap_dcaegen2="${MVN_NEXUSPROXY}/content/sites/pypi" + + export ONAPTEMPLATE_DOCKERREGURL_org_onap_dcaegen2_releases="$MVN_DOCKERREGISTRY_DAILY" + export ONAPTEMPLATE_DOCKERREGURL_org_onap_dcaegen2_snapshots="$MVN_DOCKERREGISTRY_DAILY/snapshots" + + + TEMPLATE_FILES=$(find . 
-name "*-template") + for F in $TEMPLATE_FILES; do + F2=$(echo "$F" | sed 's/-template$//') + cp "$F" "$F2" + MOD=$(stat --format '%a' "$F") + chmod "$MOD" "$F2" + done + + + TEMPLATES=$(env |grep ONAPTEMPLATE) + if [ -z "$TEMPLATES" ]; then + return 0 + fi + + echo "====> Resolving the following temaplate from environment variables " + echo "[$TEMPLATES]" + SELFFILE=$(echo "$0" | rev | cut -f1 -d '/' | rev) + for TEMPLATE in $TEMPLATES; do + KEY=$(echo "$TEMPLATE" | cut -f1 -d'=') + VALUE=$(echo "$TEMPLATE" | cut -f2 -d'=') + VALUE2=$(echo "$TEMPLATE" | cut -f2 -d'=' |sed 's/\//\\\//g') + FILES=$(grep -rlv "$KEY") + + if [ -z "$FILES" ]; then + continue + fi + + # assuming FILES is not longer than 2M bytes, the limit for variable value max size on this VM + for F in $FILES; do + if [[ $F == *"$SELFFILE" ]]; then + continue + fi + if [[ "$F" == *-template ]]; then + continue + fi + + echo "======> Resolving template $KEY to value $VALUE for file $F" + sed -i "s/{{[[:space:]]*$KEY[[:space:]]*}}/$VALUE2/g" "$F" + #cat "$F" + done + + #if [ ! -z "$FILES" ]; then + # echo "====> Resolving template $VALUE to value $VALUE" + # #CMD="grep -rl \"$VALUE\" | tr '\n' '\0' | xargs -0 sed -i \"s/{{[[:space:]]*$VALUE[[:space:]]*}}/$VALUE/g\"" + # grep -rl "$KEY" | tr '\n' '\0' | xargs -0 sed -i 's/$KEY/$VALUE2/g' + # #echo $CMD + # #eval $CMD + #fi + done + echo "====> Done template reolving" +} + + +run_tox_test() +{ + set -x + CURDIR=$(pwd) + TOXINIS=$(find . -name "tox.ini") + for TOXINI in "${TOXINIS[@]}"; do + DIR=$(echo "$TOXINI" | rev | cut -f2- -d'/' | rev) + cd "${CURDIR}/${DIR}" + rm -rf ./venv-tox ./.tox + virtualenv ./venv-tox + source ./venv-tox/bin/activate + pip install --upgrade pip + pip install --upgrade tox argparse + pip freeze + tox + deactivate + rm -rf ./venv-tox ./.tox + done +} + +build_wagons() +{ + rm -rf ./*.wgn venv-pkg + + SETUPFILES=$(find . -name "setup.py") + for SETUPFILE in $SETUPFILES; do + PLUGIN_DIR=$(echo "$SETUPFILE" |rev | cut -f 2- -d '/' |rev) + PLUGIN_NAME=$(grep 'name' "$SETUPFILE" | cut -f2 -d'=' | sed 's/[^0-9a-zA-Z\.]*//g') + PLUGIN_VERSION=$(grep 'version' "$SETUPFILE" | cut -f2 -d'=' | sed 's/[^0-9\.]*//g') + + echo "In $PLUGIN_DIR, $PLUGIN_NAME, $PLUGIN_VERSION" + + virtualenv ./venv-pkg + source ./venv-pkg/bin/activate + pip install --upgrade pip + pip install wagon + wagon create --format tar.gz "$PLUGIN_DIR" + deactivate + rm -rf venv-pkg + + PKG_FILE_NAMES=( "${PLUGIN_NAME}-${PLUGIN_VERSION}"*.wgn ) + echo Built package: "${PKG_FILE_NAMES[@]}" + done +} + + +upload_raw_file() +{ + # Extract the username and password to the nexus repo from the settings file + USER=$(xpath -q -e "//servers/server[id='$MVN_RAWREPO_SERVERID']/username/text()" "$SETTINGS_FILE") + PASS=$(xpath -q -e "//servers/server[id='$MVN_RAWREPO_SERVERID']/password/text()" "$SETTINGS_FILE") + NETRC=$(mktemp) + echo "machine $MVN_RAWREPO_HOST login $USER password $PASS" > "$NETRC" + + REPO="$MVN_RAWREPO_BASEURL_UPLOAD" + + OUTPUT_FILE=$1 + EXT=$(echo "$OUTPUT_FILE" | rev |cut -f1 -d '.' 
|rev) + if [ "$EXT" == 'yaml' ]; then + OUTPUT_FILE_TYPE='text/x-yaml' + elif [ "$EXT" == 'sh' ]; then + OUTPUT_FILE_TYPE='text/x-shellscript' + elif [ "$EXT" == 'gz' ]; then + OUTPUT_FILE_TYPE='application/gzip' + elif [ "$EXT" == 'wgn' ]; then + OUTPUT_FILE_TYPE='application/gzip' + else + OUTPUT_FILE_TYPE='application/octet-stream' + fi + + + if [ "$MVN_DEPLOYMENT_TYPE" == 'SNAPSHOT' ]; then + SEND_TO="${REPO}/${FQDN}/snapshots" + elif [ "$MVN_DEPLOYMENT_TYPE" == 'STAGING' ]; then + SEND_TO="${REPO}/${FQDN}/releases" + else + echo "Unreconfnized deployment type, quit" + exit + fi + if [ ! -z "$MVN_PROJECT_MODULEID" ]; then + SEND_TO="$SEND_TO/$MVN_PROJECT_MODULEID" + fi + + echo "Sending ${OUTPUT_FILE} to Nexus: ${SEND_TO}" + curl -vkn --netrc-file "${NETRC}" --upload-file "${OUTPUT_FILE}" -X PUT -H "Content-Type: $OUTPUT_FILE_TYPE" "${SEND_TO}/${OUTPUT_FILE}-${MVN_PROJECT_VERSION}-${TIMESTAMP}" + curl -vkn --netrc-file "${NETRC}" --upload-file "${OUTPUT_FILE}" -X PUT -H "Content-Type: $OUTPUT_FILE_TYPE" "${SEND_TO}/${OUTPUT_FILE}-${MVN_PROJECT_VERSION}" + curl -vkn --netrc-file "${NETRC}" --upload-file "${OUTPUT_FILE}" -X PUT -H "Content-Type: $OUTPUT_FILE_TYPE" "${SEND_TO}/${OUTPUT_FILE}" +} + + + +upload_wagons_and_type_yamls() +{ + WAGONS=$(ls -1 ./*.wgn) + for WAGON in $WAGONS ; do + WAGON_NAME=$(echo "$WAGON" | cut -f1 -d '-') + WAGON_VERSION=$(echo "$WAGON" | cut -f2 -d '-') + WAGON_TYPEFILE=$(grep -rl "$WAGON_NAME" | grep yaml | head -1) + + upload_raw_file "$WAGON" + upload_raw_file "$WAGON_TYPEFILE" + done +} + +upload_files_of_extension() +{ + FILES=$(ls -1 ./*."$1") + for F in $FILES ; do + upload_raw_file "$F" + done +} + + + +build_and_push_docker() +{ + IMAGENAME="onap/${FQDN}.${MVN_PROJECT_MODULEID}" + IMAGENAME=$(echo "$IMAGENAME" | sed -e 's/_*$//g' -e 's/\.*$//g') + + # use the major and minor version of the MVN artifact version as docker image version + VERSION="${MVN_PROJECT_VERSION//[^0-9.]/}" + VERSION2=$(echo "$VERSION" | cut -f1-2 -d'.') + + LFQI="${IMAGENAME}:${VERSION}-${TIMESTAMP}" + BUILD_PATH="${WORKSPACE}" + # build a docker image + docker build --rm -f "${WORKSPACE}"/Dockerfile -t "${LFQI}" "${BUILD_PATH}" + + REPO="" + if [ $MVN_DEPLOYMENT_TYPE == "SNAPSHOT" ]; then + REPO=$MVN_DOCKERREGISTRY_DAILY + elif [ $MVN_DEPLOYMENT_TYPE == "STAGING" ]; then + # there seems to be no staging docker registry? set to use SNAPSHOT also + #REPO=$MVN_DOCKERREGISTRY_RELEASE + REPO=$MVN_DOCKERREGISTRY_DAILY + else + echo "Fail to determine DEPLOYMENT_TYPE" + REPO=$MVN_DOCKERREGISTRY_DAILY + fi + echo "DEPLOYMENT_TYPE is: $MVN_DEPLOYMENT_TYPE, repo is $REPO" + + if [ ! -z "$REPO" ]; then + USER=$(xpath -e "//servers/server[id='$REPO']/username/text()" "$SETTINGS_FILE") + PASS=$(xpath -e "//servers/server[id='$REPO']/password/text()" "$SETTINGS_FILE") + if [ -z "$USER" ]; then + echo "Error: no user provided" + fi + if [ -z "$PASS" ]; then + echo "Error: no password provided" + fi + [ -z "$PASS" ] && PASS_PROVIDED="<empty>" || PASS_PROVIDED="<password>" + echo docker login "$REPO" -u "$USER" -p "$PASS_PROVIDED" + docker login "$REPO" -u "$USER" -p "$PASS" + + if [ $MVN_DEPLOYMENT_TYPE == "SNAPSHOT" ]; then + REPO="$REPO/snapshots" + elif [ $MVN_DEPLOYMENT_TYPE == "STAGING" ]; then + # there seems to be no staging docker registry? 
set to use SNAPSHOT also + #REPO=$MVN_DOCKERREGISTRY_RELEASE + REPO="$REPO" + else + echo "Fail to determine DEPLOYMENT_TYPE" + REPO="$REPO/unknown" + fi + + OLDTAG="${LFQI}" + PUSHTAGS="${REPO}/${IMAGENAME}:${VERSION2}-${TIMESTAMP} ${REPO}/${IMAGENAME}:${VERSION2} ${REPO}/${IMAGENAME}:${VERSION2}-latest" + for NEWTAG in ${PUSHTAGS} + do + echo "tagging ${OLDTAG} to ${NEWTAG}" + docker tag "${OLDTAG}" "${NEWTAG}" + echo "pushing ${NEWTAG}" + docker push "${NEWTAG}" + OLDTAG="${NEWTAG}" + done + fi + +} + + + +# Customize the section below for each project case $MVN_PHASE in clean) echo "==> clean phase script" - # Nothing to do + clean_templated_files + rm -rf ./venv-* ./*.wgn ;; generate-sources) echo "==> generate-sources phase script" - # Nothing to do + expand_templates ;; compile) echo "==> compile phase script" - # Nothing to do ;; test) echo "==> test phase script" - # Nothing to do ;; package) echo "==> package phase script" - # Nothing to do ;; install) echo "==> install phase script" - # Nothing to do ;; deploy) echo "==> deploy phase script" - # Just upload files to Nexus - set -e -x - function setnetrc { - # Turn off -x so won't leak the credentials - set +x - hostport=$(echo $1 | cut -f3 -d /) - host=$(echo $hostport | cut -f1 -d:) - settings=$HOME/.m2/settings.xml - ( echo machine $host; echo login $(xpath $settings "//servers/server[id='$MVN_SERVER_ID']/username/text()"); echo password $(xpath $settings "//servers/server[id='$MVN_SERVER_ID']/password/text()") ) >$HOME/.netrc - chmod 600 $HOME/.netrc - set -x - } - function putraw { - curl -X PUT -H "Content-Type: text/plain" --netrc --upload-file $1 --url $REPO/$2 - } - setnetrc $REPO - putraw scripts/instconsulagentub16.sh cloud_init/instconsulagentub16.sh - putraw scripts/cdap-init.sh cloud_init/cdap-init.sh - set +e +x + case $MVN_PROJECT_MODULEID in + bootstrap) + build_and_push_docker + ;; + scripts) + upload_files_of_extension sh + ;; + *) + echo "====> unknown mvn project module" + ;; + esac ;; *) echo "==> unprocessed phase" @@ -29,15 +29,23 @@ limitations under the License. <!--- CHANGE THE FOLLOWING 3 OBJECTS for your own repo --> <groupId>org.onap.dcaegen2</groupId> <artifactId>deployments</artifactId> - <name>dcaegen2-deployments</name> + <name>deployments</name> <version>1.0.0-SNAPSHOT</version> <url>http://maven.apache.org</url> + <packaging>pom</packaging> + + <modules> + <module>bootstrap</module> + <module>scripts</module> + </modules> + <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> + <sonar.skip>true</sonar.skip> <sonar.sources>.</sonar.sources> <!-- customize the SONARQUBE URL --> - <sonar.host.url>http://localhost:9000</sonar.host.url> + <!-- sonar.host.url>http://localhost:9000</sonar.host.url --> <!-- below are language dependent --> <!-- for Python --> <sonar.language>py</sonar.language> @@ -50,237 +58,102 @@ limitations under the License. <sonar.inclusions>**/*.js</sonar.inclusions> --> </properties> - <build> <finalName>${project.artifactId}-${project.version}</finalName> <pluginManagement> <plugins> - <plugin> - <groupId>org.codehaus.mojo</groupId> - <artifactId>sonar-maven-plugin</artifactId> - <version>2.7.1</version> - </plugin> - - <!-- nexus-staging-maven-plugin is called during deploy phase by default behavior. 
- we do not need it --> + <!-- the following plugins are invoked from oparent, we do not need them --> <plugin> <groupId>org.sonatype.plugins</groupId> <artifactId>nexus-staging-maven-plugin</artifactId> <version>1.6.7</version> <configuration> <skipNexusStagingDeployMojo>true</skipNexusStagingDeployMojo> + <skip>true</skip> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-deploy-plugin</artifactId> + <!-- This version supports the "deployAtEnd" parameter --> + <version>2.8</version> + <configuration> + <skip>true</skip> + </configuration> + </plugin> + <!-- first disable the default Java plugins at various stages --> + <!-- maven-resources-plugin is called during "*resource" phases by default behavior. it prepares + the resources dir. we do not need it --> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-resources-plugin</artifactId> + <version>2.6</version> + <configuration> + <skip>true</skip> + </configuration> + </plugin> + <!-- maven-compiler-plugin is called during "compile" phases by default behavior. we do not need it --> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-compiler-plugin</artifactId> + <version>3.1</version> + <configuration> + <skip>true</skip> + </configuration> + </plugin> + <!-- maven-jar-plugin is called during "compile" phase by default behavior. we do not need it --> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <version>2.4</version> + <executions> + <execution> + <id>default-jar</id> + <phase/> + </execution> + </executions> + </plugin> + <!-- maven-install-plugin is called during "install" phase by default behavior. it tries to copy stuff under + target dir to ~/.m2. we do not need it --> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-install-plugin</artifactId> + <version>2.4</version> + <configuration> + <skip>true</skip> + </configuration> + </plugin> + <!-- maven-surefire-plugin is called during "test" phase by default behavior. it triggers junit test. 
+ we do not need it --> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-surefire-plugin</artifactId> + <version>2.12.4</version> + <configuration> + <skipTests>true</skipTests> + </configuration> + </plugin> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>exec-maven-plugin</artifactId> + <version>1.2.1</version> + <configuration> + <executable>${session.executionRootDirectory}/mvn-phase-script.sh</executable> + <environmentVariables> + <!-- make mvn properties as env for our script --> + <MVN_PROJECT_GROUPID>${project.parent.groupId}</MVN_PROJECT_GROUPID> + <MVN_PROJECT_ARTIFACTID>${project.parent.artifactId}</MVN_PROJECT_ARTIFACTID> + <MVN_PROJECT_VERSION>${project.parent.version}</MVN_PROJECT_VERSION> + <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> + <MVN_RAWREPO_BASEURL_UPLOAD>${onap.nexus.rawrepo.baseurl.upload}</MVN_RAWREPO_BASEURL_UPLOAD> + <MVN_RAWREPO_BASEURL_DOWNLOAD>${onap.nexus.rawrepo.baseurl.download}</MVN_RAWREPO_BASEURL_DOWNLOAD> + <MVN_RAWREPO_SERVERID>${onap.nexus.rawrepo.serverid}</MVN_RAWREPO_SERVERID> + <MVN_DOCKERREGISTRY_DAILY>${onap.nexus.dockerregistry.daily}</MVN_DOCKERREGISTRY_DAILY> + <MVN_DOCKERREGISTRY_RELEASE>${onap.nexus.dockerregistry.release}</MVN_DOCKERREGISTRY_RELEASE> + </environmentVariables> </configuration> </plugin> </plugins> </pluginManagement> - - <plugins> - - <!-- first disable the default Java plugins at various stages --> - <!-- maven-resources-plugin is called during "*resource" phases by default behavior. it prepares the resources - dir. we do not need it --> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-resources-plugin</artifactId> - <version>2.6</version> - <configuration> - <skip>true</skip> - </configuration> - </plugin> - - <!-- maven-compiler-plugin is called during "compile" phases by default behavior. we do not need it --> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-compiler-plugin</artifactId> - <version>3.1</version> - <configuration> - <skip>true</skip> - </configuration> - </plugin> - - <!-- maven-jar-plugin is called during "compile" phase by default behavior. we do not need it --> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-jar-plugin</artifactId> - <version>2.4</version> - <executions> - <execution> - <id>default-jar</id> - <phase/> - </execution> - </executions> - </plugin> - - <!-- maven-install-plugin is called during "install" phase by default behavior. it tries to copy stuff under - target dir to ~/.m2. we do not need it --> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-install-plugin</artifactId> - <version>2.4</version> - <configuration> - <skip>true</skip> - </configuration> - </plugin> - - <!-- maven-surefire-plugin is called during "test" phase by default behavior. it triggers junit test. 
- we do not need it --> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-surefire-plugin</artifactId> - <version>2.12.4</version> - <configuration> - <skipTests>true</skipTests> - </configuration> - </plugin> - - <!-- now we configure custom action (calling a script) at various lifecycle phases --> - <plugin> - <groupId>org.codehaus.mojo</groupId> - <artifactId>exec-maven-plugin</artifactId> - <version>1.2.1</version> - <executions> - <execution> - <id>clean phase script</id> - <phase>clean</phase> - <goals><goal>exec</goal></goals> - <configuration> - <executable>${session.executionRootDirectory}/mvn-phase-script.sh</executable> - <arguments> - <argument>${project.artifactId}</argument> - <argument>clean</argument> - </arguments> - <environmentVariables> - <!-- make mvn properties as env for our script --> - <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID> - <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID> - <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION> - <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> - </environmentVariables> - </configuration> - </execution> - - <execution> - <id>generate-sources script</id> - <phase>generate-sources</phase> - <goals><goal>exec</goal></goals> - <configuration> - <executable>mvn-phase-script.sh</executable> - <arguments> - <argument>${project.artifactId}</argument> - <argument>generate-sources</argument> - </arguments> - <environmentVariables> - <!-- make mvn properties as env for our script --> - <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID> - <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID> - <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION> - <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> - </environmentVariables> - </configuration> - </execution> - - <execution> - <id>compile script</id> - <phase>compile</phase> - <goals><goal>exec</goal></goals> - <configuration> - <executable>mvn-phase-script.sh</executable> - <arguments> - <argument>${project.artifactId}</argument> - <argument>compile</argument> - </arguments> - <environmentVariables> - <!-- make mvn properties as env for our script --> - <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID> - <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID> - <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION> - <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> - </environmentVariables> - </configuration> - </execution> - - <execution> - <id>package script</id> - <phase>package</phase> - <goals><goal>exec</goal></goals> - <configuration> - <executable>mvn-phase-script.sh</executable> - <arguments> - <argument>${project.artifactId}</argument> - <argument>package</argument> - </arguments> - <environmentVariables> - <!-- make mvn properties as env for our script --> - <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID> - <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID> - <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION> - <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> - </environmentVariables> - </configuration> - </execution> - - <execution> - <id>test script</id> - <phase>test</phase> - <goals><goal>exec</goal></goals> - <configuration> - <executable>mvn-phase-script.sh</executable> - <arguments> - <argument>${project.artifactId}</argument> - <argument>test</argument> - </arguments> - <environmentVariables> - <!-- make mvn properties as env for our script --> - 
<MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID> - <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID> - <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION> - <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> - </environmentVariables> - </configuration> - </execution> - - <execution> - <id>install script</id> - <phase>install</phase> - <goals><goal>exec</goal></goals> - <configuration> - <executable>mvn-phase-script.sh</executable> - <arguments> - <argument>${project.artifactId}</argument> - <argument>install</argument> - </arguments> - <environmentVariables> - <!-- make mvn properties as env for our script --> - <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID> - <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID> - <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION> - <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> - </environmentVariables> - </configuration> - </execution> - - <execution> - <id>deploy script</id> - <phase>deploy</phase> - <goals><goal>exec</goal></goals> - <configuration> - <executable>mvn-phase-script.sh</executable> - <arguments> - <argument>${project.artifactId}</argument> - <argument>deploy</argument> - </arguments> - <environmentVariables> - <!-- make mvn properties as env for our script --> - <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID> - <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID> - <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION> - <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> - <MVN_SERVER_ID>${project.distributionManagement.snapshotRepository.id}</MVN_SERVER_ID> - </environmentVariables> - </configuration> - </execution> - </executions> - </plugin> - </plugins> </build> </project> diff --git a/scripts/pom.xml b/scripts/pom.xml new file mode 100644 index 0000000..de2ca84 --- /dev/null +++ b/scripts/pom.xml @@ -0,0 +1,173 @@ +<?xml version="1.0"?> +<!-- +================================================================================ +Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +================================================================================ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +============LICENSE_END========================================================= + +ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+--> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.onap.dcaegen2</groupId> + <artifactId>deployments</artifactId> + <version>1.0.0-SNAPSHOT</version> + </parent> + <groupId>org.onap.dcaegen2.deployments</groupId> + <artifactId>scripts</artifactId> + <name>scripts</name> + <version>1.0.0-SNAPSHOT</version> + <url>http://maven.apache.org</url> + <properties> + <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> + <sonar.skip>true</sonar.skip> + <sonar.sources>.</sonar.sources> + <!-- customize the SONARQUBE URL --> + <!-- sonar.host.url>http://localhost:9000</sonar.host.url --> + <!-- below are language dependent --> + <!-- for Python --> + <sonar.language>py</sonar.language> + <sonar.pluginName>Python</sonar.pluginName> + <sonar.inclusions>**/*.py</sonar.inclusions> + <!-- for JavaScaript --> + <!-- + <sonar.language>js</sonar.language> + <sonar.pluginName>JS</sonar.pluginName> + <sonar.inclusions>**/*.js</sonar.inclusions> + --> + </properties> + <build> + <finalName>${project.artifactId}-${project.version}</finalName> + <plugins> + <!-- plugin> + <artifactId>maven-assembly-plugin</artifactId> + <version>2.4.1</version> + <configuration> + <descriptors> + <descriptor>assembly/dep.xml</descriptor> + </descriptors> + </configuration> + <executions> + <execution> + <id>make-assembly</id> + <phase>package</phase> + <goals> + <goal>single</goal> + </goals> + </execution> + </executions> + </plugin --> + <!-- now we configure custom action (calling a script) at various lifecycle phases --> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>exec-maven-plugin</artifactId> + <version>1.2.1</version> + <executions> + <execution> + <id>clean phase script</id> + <phase>clean</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>clean</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>generate-sources script</id> + <phase>generate-sources</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>generate-sources</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>compile script</id> + <phase>compile</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>compile</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>package script</id> + <phase>package</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>package</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>test script</id> + <phase>test</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>test</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>install script</id> + <phase>install</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>install</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>deploy script</id> + <phase>deploy</phase> + <goals> + 
<goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>deploy</argument> + </arguments> + </configuration> + </execution> + </executions> + </plugin> + </plugins> + </build> +</project> |
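
The module poms above (bootstrap and scripts) hand every lifecycle phase to mvn-phase-script.sh through exec-maven-plugin; the executable and the MVN_* environment variables are wired in the parent pom's pluginManagement, and each module only passes its artifactId plus the phase name as arguments. For debugging the script outside Maven, the "deploy" execution for the scripts module amounts to roughly the following (a sketch: the exported URLs and server id are placeholders, not the values used by ONAP CI):

```
# placeholder values -- in CI these come from the parent pom properties and settings.xml
export MVN_PROJECT_GROUPID=org.onap.dcaegen2
export MVN_PROJECT_ARTIFACTID=deployments
export MVN_PROJECT_VERSION=1.0.0-SNAPSHOT
export MVN_NEXUSPROXY=https://nexus.example.org
export MVN_RAWREPO_BASEURL_UPLOAD=https://nexus.example.org/content/sites/raw
export MVN_RAWREPO_BASEURL_DOWNLOAD=https://nexus.example.org/service/local/repositories/raw/content
export MVN_RAWREPO_SERVERID=example-raw-serverid
export MVN_DOCKERREGISTRY_DAILY=nexus3.example.org:10003
export SETTINGS_FILE=$HOME/.m2/settings.xml

# argument 1 is the module id, argument 2 is the Maven phase
./mvn-phase-script.sh scripts deploy
```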