author     Tony Hansen <tony@att.com>  2017-09-29 02:03:39 +0000
committer  Tony Hansen <tony@att.com>  2017-09-29 02:03:53 +0000
commit     16a1fcbc814ec20e59cbe95cb506bca29f104208 (patch)
tree       faef0c5a6b382a22943f35774f8469f75d0e2b88
parent     8cdeeeb9a2e8b51f9897173dc26d672e55b00304 (diff)
remove check-blueprint-vs-input, repackage
check-blueprint-vs-input was moved to blueprints module
repackage was moved to ccsdk/pgaas module

Change-Id: If9d04d4ee1c0e5a3a2b837fc253c00dee226a2ac
Signed-off-by: Tony Hansen <tony@att.com>
Issue-ID: DCAEGEN2-49
-rwxr-xr-x  check-blueprint-vs-input/bin/check-blueprint-vs-input  215
-rw-r--r--  check-blueprint-vs-input/man/check-blueprint-vs-input.1  50
-rw-r--r--  check-blueprint-vs-input/man/check-blueprint-vs-input.man  55
-rw-r--r--  check-blueprint-vs-input/pom.xml  250
-rwxr-xr-x  mvn-phase-script.sh  30
-rw-r--r--  pom.xml  2
-rwxr-xr-x  repackage/bin/repackage  680
-rwxr-xr-x  repackage/bin/yamltojson  39
-rw-r--r--  repackage/man/repackage.1  123
-rw-r--r--  repackage/man/yamltojson.1  22
-rw-r--r--  repackage/pom.xml  252
11 files changed, 0 insertions, 1718 deletions
diff --git a/check-blueprint-vs-input/bin/check-blueprint-vs-input b/check-blueprint-vs-input/bin/check-blueprint-vs-input
deleted file mode 100755
index 9174526..0000000
--- a/check-blueprint-vs-input/bin/check-blueprint-vs-input
+++ /dev/null
@@ -1,215 +0,0 @@
-#!/usr/bin/env python3
-# -*- indent-tabs-mode: nil -*-
-# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this code except in compliance
-# with the License. You may obtain a copy of the License
-# at http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied. See the License for the specific language governing
-# permissions and limitations under the License.
-
-from __future__ import print_function
-
-"""
-
- NAME
- check-blueprint-vs-input - given a blueprint and inputs file pair, validate them against each other
-
- USAGE
- check-blueprint-vs-input [-v] [-t] -b BLUEPRINT [-B exclusion-list] -i INPUTS [-I exclusion-list]
-
- DESCRIPTION
-"""
-description = """
- Validate a blueprint and inputs file against each other. This looks for the inputs: node of the blueprint
- file, the inputs used by {get_input} within the blueprint, and the values found in the inputs file. The
- files may be in either YAML or JSON formats. The names default to blueprint.yaml and inputs.yaml. If
- a blueprint inputs name has a default value, it is not considered an error if it is not in the inputs file.
-
- If using a template inputs file, add the -t/--inputs-template option. This will look for the inputs under
- an "inputs:" node instead of at the top level.
-
- If there are blueprint nodes or inputs nodes that should not be considered an error, specify them
- using the -B/--blueprint-exclusion-list and -I/--inputs-exclusion-list parameters.
-
- "check-blueprint-vs-input --help" will list all of the available options.
-"""
-epilog = """
- NOTE
- Values specified within the inputs file with no value or a value of {} (such as 'openstack: {}')
- do not get loaded from the yaml dictionary, and will show up as false positives. It is suggested that
- -B/--blueprint-exclusion-list be used for these, and then do a subsequent grep for those variables
- being in the inputs file, as in:
-
- grep 'openstack: {}' $(BLUEPRINT)
-
-"""
-
-import yaml
-import sys
-import argparse
-
-def main():
- DEF_BLUEPRINT_NAME = "blueprint.yaml"
- DEF_INPUTS_NAME = "inputs.yaml"
- parser = argparse.ArgumentParser(description=description, epilog=epilog)
- parser.add_argument("-b", "--blueprint", type=str, help="Path to blueprint file, defaults to '%s'" % DEF_BLUEPRINT_NAME,
- default=DEF_BLUEPRINT_NAME)
- parser.add_argument("-i", "--inputs", type=str, help="Port to listen on, defaults to '%s'" % DEF_INPUTS_NAME,
- default=DEF_INPUTS_NAME)
- parser.add_argument("-B", "--blueprint-exclusion-list", type=str, help="Comma-separated list of names not to warn about not being in the blueprint file", default="")
- parser.add_argument("-I", "--inputs-exclusion-list", type=str, help="Comma-separated list of names not to warn about not being in the inputs file", default="")
- parser.add_argument("-t", "--inputs-template", help="Treat inputs file as coming from template area", action="store_true")
- parser.add_argument("-v", "--verbose", help="Verbose, may be specified multiple times", action="count", default=0)
- args = parser.parse_args()
-
- blueprintExclusionList = args.blueprint_exclusion_list.split(",")
- if args.verbose: print("blueprintExclusionList=%s" % blueprintExclusionList)
-
- inputsExclusionList = args.inputs_exclusion_list.split(",")
- if args.verbose: print("inputsExclusionList=%s" % inputsExclusionList)
-
- def loadYaml(filename):
- """
- Load a YAML file
- """
- with open(filename, "r") as fd:
- try:
- y = yaml.safe_load(fd)
- if args.verbose > 1:
- print("Contents of %s:" % filename)
- yaml.dump(y, sys.stdout)
- except:
- type, value, traceback = sys.exc_info()
- print('value=%s' % value, file=sys.stderr)
- return y
-
- blueprint = loadYaml(args.blueprint)
- inputs = loadYaml(args.inputs)
-
- # if inputs file is empty, provide an empty dictionary
- if inputs is None: inputs = { }
-
- # blueprint file has inputs under the inputs: node
- blueprintInputs = blueprint['inputs']
-
- # inputs file normally has inputs at the top level,
- # but templated inputs files have them under the inputs: node
- if args.inputs_template: inputs = inputs['inputs']
-
-
- exitval = 0
-
- def check_blueprint_inputs(blueprintInputs, inputs, inputsExclusionList):
- """
- check the blueprint inputs against the inputs file
- """
- foundone = False
- for input in blueprintInputs:
- if args.verbose: print("blueprint input=%s" % input)
- if input in inputs:
- if args.verbose: print("\tIS in inputs file")
- else:
- # print("blueprintInputs.get(input)=%s and blueprintInputs[input].get('default')=%s" % (blueprintInputs.get(input), blueprintInputs[input].get('default')))
- if blueprintInputs.get(input) and blueprintInputs[input].get('default'):
- if args.verbose: print("\tHAS a default value")
- elif input not in inputsExclusionList:
- print("<<<<<<<<<<<<<<<< %s not in inputs file" % input)
- foundone = True
- else:
- if args.verbose: print("<<<<<<<<<<<<<<<< %s not in inputs file, but being ignored" % input)
- return foundone
-
- # check the blueprint inputs: against the inputs file
- if args.verbose: print("================ check the blueprint inputs: against the inputs file")
- foundone = check_blueprint_inputs(blueprintInputs, inputs, inputsExclusionList)
- if foundone: print("")
- if foundone: exitval = 1
-
- def check_get_inputs(blueprint, blueprintInputs, inputs, inputsExclusionList):
- """
- check the blueprint get_input values against the inputs file
- """
- if args.verbose > 2: print("check_get_inputs(): blueprint=%s" % blueprint)
-
- def findInputs(d):
- ret = [ ]
- if isinstance(d, dict):
- if args.verbose: print("type(d) is dict")
- for key in d.keys():
- if args.verbose: print("looking at d[key=%s]" % key)
- if key == "get_input":
- if args.verbose: print("found get_input, adding '%s'" % d[key])
- ret += [ d[key] ]
- return ret
- else:
- if args.verbose: print("going recursive on '%s'" % d[key])
- ret += findInputs(d[key])
- elif isinstance(d, list):
- if args.verbose: print("type(d) is list")
- for val in d:
- if args.verbose: print("going recursive on '%s'" % val)
- ret += findInputs(val)
- else:
- if args.verbose: print("type(d) is scalar: %s" % d)
- return ret
-
- foundone = False
- inputList = findInputs(blueprint)
- if args.verbose: print("done looking for get_input, found:\n%s" % inputList)
- alreadySeen = { }
- for input in inputList:
- if input not in alreadySeen:
- alreadySeen[input] = True
- if args.verbose: print("checking input %s" % input)
- if input in inputs:
- if args.verbose: print("\tIS in input file")
- else:
- if blueprintInputs.get(input) and blueprintInputs[input].get('default'):
- if args.verbose: print("\tHAS a default value")
- elif input not in inputsExclusionList:
- print(":::::::::::::::: get_input: %s is NOT in input file" % input)
- foundone = True
- else:
- if args.verbose: print(":::::::::::::::: get_input: %s is NOT in input file, but being ignored" % input)
-
- return foundone
-
-
-
- # check the blueprint's get_input calls against the inputs file
- if args.verbose: print("================ check the blueprint's get_input calls against the inputs file ================")
- foundone = check_get_inputs(blueprint, blueprintInputs, inputs, inputsExclusionList)
- if foundone: print("")
- if foundone: exitval = 1
-
- def check_inputs(blueprintInputs, inputs, blueprintExclusionList):
- """
- check the inputs file against the blueprints inputs list
- """
- foundone = False
- for key in inputs:
- if args.verbose: print("inputs key=%s" % key)
- if key in blueprintInputs:
- if args.verbose: print("\tIS in blueprint")
- else:
- if key not in blueprintExclusionList:
- print(">>>>>>>>>>>>>>>> %s not in blueprint file" % key)
- foundone = True
- else:
- if args.verbose: print(">>>>>>>>>>>>>>>> %s not in blueprint file, but being ignored" % key)
- return foundone
-
- # check the inputs file against the blueprints input: section
- if args.verbose: print("================ check the inputs file against the blueprints input: section ================")
- foundone = check_inputs(blueprintInputs, inputs, blueprintExclusionList)
- if foundone: exitval = 1
- sys.exit(exitval)
-
-if __name__ == "__main__":
- main()
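
For reference, the cross-check the removed script performed can be sketched in a few lines: recursively collect every name referenced through get_input, then flag any name that is neither supplied in the inputs file nor given a default in the blueprint's inputs: section. This is an illustrative sketch, not the removed script itself; the blueprint and inputs documents below are hypothetical.

    #!/usr/bin/env python3
    # Minimal illustration: walk a blueprint structure, collect every name
    # referenced via {get_input: ...}, and report any name that is neither
    # present in the inputs file nor given a default in the blueprint.
    import yaml

    def find_get_inputs(node):
        """Recursively collect the value of every 'get_input' key in dicts/lists."""
        found = []
        if isinstance(node, dict):
            for key, value in node.items():
                if key == "get_input":
                    found.append(value)
                else:
                    found.extend(find_get_inputs(value))
        elif isinstance(node, list):
            for item in node:
                found.extend(find_get_inputs(item))
        return found

    # Hypothetical in-line documents; real usage would load blueprint.yaml / inputs.yaml.
    blueprint = yaml.safe_load("""
    inputs:
      location_id: {}
      replicas:
        default: 3
    node_templates:
      web:
        properties:
          location: {get_input: location_id}
          count: {get_input: replicas}
    """)
    inputs = yaml.safe_load("replicas: 5")

    declared = blueprint.get("inputs", {})
    for name in set(find_get_inputs(blueprint)):
        has_default = isinstance(declared.get(name), dict) and "default" in declared[name]
        if name not in inputs and not has_default:
            print("get_input: %s is NOT in the inputs file" % name)
    # prints: get_input: location_id is NOT in the inputs file
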
diff --git a/check-blueprint-vs-input/man/check-blueprint-vs-input.1 b/check-blueprint-vs-input/man/check-blueprint-vs-input.1
deleted file mode 100644
index 247fbc6..0000000
--- a/check-blueprint-vs-input/man/check-blueprint-vs-input.1
+++ /dev/null
@@ -1,50 +0,0 @@
-check-blueprint-vs-input(1UTIL) ONAP check-blueprint-vs-input(1UTIL)
-
-
-
-NAME
- check-blueprint-vs-input
-
-SYNOPSIS
- check-blueprint-vs-input [-h] [-b BLUEPRINT] [-i INPUTS]
- [-B BLUEPRINT_EXCLUSION_LIST]
- [-I INPUTS_EXCLUSION_LIST] [-t] [-v]
-
-DESCRIPTION
- Validate a blueprint and inputs file against each other. This looks for
- the inputs: node of the blueprint file, the inputs used by {get_input}
- within the blueprint, and the values found in the inputs file. The
- files may be in either YAML or JSON formats. The names default to blue‐
- print.yaml and inputs.yaml. If a blueprint inputs name has a default
- value, it is not considered an error if it is not in the inputs file.
- If using a template inputs file, add the -t/--inputs-template option. This
- will look for the inputs under an "inputs:" node instead of at the top
- level. If there are blueprint nodes or inputs nodes that should not be
- considered an error, specify them using the -B/--blueprint- exclusion-
- list and -I/--inputs-exclusion-list parameters. "check-blueprint-vs-
- input --help" will list all of the available options.
-
- OPTIONS
- -h, --help show this help message and exit
-
- -b BLUEPRINT, --blueprint BLUEPRINT Path to blueprint file, defaults to
- 'blueprint.yaml'
-
- -i INPUTS, --inputs INPUTS Path to inputs file, defaults to 'inputs.yaml'
-
- -B BLUEPRINT_EXCLUSION_LIST, --blueprint-exclusion-list BLUE‐
- PRINT_EXCLUSION_LIST Comma-separated list of names
- not to warn about not
- being in the blueprint file
-
- -I INPUTS_EXCLUSION_LIST, --inputs-exclusion-list INPUTS_EXCLUSION_LIST
- Comma-separated list of names not to warn about not
- being in the inputs file
-
- -t, --inputs-template Treat inputs file as coming from template area
-
- -v, --verbose Verbose, may be specified multiple times
-
-
-
-ONAP {{DATE}} check-blueprint-vs-input(1UTIL)
diff --git a/check-blueprint-vs-input/man/check-blueprint-vs-input.man b/check-blueprint-vs-input/man/check-blueprint-vs-input.man
deleted file mode 100644
index 017d529..0000000
--- a/check-blueprint-vs-input/man/check-blueprint-vs-input.man
+++ /dev/null
@@ -1,55 +0,0 @@
-'\" Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
-'\"
-'\" Licensed under the Apache License, Version 2.0 (the "License");
-'\" you may not use this code except in compliance
-'\" with the License. You may obtain a copy of the License
-'\" at http://www.apache.org/licenses/LICENSE-2.0
-'\"
-'\" Unless required by applicable law or agreed to in writing, software
-'\" distributed under the License is distributed on an "AS IS" BASIS,
-'\" WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-'\" implied. See the License for the specific language governing
-'\" permissions and limitations under the License.
-.TH check-blueprint-vs-input 1UTIL {{DATE}} ONAP ONAP
-.SH NAME
-check-blueprint-vs-input
-.SH SYNOPSIS
-check-blueprint-vs-input [-h] [-b BLUEPRINT] [-i INPUTS]
- [-B BLUEPRINT_EXCLUSION_LIST]
- [-I INPUTS_EXCLUSION_LIST] [-t] [-v]
-.SH DESCRIPTION
-Validate a blueprint and inputs file against each other. This looks for the
-inputs: node of the blueprint file, the inputs used by {get_input} within the
-blueprint, and the values found in the inputs file. The files may be in either
-YAML or JSON formats. The names default to blueprint.yaml and inputs.yaml. If
-a blueprint inputs name has a default value, it is not considered an error if
-it is not in the inputs file. If using a template inputs file, add the
--t/--inputs-template option. This will look for the inputs under an "inputs:" node
-instead of at the top level. If there are blueprint nodes or inputs nodes that
-should not be considered an error, specify them using the -B/--blueprint-
-exclusion-list and -I/--inputs-exclusion-list parameters. "check-blueprint-vs-
-input --help" will list all of the available options.
-.SS OPTIONS
-.HP 20
--h, --help
-show this help message and exit
-.HP 20
--b BLUEPRINT, --blueprint BLUEPRINT
-Path to blueprint file, defaults to 'blueprint.yaml'
-.HP 20
--i INPUTS, --inputs INPUTS
-Path to inputs file, defaults to 'inputs.yaml'
-.HP 20
--B BLUEPRINT_EXCLUSION_LIST, --blueprint-exclusion-list BLUEPRINT_EXCLUSION_LIST
-Comma-separated list of names not to warn about not
- being in the blueprint file
-.HP 20
--I INPUTS_EXCLUSION_LIST, --inputs-exclusion-list INPUTS_EXCLUSION_LIST
-Comma-separated list of names not to warn about not
- being in the inputs file
-.HP 20
--t, --inputs-template
-Treat inputs file as coming from template area
-.HP 20
--v, --verbose
-Verbose, may be specified multiple times
diff --git a/check-blueprint-vs-input/pom.xml b/check-blueprint-vs-input/pom.xml
deleted file mode 100644
index 2bf822f..0000000
--- a/check-blueprint-vs-input/pom.xml
+++ /dev/null
@@ -1,250 +0,0 @@
-<?xml version="1.0"?>
-<!--
-================================================================================
-Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
-================================================================================
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-============LICENSE_END=========================================================
-
-ECOMP is a trademark and service mark of AT&T Intellectual Property.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.onap.dcaegen2.utils</groupId>
- <artifactId>utils</artifactId>
- <version>1.0.0-SNAPSHOT</version>
- </parent>
- <groupId>org.onap.dcaegen2.utils</groupId>
- <artifactId>check-blueprint-vs-input</artifactId>
- <name>dcaegen2-utils-check-blueprint-vs-input</name>
- <version>1.0.0-SNAPSHOT</version>
- <url>http://maven.apache.org</url>
-
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <sonar.skip>true</sonar.skip>
- <sonar.sources>.</sonar.sources>
- <!-- customize the SONARQUBE URL -->
- <!-- sonar.host.url>http://localhost:9000</sonar.host.url -->
- <!-- below are language dependent -->
- <!-- for Python -->
- <sonar.language>py</sonar.language>
- <sonar.pluginName>Python</sonar.pluginName>
- <sonar.inclusions>**/*.py</sonar.inclusions>
- <!-- for JavaScript -->
- <!--
- <sonar.language>js</sonar.language>
- <sonar.pluginName>JS</sonar.pluginName>
- <sonar.inclusions>**/*.js</sonar.inclusions>
- -->
- </properties>
- <build>
- <finalName>${project.artifactId}-${project.version}</finalName>
- <pluginManagement>
- <plugins>
- <!-- the following plugins are invoked from oparent, we do not need them -->
- <plugin>
- <groupId>org.sonatype.plugins</groupId>
- <artifactId>nexus-staging-maven-plugin</artifactId>
- <version>1.6.7</version>
- <configuration>
- <skipNexusStagingDeployMojo>true</skipNexusStagingDeployMojo>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-deploy-plugin</artifactId>
- <!-- This version supports the "deployAtEnd" parameter -->
- <version>2.8</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <!-- first disable the default Java plugins at various stages -->
- <!-- maven-resources-plugin is called during "*resource" phases by default behavior. it prepares
- the resources dir. we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-resources-plugin</artifactId>
- <version>2.6</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <!-- maven-compiler-plugin is called during "compile" phases by default behavior. we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <version>3.1</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <!-- maven-jar-plugin is called during "compile" phase by default behavior. we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- <version>2.4</version>
- <executions>
- <execution>
- <id>default-jar</id>
- <phase/>
- </execution>
- </executions>
- </plugin>
- <!-- maven-install-plugin is called during "install" phase by default behavior. it tries to copy stuff under
- target dir to ~/.m2. we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-install-plugin</artifactId>
- <version>2.4</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <!-- maven-surefire-plugin is called during "test" phase by default behavior. it triggers junit test.
- we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <version>2.12.4</version>
- <configuration>
- <skipTests>true</skipTests>
- </configuration>
- </plugin>
- </plugins>
- </pluginManagement>
- <plugins>
- <!-- plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <version>2.4.1</version>
- <configuration>
- <descriptors>
- <descriptor>assembly/dep.xml</descriptor>
- </descriptors>
- </configuration>
- <executions>
- <execution>
- <id>make-assembly</id>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin -->
- <!-- now we configure custom action (calling a script) at various lifecycle phases -->
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>exec-maven-plugin</artifactId>
- <version>1.2.1</version>
- <executions>
- <execution>
- <id>clean phase script</id>
- <phase>clean</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>${project.artifactId}</argument>
- <argument>clean</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>generate-sources script</id>
- <phase>generate-sources</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>${project.artifactId}</argument>
- <argument>generate-sources</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>compile script</id>
- <phase>compile</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>${project.artifactId}</argument>
- <argument>compile</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>package script</id>
- <phase>package</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>${project.artifactId}</argument>
- <argument>package</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>test script</id>
- <phase>test</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>${project.artifactId}</argument>
- <argument>test</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>install script</id>
- <phase>install</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>${project.artifactId}</argument>
- <argument>install</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>deploy script</id>
- <phase>deploy</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>${project.artifactId}</argument>
- <argument>deploy</argument>
- <argument>bin/check-blueprint-vs-input</argument>
- <argument>man/check-blueprint-vs-input.1</argument>
- </arguments>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-</project>
diff --git a/mvn-phase-script.sh b/mvn-phase-script.sh
index 091abb6..de55ebc 100755
--- a/mvn-phase-script.sh
+++ b/mvn-phase-script.sh
@@ -56,9 +56,6 @@ case $MVN_PHASE in
clean)
echo "==> clean phase script"
case $MVN_PROJECT_MODULEID in
- check-blueprint-vs-input | repackage)
- if [ -f makefile -o -f Makefile ];then make clean; else :; fi
- ;;
*)
clean_templated_files
clean_tox_files
@@ -69,9 +66,6 @@ clean)
generate-sources)
echo "==> generate-sources phase script"
case $MVN_PROJECT_MODULEID in
- check-blueprint-vs-input | repackage)
- if [ -f makefile -o -f Makefile ];then make generate-sources; else :; fi
- ;;
*)
expand_templates
;;
@@ -80,9 +74,6 @@ generate-sources)
compile)
echo "==> compile phase script"
case $MVN_PROJECT_MODULEID in
- check-blueprint-vs-input | repackage)
- if [ -f makefile -o -f Makefile ];then make compile; else :; fi
- ;;
*)
;;
esac
@@ -90,9 +81,6 @@ compile)
test)
echo "==> test phase script"
case $MVN_PROJECT_MODULEID in
- check-blueprint-vs-input | repackage)
- if [ -f makefile -o -f Makefile ];then make test; else :; fi
- ;;
*)
set +e
run_tox_test
@@ -103,9 +91,6 @@ test)
package)
echo "==> package phase script"
case $MVN_PROJECT_MODULEID in
- check-blueprint-vs-input | repackage)
- if [ -f makefile -o -f Makefile ];then make package; else :; fi
- ;;
*)
;;
esac
@@ -113,9 +98,6 @@ package)
install)
echo "==> install phase script"
case $MVN_PROJECT_MODULEID in
- check-blueprint-vs-input | repackage)
- if [ -f makefile -o -f Makefile ];then make install; else :; fi
- ;;
*)
;;
esac
@@ -123,18 +105,6 @@ install)
deploy)
echo "==> deploy phase script"
case $MVN_PROJECT_MODULEID in
- check-blueprint-vs-input | repackage)
- if [ -f makefile -o -f Makefile ];then make deploy
- else
- # Upload all files (listed as additional deployment arguments) to Nexus
- # additional
- for artifact
- do
- upload_raw_file $artifact
- done
- set +e +x
- fi
- ;;
*)
# uncomment after we figure out how to use pypi. this command expects that the credentials are passed in
# settings.xml, and the URL and serverid are passed in from either oparent or dcaegen2's root pom
diff --git a/pom.xml b/pom.xml
index 432016f..2eddf1c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -37,8 +37,6 @@ ECOMP is a trademark and service mark of AT&T Intellectual Property.
<module>onap-dcae-dcaepolicy-lib</module>
<module>python-discovery-client</module>
<module>python-dockering</module>
- <module>check-blueprint-vs-input</module>
- <module>repackage</module>
</modules>
<properties>
diff --git a/repackage/bin/repackage b/repackage/bin/repackage
deleted file mode 100755
index 389be54..0000000
--- a/repackage/bin/repackage
+++ /dev/null
@@ -1,680 +0,0 @@
-#!/usr/bin/env python3
-# -*- indent-tabs-mode: nil -*- vi: set expandtab:
-
-import sys, os, argparse, time, re, posix, atexit, binascii
-from pathlib import Path
-
-yamlOk = True
-try:
- import yaml
-except:
- yamlOk = False
-jsonOk = True
-try:
- import simplejson as json
-except:
- try:
- import json
- except:
- jsonOk = False
-
-def date():
- """ return a datestamp """
- return time.strftime("%Y-%m-%d %H:%M:%S")
-
-def infoMsg(msg):
- """ generate an informational message to stdout """
- print("%s:INFO:%s" % (date(), msg))
-
-def traceMsg(msg):
- """ if verbose flag is on, generate an informational message to stdout """
- global args
- if args.verbose:
- infoMsg(msg)
-
-def warnMsg(msg):
- """ generate a warning message to stdout """
- print("%s:WARNING:%s" % (date(), msg))
-
-def die(msg):
- """ generate a FATAL message to stdout and exit """
- print("%s:FATAL:%s" % (date(), msg))
- sys.exit(2)
-
-def displayCwd():
- """ display the working directory """
- infoMsg("working directory '" + os.getcwd() + "'")
-
-def cdCheck(dir):
- """ cd to a new directory and die if we cannot """
- try:
- traceMsg("cd %s" % dir)
- os.chdir(dir)
- except:
- die("Cannot chdir(" + dir + ")")
-
-def removeDirPath(path, prmsg = True, gone_ok = False):
- """
- remove a directory path
- prmsg - print a message before proceeding
- gone_ok - do not warn if a path does not exist
- """
- if prmsg:
- infoMsg("Removing path '%s'" % path)
- nmlist = None
- try:
- nmlist = os.listdir(path)
- except FileNotFoundError:
- if not gone_ok:
- warnMsg("path no longer exists: %s" % path)
- return
- except:
- e = sys.exc_info()[0]
- warnMsg("removing path (%s) gave this error: %s" % (path, e))
- return
-
- for nm in nmlist:
- if nm != "." and nm != "..":
- pathnm = path + "/" + nm
- if os.path.isdir(pathnm):
- removeDirPath(pathnm, prmsg = False)
- else:
- # infoMsg(">>>>removing file %s" % pathnm)
- try:
- os.remove(pathnm)
- except:
- e = sys.exc_info()[0]
- warnMsg("Could not remove file (%s) because of %s" % (pathnm, e))
-
- # infoMsg(">>>>removing directory %s" % pathnm)
- try:
- os.rmdir(path)
- except FileNotFoundError:
- if not gone_ok:
- warnMsg("Could not remove directory (%s) because of FileNotFound" % path)
- except:
- e = sys.exc_info()[0]
- warnMsg("Could not remove directory (%s) because of %s" % (path, e))
-
-def verboseOsSystem(cmd):
- """ execute a shell command, printing a trace message first """
- traceMsg("About to execute '%s'" % cmd)
- os.system(cmd)
-
-def lndir(fr, to):
- """ create a copy of a tree structure, using hard links where possible """
- global args
- removeDirPath(to + "/" + fr, prmsg = args.verbose, gone_ok = True)
- verboseOsSystem("find '%s' -print0 | cpio -pdml0 '%s'" % ( fr, to ))
-
-y = None
-
-def getParam(name, dflt = None):
- """
- Retrieve the contents of a parameter file, rooted where specified.
- Return None when it does not exist.
- """
- global y, args
- if y is None:
- fname = args.directory + "/" + args.repackageyaml
- if args.repackageyaml.endswith(".yaml"):
- if not yamlOk:
- die("YAML not available on this machine")
- else:
- with open(fname, "r") as fd:
- try:
- contents = fd.read()
- contents = re.sub("^\t+", " ", contents, flags=re.M)
- y = yaml.safe_load(contents)
- except:
- die("Invalid yaml in '%s'" % fname)
- elif args.repackageyaml.endswith(".json"):
- if not jsonOk:
- die("JSON not available on this machine")
- else:
- with open(fname, "r") as fd:
- try:
- contents = fd.read()
- y = json.loads(contents)
- except:
- type, value, traceback = sys.exc_info()
- die("Invalid json in '%s': %s" % (fname, value))
- else:
- die("%s must end either in .yaml or .json" % repackageyaml)
-
- e = "y" + name
- inp = None
- try:
- inp = eval(e,{"__builtins__":None},{"y":y})
- except KeyError:
- if dflt is not None:
- return dflt
- if inp is None:
- die("The %s must be be set in %s" % (name, args.repackageyaml))
- return inp
-
-def cleanupTmpRoot():
- """ clean out the tmp directory """
- global TMPROOT
- removeDirPath(TMPROOT, prmsg = args.verbose, gone_ok = True)
-
-def genDebianChangelog(fname):
- """ generate a Debian change log, hard-coded to this for now """
- with open(fname, "w") as fd:
- fd.write("OpenECOMP 1701 Demo\n")
-
-def uploadDocker(name,tag):
- """ tag & push Docker image to nexus docker registry """
- ns = getParam( '["docker"]["namespace"]' )
- registry = getParam( '["docker"]["registry"]' )
- image = name + ":" + tag
- repo = os.environ.get("DOCKERREGISTRY") + "/" + ns + "/" + image
- repo = os.environ.get("DOCKERREGISTRY") + "/" + ns + "/" + image
- verboseOsSystem("docker tag " + image + " " + repo)
- verboseOsSystem("docker push " + repo)
- i = 2
- while os.environ.get("DOCKERREGISTRY" + str(i)):
- repo = os.environ.get("DOCKERREGISTRY" + str(i)) + "/" + ns + "/" + image
- verboseOsSystem("docker tag " + image + " " + repo)
- verboseOsSystem("docker push " + repo)
- i += 1
-
-# The Debian control archive contents can include the following files:
-#
- # control: A list of dependencies, and other useful information to identify the package, such as
-# a brief description of the package.
-# md5sums: contains MD5 checksums of all files in the package in order to detect corrupt or incomplete files.
-# preinst, postinst, prerm and postrm are optional scripts that are executed before or after installing,
-# updating or removing the package.
-# copyright: any needed copyright notice
-# changelog:
-# conffiles: Lists the files of the package that should be treated as configuration files.
-# Configuration files are not overwritten during an update unless specified.
-# debian-binary: contains the deb-package version, currently 2.0
-# templates: A file with error descriptions and dialogs during installation
-# config: is an optional script that supports the debconf configuration mechanism.
-# shlibs: list of shared library dependencies.
-
-def genDebianControl(fname):
- """ generate a Debian control file """
- with open(fname, "w") as fd:
- global APPL, VER, BNBR, MAINTAINER
- fd.write("Package: %s\n" % APPL)
- fd.write("Version: %s-%s\n" % (VER, BNBR))
- fd.write("Section: utils\n")
- fd.write("Priority: optional\n")
- fd.write("Architecture: all\n")
- fd.write("Maintainer: %s\n" % MAINTAINER)
- deps = getParam('["debian"]["externalDependencies"]')
- depends = ""
- sep = " "
- if deps:
- for dep in deps:
- for d, v in dep.items():
- depends += sep + d + " (" + v + ")"
- sep = ", "
- fd.write("Depends:%s\n" % depends)
- fd.write("Conflicts:\n")
- fd.write("Replaces:\n")
- desc = getParam( '["description"]' )
- desc = re.sub("^[ \t]*$", ".", desc, flags=re.M)
- desc = re.sub("^[ \t]*", " ", desc, flags=re.M)
- fd.write("Description:%s\n" % desc)
-
-def genDebianMd5sums(fname):
- """ generate an MD5 listing of all of the staged files """
- global ROOTDIR
- verboseOsSystem("cd '%s/stage' && find * -type f -exec md5sum -b {} + > %s" % (ROOTDIR, fname))
-
-def genCopyright(fname, prefix = ""):
- """ generate a copyright statement, with the given prefix on each line """
- with open(fname, "w") as fd:
- fd.write(prefix + "Copyright (C) 2016 AT&T Intellectual Property. All rights reserved.\n")
- fd.write(prefix + "\n")
- fd.write(prefix + "This code is licensed under the Apache License, Version 2.0;\n")
- fd.write(prefix + "you may not use this code for any purpose except in compliance\n")
- fd.write(prefix + "with the Apache License. You may obtain a copy of the License\n")
- fd.write(prefix + "at http://www.att.com/openecomp.html.\n")
-
-def isExe(fname):
- """ check if a path exists and is executable """
- return os.path.exists(fname) and os.access(fname, os.X_OK)
-
-def isFileExe(fname):
- """ check if a path exists as a file and is executable """
- return os.path.isfile(fname) and os.access(fname, os.X_OK)
-
-def genFileList(path, testFn):
- """ generate a list of files, rooted at path, that all pass the given test """
- ret = []
- try:
- nmlist = os.listdir(path)
- except FileNotFoundError:
- return ret
- except:
- e = sys.exc_info()[0]
- warnMsg("error while listing path (%s): %s" % (path, e))
- return ret
-
- for nm in nmlist:
- if nm != "." and nm != "..":
- pathnm = path + "/" + nm
- if os.path.isdir(pathnm):
- more = genFileList(pathnm, testFn)
- ret.extend(more)
- elif testFn(pathnm):
- ret.append(pathnm)
- return ret
-
-
-def createDockerTempFiles(L):
- """ create the temp file structure needed to create a docker image """
- global args, ROOTDIR
- removeDirPath(L, prmsg = args.verbose, gone_ok = True)
- os.makedirs(L, exist_ok = True)
-
- cdCheck(ROOTDIR + "/stage")
- copyList = []
- for i in os.listdir():
- if not i.startswith("."):
- lndir(i, L)
- copyList.append(i)
-
- posix.link(ROOTDIR + "/Dockerfile", L + "/Dockerfile")
-
-def genApplVerBnbrSuffix(suffix, whichBuildNumber):
- """ Generate a number of constants used in building a package """
- global APPL, VER, BNBR, TIMESTAMP
- applVer = APPL + "_" + VER
- buildNumber = BNBR if whichBuildNumber == '{buildnumber}' else TIMESTAMP if whichBuildNumber == '{datetime}' else whichBuildNumber
- if buildNumber.startswith("{") and buildNumber.endswith("}"):
- die("Unrecognized buildnumber macro name: %s" % buildNumber)
- applVerBnbr = applVer + "-" + buildNumber
- applVerBnbrSuffix = applVerBnbr + "." + suffix
- applVerSuffix = applVer + "." + suffix
- outdirApplVerBnbrSuffix = args.outputdirectory + "/" + applVerBnbrSuffix
- return applVer, applVerBnbr, applVerBnbrSuffix, applVerSuffix, outdirApplVerBnbrSuffix
-
-def uploadAll(envName, groupId, outdirApplVerBnbrSuffix, suffix, applVer, applVerSuffix):
- """
- Execute the various upload commands for a given package.
- Take into account args.multipleuploadversions
- """
- for buildNumber in args.allUploadVersions:
- ignored1, ignored2, applVerBnbrSuffix, ignored3, ignored4 = genApplVerBnbrSuffix(suffix, buildNumber)
- verboseOsSystem(os.environ.get(envName).format(outdirApplVerBnbrSuffix, applVerBnbrSuffix, groupId, applVerSuffix, applVer))
- i = 2
- while os.environ.get(envName + str(i)):
- verboseOsSystem(os.environ.get(envName + str(i)).format(outdirApplVerBnbrSuffix, applVerBnbrSuffix, groupId, applVerSuffix, applVer))
- i += 1
-
-def buildDebian():
- """ Build a local debian formatted package """
- infoMsg( 'Building a Debian package ...' )
- global args, TMPROOT, ROOTDIR
- if args.skipexecution:
- return
-
- suffix = "deb"
- applVer, applVerBnbr, applVerBnbrSuffix, applVerSuffix, outdirApplVerBnbrSuffix = genApplVerBnbrSuffix(suffix, '{buildnumber}')
-
- if args.usecache and os.path.exists(outdirApplVerBnbrSuffix):
- infoMsg( "Already built %s" % applVerBnbrSuffix)
-
- else:
- L = TMPROOT + "/debian"
- LD = TMPROOT + "/debian/DEBIAN"
- removeDirPath(L, prmsg = args.verbose, gone_ok = True)
- os.makedirs(LD, exist_ok = True)
-
- cdCheck(ROOTDIR + "/stage")
- for i in os.listdir():
- if not i.startswith("."):
- lndir(i, L)
-
- genCopyright(LD + "/copyright")
- genDebianControl(LD + "/control")
- genDebianChangelog(LD + "/changelog")
- genDebianMd5sums(LD + "/md5sums")
-
- cdCheck(ROOTDIR)
- execUser = getParam('["executionUser"]')
- fileUser = getParam('["fileUser"]')
- fileGroup = getParam('["fileGroup"]')
- isRoot = execUser == "root"
- for cname in [ "preinst", "postinst", "prerm", "postrm" ]:
- comCname = "common/" + cname
- ldName = LD + "/" + cname
- if isExe(comCname) or cname == "postinst":
- traceMsg("linking %s to %s" % (comCname, ldName))
- if isRoot and isExe(comCname):
- posix.link(comCname, ldName)
- else:
- with open(ldName, "w") as out:
- if cname == "postinst" and fileUser != "root":
- for nm in os.listdir("stage"):
- t = getParam( '["directoryTreeTops"]["/' + nm + '"]', "n/a" )
- if t == "n/a":
- t = "/" + nm
- print("chown -R '%s:%s' '%s'" % (fileUser, fileGroup, t), file=out)
- print("find '%s' -type d -exec chmod 755 {} +" % t, file=out)
- print("find '%s' ! -type d -exec chmod 644 {} +" % t, file=out)
- # list each executable file separately
- for fname in genFileList("stage", isFileExe):
- fname = fname[6:] # remove 'stage/' from beginning
- print("chmod 755 '/%s'" % fname, file=out)
-
- if isExe(comCname):
- with open(comCname, "r") as inp:
- print("gawk '{\n" +
- " f = $0\n" +
- " for (i = 1; i <= length(f); i+=2) {\n" +
- " printf(\"%c\", strtonum(\"0X\" substr(f,i,2)))\n" +
- " }\n" +
- "}' > /tmp/rep.$$ <<EOF", file=out)
- for line in inp:
- for c in line:
- # print(">>%02x<<" % ord(c))
- print("%02x" % ord(c), file=out, end="")
- print("", file=out)
- print("EOF\n" +
- "chmod a+x /tmp/rep.$$\n" +
- "su " + execUser + " -c /tmp/rep.$$\n" +
- "rm -f /tmp/rep.$$\n", file=out)
- verboseOsSystem("chmod a+x " + ldName)
-
- elif os.path.exists(comCname):
- die(comCname + " must be executable")
-
- cdCheck(TMPROOT)
-
- if args.skipbuild:
- traceMsg('Skipping final build')
- return
-
- verboseOsSystem(". '%s'; fakeroot -- dpkg-deb --verbose --build '%s'" % (args.environfile, L))
- os.makedirs(args.outputdirectory, exist_ok = True)
- os.rename("debian.deb", outdirApplVerBnbrSuffix)
-
- if not os.path.exists(outdirApplVerBnbrSuffix):
- infoMsg( "Unsuccesful in building %s" % applVerBnbrSuffix)
- return
-
- infoMsg( "Successfully built %s" % applVerBnbrSuffix)
-
- if args.upload:
- envName = "REPACKAGEDEBIANUPLOAD"
- groupId = getParam('["debian"]["groupId"]', getParam('["groupId"]'))
- uploadAll(envName, groupId, outdirApplVerBnbrSuffix, suffix, applVer, applVerSuffix)
-
-def buildTar(useGzip):
- """ Build a local tarball formatted package """
- infoMsg( 'Building a tar package ...' )
- global args, TMPROOT, ROOTDIR
- if args.skipexecution:
- return
-
- suffix = "tgz" if useGzip else "tar"
- applVer, applVerBnbr, applVerBnbrSuffix, applVerSuffix, outdirApplVerBnbrSuffix = genApplVerBnbrSuffix(suffix, '{buildnumber}')
-
- if args.usecache and os.path.isfile(outdirApplVerBnbrSuffix):
- infoMsg( "Already built %s" % applVerBnbrSuffix)
-
- else:
- L = TMPROOT + "/" + suffix
- LD = L + "/" + applVerBnbr
- removeDirPath(L, prmsg = args.verbose, gone_ok = True)
- os.makedirs(LD, exist_ok = True)
-
- cdCheck(ROOTDIR + "/stage")
- for i in os.listdir():
- if not i.startswith("."):
- lndir(i, LD)
-
- cdCheck(L)
-
- if args.skipbuild:
- traceMsg('Skipping final build')
- return
-
- taropts = "-zc" if useGzip else "-c"
- if args.verbose: taropts += "v"
- taropts += "f"
- verboseOsSystem(". '%s'; fakeroot -- tar %s tar.%s %s" % (args.environfile, taropts, suffix, applVerBnbr))
- os.makedirs(args.outputdirectory, exist_ok = True)
- if args.verbose:
- print("renaming tar.%s to %s" % (suffix, outdirApplVerBnbrSuffix))
- os.rename("tar.%s" % suffix, outdirApplVerBnbrSuffix)
-
- if not os.path.exists(outdirApplVerBnbrSuffix):
- infoMsg( "Unsuccesful in building %s" % applVerBnbrSuffix)
- return
-
- infoMsg( "Successfully built %s" % applVerBnbrSuffix)
-
-
- if args.upload:
- envName = "REPACKAGETGZUPLOAD" if useGzip else "REPACKAGETARUPLOAD"
- groupId = getParam('["%s"]["groupId"]' % suffix, getParam('["groupId"]'))
- uploadAll(envName, groupId, outdirApplVerBnbrSuffix, suffix, applVer, applVerSuffix)
-
-def buildDocker():
- """ Build a DOCKER image """
- image = getParam( '["docker"]["image"]', "n/a" )
- if image == "n/a":
- global APPL
- image = APPL
- tag = getParam( '["docker"]["tag"]' )
-
- infoMsg( 'Building a (local) docker image ...' )
- global args, TMPROOT
- if args.skipexecution:
- return
-
- L = TMPROOT + "/docker"
- createDockerTempFiles(L)
-
- if args.skipbuild:
- traceMsg('Skipping final build')
- return
-
- cdCheck(L)
- verboseOsSystem(". '%s'; docker build -t '%s:%s' ." % (args.environfile, image, tag))
-
- if args.upload:
- uploadDocker(image,tag)
-
-
-def strToBool(string):
- return True if (type(string) is str and string == "true") else False if (type(string) is str and string == "false") else string
-
-def main():
- """ the main executable function """
-
- #
- # deal with the program arguments -
- # we build two different types of argument lists based on
- # context. jenkins requires positional arguments while linux cmd line
- # permits parameterized ones. the jenkins positional argument list is
- # smaller
- #
- parser = argparse.ArgumentParser(
- description="Build the specified packages. 'package-type' is one or more of " +
- "docker, debian, tar, tgz" +
- " (comma-separated), or 'all' to build all of them."
- )
-
- REPACKAGEYAML = "repackage.yaml"
- REPACKAGEJSON = "repackage.json"
- if os.environ.get("JENKINS"):
- parser.add_argument("packagetype",help= "debian" +
- "|docker|tar|tgz" +
- "|all")
- parser.add_argument("upload",help="upload package to appropriate repository",nargs='?',default="false")
- parser.add_argument("directory", type=str, help="where to find the stage directory and %s. Defaults to '.'" % REPACKAGEYAML, default=".",nargs='?')
- parser.add_argument("environfile", type=str, help="Optional environment file. Overrides $REPACKAGEENVFILE, defaults to /dev/null", default="/dev/null", nargs='?')
- parser.add_argument("outputdirectory", type=str, help="Output directory. Defaults to 'output' under --directory path.", default=None, nargs='?')
- parser.add_argument("verbose",help="turn on verbosity",nargs='?',default="true")
- parser.add_argument("skipexecution",help="indcate packages and exit ",nargs='?',default="false")
- parser.add_argument("skipbuild",help="skip actually bulding the packages",nargs='?',default="false")
- parser.add_argument("usecache",help="if debian/tar/tgz artifact already exists use it",nargs='?',default="false")
- parser.add_argument("keeptempfiles",help="keep temp files at exit",nargs='?',default="false")
- else:
- parser.add_argument("-n", "--skipexecution", help="indicate the packages and exit", action="store_true")
- parser.add_argument("-c", "--usecache", help="if a debian/tar/tgz artifact already exists use it", action="store_true")
- parser.add_argument("-N", "--skipbuild", help="skip actually building the packages", action="store_true")
- parser.add_argument("-K", "--keeptempfiles", help="keep temp files at exit", action="store_true")
- parser.add_argument("-v", "--verbose", help="turn on verbosity", action="store_true")
- parser.add_argument("-b", "--packagetype", type=str, help="""The package-type may be specified multiple times or may use a ','-separated
- or space-separated list. 'all' is an alias for all of them. Potential values are debian, docker""" +
- ", tar or tgz", required=True)
- parser.add_argument("-u", "--upload", action="store_true", help="""Depending on package type -- docker, debian, tar or tgz -- uploads the artifact to a remote repository.
- For Docker, uses $DOCKERREGISTRY as the remote repository to push the image.
-
- For Debian, uses $REPACKAGEDEBIANUPLOAD as the command, with {0} as the local path to the debian image, {1} as the image name with build number,
- and optionally {2} as groupId (may be used as part of the directory path), {3} as the image name without the build number, and {4}
- as the image name with no build number and no .deb suffix.
- For additional uploads, this will also look for $REPACKAGEDEBIANUPLOAD2, $REPACKAGEDEBIANUPLOAD3, etc., and repeat the upload.
-
- For tar, uses $REPACKAGETARUPLOAD as the command. Everything said about $REPACKAGEDEBIANUPLOAD applies to $REPACKAGETARUPLOAD.
- For tgz, uses $REPACKAGETGZUPLOAD as the command. Everything said about $REPACKAGEDEBIANUPLOAD applies to $REPACKAGETGZUPLOAD.
-
- In addition, if --multipleuploadversions is used, the above will be executed using the list of upload version numbers specified there.
-
- This is typically used to create multiple versions (using --multipleuploadversions) on multiple remote repositories (using $REPACKAGE*UPLOAD).
- """)
- # For additional uploads, repackage will also look for $DOCKERREGISTRY2, $DOCKERREGISTRY3, etc.
- parser.add_argument("-d", "--directory", type=str, help="where to find the stage directory and %s. Defaults to '.'" % REPACKAGEYAML, default=".")
- parser.add_argument("-e", "--environfile", type=str, help="Optional environment file. Overrides $REPACKAGEENVFILE, defaults to /dev/null", default="/dev/null")
- parser.add_argument("-o", "--outputdirectory", type=str, help="Output directory. Defaults to 'output' under --directory path.", default=None)
- parser.add_argument("-y", "--repackageyaml", type=str, help="Name of parameter file. Defaults to '" + REPACKAGEYAML + "' or '" + REPACKAGEJSON + "' under --directory path.", default=REPACKAGEYAML)
- parser.add_argument("-B", "--buildnumber", type=str, help="Build number. Defaults to $BUILD_NUMBER, which defaults to a date-based string.", default="")
- parser.add_argument("-D", "--define", type=str, action='append', help="define an argument at runtime in key=value format")
- parser.add_argument("-M", "--multipleuploadversions", type=str, help="Use multiple versions for upload. Comma-separated list of {datetime}, {buildnumber} or arbitrary strings. Defaults to {buildnumber}, which is the value from --buildnumber.", default="{buildnumber}")
-
- global args
- args = parser.parse_args()
-
- # for some reason, the Jenkins branch leaves these as strings instead of the proper boolean values
- args.upload = strToBool(args.upload)
- args.verbose = strToBool(args.verbose)
- args.skipexecution = strToBool(args.skipexecution)
- args.skipbuild = strToBool(args.skipbuild)
- args.usecache = strToBool(args.usecache)
- args.keeptempfiles = strToBool(args.keeptempfiles)
-
- # arguments defined at runtime as key=value pairs
- global rtdef
- rtdef = {}
-
- if args.define:
- for k in args.define:
- tag, val = k.split("=")
- rtdef[tag] = val
-
- for k, v in rtdef.items():
- traceMsg("runtime defined %s <- %s" % (k,v))
-
- # check the -e/$REPACKAGEENVFILE value
- if args.environfile == "":
- if os.environ.get("REPACKAGEENVFILE") is not None:
- args.environfile = os.environ["REPACKAGEENVFILE"]
- if not os.path.isfile(args.environfile) and args.environfile != "/dev/null":
- die("-e / $REPACKAGEENVFILE must be a file that can be sourced by the shell")
- if not args.environfile.startswith("/"):
- args.environfile = os.getcwd() + "/" + args.environfile
-
- allPackages = [ "debian", "tar", "tgz",
- "docker" ]
- args.builds = { }
- for pkg in allPackages:
- args.builds[pkg] = False
- if args.packagetype == "all":
- args.packagetype = ",".join(allPackages)
- for build in re.split("[, \t]", args.packagetype):
- args.builds[build] = True
-
- args.allUploadVersions = args.multipleuploadversions.split(",")
-
- if args.upload and args.builds["debian"]:
- if os.environ.get("REPACKAGEDEBIANUPLOAD") is None:
- die("-u requires $REPACKAGEDEBIANUPLOAD to be set when building debian")
- elif not re.search("[{]0[}]", os.environ.get("REPACKAGEDEBIANUPLOAD")):
- die("$REPACKAGEDEBIANUPLOAD is missing {0}")
- elif not re.search("[{][13][}]", os.environ.get("REPACKAGEDEBIANUPLOAD")):
- die("$REPACKAGEDEBIANUPLOAD is missing either {1}, {3} or {4}")
-
- if args.upload and args.builds["tar"]:
- if os.environ.get("REPACKAGETARUPLOAD") is None:
- die("-u requires $REPACKAGETARUPLOAD to be set when building tar")
- elif not re.search("[{]0[}]", os.environ.get("REPACKAGETARUPLOAD")):
- die("$REPACKAGETARUPLOAD is missing {0}")
- elif not re.search("[{][134][}]", os.environ.get("REPACKAGETARUPLOAD")):
- die("$REPACKAGETARUPLOAD is missing either {1}, {3} or {4}")
-
- if args.upload and args.builds["tgz"]:
- if os.environ.get("REPACKAGETGZUPLOAD") is None:
- die("-u requires $REPACKAGETGZUPLOAD to be set when building tgz")
- elif not re.search("[{]0[}]", os.environ.get("REPACKAGETGZUPLOAD")):
- die("$REPACKAGETGZUPLOAD is missing {0}")
- elif not re.search("[{][134][}]", os.environ.get("REPACKAGETGZUPLOAD")):
- die("$REPACKAGETGZUPLOAD is missing either {1}, {3} or {4}")
-
- if args.upload and args.builds["docker"] and os.environ.get("DOCKERREGISTRY") is None:
- die("-u requires $DOCKERREGISTRY to be set when building docker")
-
- if not os.path.isdir(args.directory):
- die("The root directory %s does not exist" % args.directory)
- if not args.directory.startswith("/"):
- args.directory = os.getcwd() + "/" + args.directory
- if args.repackageyaml != REPACKAGEYAML:
- if not os.path.exists(args.directory + "/" + args.repackageyaml):
- die("The file %s/%s does not exist" % (args.directory, args.repackageyaml))
- else:
- if os.path.exists(args.directory + "/" + REPACKAGEYAML):
- args.repackageyaml = REPACKAGEYAML
- elif os.path.exists(args.directory + "/" + REPACKAGEJSON):
- args.repackageyaml = REPACKAGEJSON
- else:
- die("Either %s/%s or %s/%s must exist" % (args.directory, args.repackageyaml, args.directory, args.repackagejson))
-
- if args.outputdirectory is None:
- args.outputdirectory = args.directory + "/output"
- else:
- if not args.outputdirectory.startswith("/"):
- args.outputdirectory = os.getcwd() + "/" + args.outputdirectory
- if not os.path.isdir(args.outputdirectory):
- die("The specified --outputdirectory %s does not exist" % args.outputdirectory)
-
- # establish some global variables used everywhere
- global ROOTDIR, TMPROOT
- ROOTDIR = args.directory
- TMPROOT = args.directory + "/tmp"
-
- # and cd to our ROOTDIR
- cdCheck(ROOTDIR)
-
- # unless -K is specified, remove any temp files at the end
- if not args.keeptempfiles:
- atexit.register(cleanupTmpRoot)
-
- # grab and share some variables that are used by lots of build functions
- global APPL, MAINTAINER, VER, BNBR, TIMESTAMP
- APPL = getParam( '["applicationName"]' )
- MAINTAINER = getParam( '["maintainer"]' )
- VER = getParam( '["version"]' )
- TIMESTAMP = time.strftime("%Y%m%d%H%M%S")
- BNBR = args.buildnumber if args.buildnumber != "" else os.environ.get("BUILD_NUMBER") if os.environ.get("BUILD_NUMBER") is not None else TIMESTAMP
-
- # build whatever was requested
- if args.builds["docker"]:
- buildDocker()
- if args.builds["debian"]:
- buildDebian()
- if args.builds["tar"]:
- buildTar(False)
- if args.builds["tgz"]:
- buildTar(True)
-
-if __name__ == "__main__":
- main()
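
For reference, the removed repackage script expanded each $REPACKAGE*UPLOAD environment variable as a str.format() command template with five positional fields, as described in its --upload help text. A minimal sketch of that expansion follows; the curl command, Nexus URL, and package values are hypothetical, not taken from this repository.

    # Field meanings, per the removed script's uploadAll() and --upload help text:
    #   {0} local artifact path   {1} name with build number   {2} groupId
    #   {3} name without build number   {4} name without build number or suffix
    # The template, URL, and values below are hypothetical.
    template = ("curl -u $NEXUS_CREDS --upload-file '{0}' "
                "https://nexus.example.org/repository/raw/{2}/{1}")

    appl, ver, bnbr, suffix = "mypkg", "1.0.0", "42", "deb"
    appl_ver = "%s_%s" % (appl, ver)                              # mypkg_1.0.0
    appl_ver_bnbr_suffix = "%s-%s.%s" % (appl_ver, bnbr, suffix)  # mypkg_1.0.0-42.deb
    appl_ver_suffix = "%s.%s" % (appl_ver, suffix)                # mypkg_1.0.0.deb
    local_path = "output/" + appl_ver_bnbr_suffix

    # The script handed the formatted string to os.system(), so shell variables
    # such as $NEXUS_CREDS would be expanded at execution time.
    cmd = template.format(local_path, appl_ver_bnbr_suffix,
                          "org.onap.dcaegen2", appl_ver_suffix, appl_ver)
    print(cmd)
    # curl -u $NEXUS_CREDS --upload-file 'output/mypkg_1.0.0-42.deb'
    #   https://nexus.example.org/repository/raw/org.onap.dcaegen2/mypkg_1.0.0-42.deb
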
diff --git a/repackage/bin/yamltojson b/repackage/bin/yamltojson
deleted file mode 100755
index 0dc85fc..0000000
--- a/repackage/bin/yamltojson
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python3
-# -*- indent-tabs-mode: nil -*- vi: set expandtab:
-
-"""
-NAME
- yamltojson - convert a yaml file to a json file
-
-SYNOPSIS
- yamltojson file.yaml ...
-
-DESCRIPTION
- Read in a yaml file (whose name must end with ".yaml") and create cor‐
- responding json files, whose names will end with ".json".
-"""
-
-import sys, re, time, yaml
-try:
- import simplejson as json
-except:
- import json
-
-def die(msg):
- """ generate a FATAL message to stdout and exit """
- print("%s:FATAL:%s" % (date(), msg))
- sys.exit(2)
-
-for fname in sys.argv[1:]:
- if fname.endswith(".yaml"):
- y = None
- with open(fname, "r") as fd:
- try:
- contents = fd.read()
- contents = re.sub("^\t+", " ", contents, flags=re.M)
- y = yaml.safe_load(contents)
- except:
- die("Invalid yaml in '%s'" % fname)
- jsonfname = fname[:-5] + ".json"
- with open(jsonfname, "w") as fd:
- json.dump(y, fd, indent=4, sort_keys=True)
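
The conversion the removed yamltojson helper performed amounts to the following: normalize leading tabs (which YAML forbids as indentation), parse the document, and emit indented, key-sorted JSON. A small self-contained sketch, with a hypothetical input string:

    import json, re, yaml

    contents = "top:\n\tname: demo\n\tcount: 3\n"           # hypothetical YAML input
    contents = re.sub("^\t+", "    ", contents, flags=re.M)  # same tab fix-up as the script
    data = yaml.safe_load(contents)
    print(json.dumps(data, indent=4, sort_keys=True))
    # {
    #     "top": {
    #         "count": 3,
    #         "name": "demo"
    #     }
    # }
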
diff --git a/repackage/man/repackage.1 b/repackage/man/repackage.1
deleted file mode 100644
index aa5f411..0000000
--- a/repackage/man/repackage.1
+++ /dev/null
@@ -1,123 +0,0 @@
-repackage(1) OpenECOMP repackage(1)
-
-
-
-NAME
- repackage - package a set of files into debian, tar, tgz, or docker packages
-
-SYNOPSIS
- repackage [-h] [-n] [-c] [-N] [-K] [-v] -b PACKAGETYPE [-u] [-d DIREC‐
- TORY] [-e ENVIRONFILE] [-o OUTPUTDIRECTORY]
-
-DESCRIPTION
- Build the specified packages. 'package-type' is one or more of docker,
- debian, tar, tgz (comma-separated), or 'all' to build all of them.
-
-OPTIONS
- -h, --help show a help message and exit
-
- -n, --skipexecution indicate the packages and exit
-
- -c, --usecache if a debian/tar/tgz artifact already exists use it
-
- -N, --skipbuild skip actually building the packages
-
- -K, --keeptempfiles keep temp files at exit
-
- -v, --verbose turn on verbosity
-
- -b PACKAGETYPE, --packagetype PACKAGETYPE
- The package-type may be specified multiple times or
- may use a ','-separated or space-separated list.
- 'all' is an alias for all of them. Potential val‐
- ues are debian, docker, tar or tgz
-
- -u, --upload Depending on package type -- docker, debian, tar or
- tgz -- uploads the artifact to a remote repository.
- For Docker, uses $DOCKERREGISTRY as the remote
- repository to push the image. For Debian, uses
- $REPACKAGEDEBIANUPLOAD as the command, with {0} as
- the local path to the debian image, {1} as the
- image name with build number, and optionally {2} as
- groupId (may be used as part of the directory
- path), {3} as the image name without the build num‐
- ber, and {4} as the image name with no build number
- and no .deb suffix. For additional uploads, this
- will also look for $REPACKAGEDEBIANUPLOAD2,
- $REPACKAGEDEBIANUPLOAD3, etc., and repeat the
- upload. For tar, uses $REPACKAGETARUPLOAD as the
- command. Everything said about $REPACKAGEDEBIANU‐
- PLOAD applies to $REPACKAGETARUPLOAD. For tgz, uses
- $REPACKAGETGZUPLOAD as the command. Everything said
- about $REPACKAGEDEBIANUPLOAD applies to $REPACK‐
- AGETGZUPLOAD. In addition, if --multipleuploadver‐
- sions is used, the above will be executed using the
- list of upload version numbers specified there.
- This is typically used to create multiple versions
- (using --multipleuploadversions) on multiple remote
- repositories (using $REPACKAGE*UPLOAD).
-
- -d DIRECTORY, --directory DIRECTORY
- where to find the stage directory and repack‐
- age.yaml. Defaults to '.'
-
- -e ENVIRONFILE, --environfile ENVIRONFILE
- Optional environment file. Overrides $REPACKAGEEN‐
- VFILE, defaults to /dev/null
-
- -o OUTPUTDIRECTORY, --outputdirectory OUTPUTDIRECTORY
- Output directory. Defaults to 'output' under
- --directory path.
-
- -y FILE, --repackageyaml FILE
- Name of parameter file. Defaults to 'repack‐
- age.yaml' or 'repackage.json' under --directory
- path.
-
- -B buildnumber, --buildnumber BUILD-NUMBER
- Build number. Defaults to $BUILD_NUMBER, which
- defaults to a date-based string.
-
- -M MULTIPLEUPLOADVERSIONS, --multipleuploadversions MULTIPLEUPLOADVER‐
- SIONS
- Use multiple versions for upload. Comma-separated
- list of {datetime}, {buildnumber} or arbitrary
- strings. Defaults to {buildnumber}, which is the
- value from --buildnumber.
-
-DIRECTORY STRUCTURE
- Repackage requires as its input four items:
-
- stage/* A directory structure filled with files laid out
- exactly as they are to appear on the end system, as
- if "stage" were the root of the filesystem. For
- DCAE applications, it is recommended that you use
- the path opt/app/PACKAGENAME for all files associ‐
- ated with PACKAGENAME.
-
- repackage.yaml or repackage.json
- A configuration file with information about the
- packages, such as the package's name, version
- information, dependencies, etc. More information
- will be provided below.
-
- Dockerfile This file is only used for creating Docker images.
- It contains the docker creation script.
-
- common/* If needed, any package installation or package
- removal scripts would be placed here. They must be
- named preinst, postinst, prerm or postrm.
-
- If the installation scripts need some temporary files, it is recom‐
- mended that you place them into stage/opt/app/PACKAGENAME/pkg. Your
- installation script can then remove that directory if necessary after
- successful installation.
-
-FILES
- /opt/app/repackage/bin/repackage
-
- /opt/app/repackage/man/repackage.1
-
-
-
-OpenECOMP 2017-09-13 repackage(1)
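
For reference, a repackage.yaml needs to provide the keys the removed repackage script looked up via getParam(). The sketch below expresses such a configuration as the Python structure yaml.safe_load() would return; the key names mirror the script's lookups, while every value is a hypothetical placeholder.

    import yaml

    params = {
        "applicationName": "mypkg",                  # APPL
        "version": "1.0.0",                          # VER
        "maintainer": "Example Maintainer <maintainer@example.org>",
        "description": "Example package built with repackage.",
        "groupId": "org.onap.dcaegen2",              # fallback groupId for uploads
        "executionUser": "root",                     # user that runs preinst/postinst
        "fileUser": "mypkg",                         # ownership applied in postinst
        "fileGroup": "mypkg",
        "debian": {
            # list of single-key dicts: dependency name -> version constraint
            "externalDependencies": [{"python3": ">= 3.4"}],
        },
        "docker": {
            "namespace": "onap",
            "registry": "nexus3.example.org:10003",
            "tag": "latest",
        },
    }
    # Emit the structure as YAML, i.e. roughly what repackage.yaml would contain.
    print(yaml.safe_dump(params, default_flow_style=False))
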
diff --git a/repackage/man/yamltojson.1 b/repackage/man/yamltojson.1
deleted file mode 100644
index 55a2c4d..0000000
--- a/repackage/man/yamltojson.1
+++ /dev/null
@@ -1,22 +0,0 @@
-yamltojson(1) OpenECOMP yamltojson(1)
-
-
-
-NAME
- yamltojson - convert a yaml file to a json file
-
-SYNOPSIS
- yamltojson file.yaml ...
-
-DESCRIPTION
- Read in a yaml file (whose name must end with ".yaml") and create cor‐
- responding json files, whose names will end with ".json".
-
-FILES
- /opt/app/repackage/bin/yamltojson
-
- /opt/app/repackage/man/yamltojson.1
-
-
-
-OpenECOMP 2017-01-26 yamltojson(1)
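For orientation, a minimal Python sketch of the conversion yamltojson describes, assuming PyYAML is available; the removed script may differ in structure and error handling:

    #!/usr/bin/env python3
    # Sketch only: convert each *.yaml argument to a *.json file.
    import json
    import sys

    import yaml  # PyYAML, assumed to be installed

    for name in sys.argv[1:]:
        if not name.endswith(".yaml"):
            sys.exit(name + ": file name must end with .yaml")
        with open(name) as src:
            data = yaml.safe_load(src)
        with open(name[:-len(".yaml")] + ".json", "w") as dst:
            json.dump(data, dst, indent=2)
            dst.write("\n")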
diff --git a/repackage/pom.xml b/repackage/pom.xml
deleted file mode 100644
index f8457a3..0000000
--- a/repackage/pom.xml
+++ /dev/null
@@ -1,252 +0,0 @@
-<?xml version="1.0"?>
-<!--
-================================================================================
-Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
-================================================================================
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-============LICENSE_END=========================================================
-
-ECOMP is a trademark and service mark of AT&T Intellectual Property.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.onap.dcaegen2.utils</groupId>
- <artifactId>utils</artifactId>
- <version>1.0.0-SNAPSHOT</version>
- </parent>
- <groupId>org.onap.dcaegen2.utils</groupId>
- <artifactId>repackage</artifactId>
- <name>dcaegen2-utils-repackage</name>
- <version>1.0.0-SNAPSHOT</version>
- <url>http://maven.apache.org</url>
-
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <sonar.skip>true</sonar.skip>
- <sonar.sources>.</sonar.sources>
- <!-- customize the SONARQUBE URL -->
- <!-- sonar.host.url>http://localhost:9000</sonar.host.url -->
- <!-- below are language dependent -->
- <!-- for Python -->
- <sonar.language>py</sonar.language>
- <sonar.pluginName>Python</sonar.pluginName>
- <sonar.inclusions>**/*.py</sonar.inclusions>
- <!-- for JavaScript -->
- <!--
- <sonar.language>js</sonar.language>
- <sonar.pluginName>JS</sonar.pluginName>
- <sonar.inclusions>**/*.js</sonar.inclusions>
- -->
- </properties>
- <build>
- <finalName>${project.artifactId}-${project.version}</finalName>
- <pluginManagement>
- <plugins>
- <!-- the following plugins are invoked from oparent, we do not need them -->
- <plugin>
- <groupId>org.sonatype.plugins</groupId>
- <artifactId>nexus-staging-maven-plugin</artifactId>
- <version>1.6.7</version>
- <configuration>
- <skipNexusStagingDeployMojo>true</skipNexusStagingDeployMojo>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-deploy-plugin</artifactId>
- <!-- This version supports the "deployAtEnd" parameter -->
- <version>2.8</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <!-- first disable the default Java plugins at various stages -->
- <!-- maven-resources-plugin is called during "*resource" phases by default behavior. it prepares
- the resources dir. we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-resources-plugin</artifactId>
- <version>2.6</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <!-- maven-compiler-plugin is called during "compile" phases by default behavior. we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <version>3.1</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <!-- maven-jar-plugin is called during "compile" phase by default behavior. we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- <version>2.4</version>
- <executions>
- <execution>
- <id>default-jar</id>
- <phase/>
- </execution>
- </executions>
- </plugin>
- <!-- maven-install-plugin is called during "install" phase by default behavior. it tries to copy stuff under
- target dir to ~/.m2. we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-install-plugin</artifactId>
- <version>2.4</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <!-- maven-surefire-plugin is called during "test" phase by default behavior. it triggers junit test.
- we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <version>2.12.4</version>
- <configuration>
- <skipTests>true</skipTests>
- </configuration>
- </plugin>
- </plugins>
- </pluginManagement>
- <plugins>
- <!-- plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <version>2.4.1</version>
- <configuration>
- <descriptors>
- <descriptor>assembly/dep.xml</descriptor>
- </descriptors>
- </configuration>
- <executions>
- <execution>
- <id>make-assembly</id>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin -->
- <!-- now we configure custom action (calling a script) at various lifecycle phases -->
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>exec-maven-plugin</artifactId>
- <version>1.2.1</version>
- <executions>
- <execution>
- <id>clean phase script</id>
- <phase>clean</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>${project.artifactId}</argument>
- <argument>clean</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>generate-sources script</id>
- <phase>generate-sources</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>${project.artifactId}</argument>
- <argument>generate-sources</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>compile script</id>
- <phase>compile</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>${project.artifactId}</argument>
- <argument>compile</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>package script</id>
- <phase>package</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>${project.artifactId}</argument>
- <argument>package</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>test script</id>
- <phase>test</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>${project.artifactId}</argument>
- <argument>test</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>install script</id>
- <phase>install</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>${project.artifactId}</argument>
- <argument>install</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>deploy script</id>
- <phase>deploy</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>${project.artifactId}</argument>
- <argument>deploy</argument>
- <argument>bin/repackage</argument>
- <argument>bin/yamltojson</argument>
- <argument>man/repackage.1</argument>
- <argument>man/yamltojson.1</argument>
- </arguments>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-</project>
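The exec-maven-plugin executions in this POM all pass the artifactId, the Maven phase name and, for the deploy phase, a list of files to an external command that is configured outside this file. A hypothetical Python sketch of a script with that call shape, for illustration only:

    #!/usr/bin/env python3
    # Hypothetical phase-dispatch script: argv[1] is the artifactId, argv[2]
    # the Maven phase, and any remaining arguments are files (deploy phase).
    import sys

    if len(sys.argv) < 3:
        sys.exit("usage: phase-script ARTIFACT PHASE [FILE ...]")
    artifact, phase, *files = sys.argv[1:]
    if phase == "deploy":
        print("deploying", ", ".join(files), "for", artifact)
    else:
        print("phase", phase, "for", artifact, "- nothing else to do")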