diff options
-rwxr-xr-x | check-blueprint-vs-input/check-blueprint-vs-input | 215 | ||||
-rw-r--r-- | check-blueprint-vs-input/check-blueprint-vs-input.1 | 50 | ||||
-rw-r--r-- | check-blueprint-vs-input/check-blueprint-vs-input.man | 55 | ||||
-rwxr-xr-x | repackage/bin/repackage | 680 | ||||
-rwxr-xr-x | repackage/bin/yamltojson | 39 | ||||
-rw-r--r-- | repackage/man/repackage.1 | 123 | ||||
-rw-r--r-- | repackage/man/yamltojson.1 | 22 |
7 files changed, 1184 insertions, 0 deletions
diff --git a/check-blueprint-vs-input/check-blueprint-vs-input b/check-blueprint-vs-input/check-blueprint-vs-input new file mode 100755 index 0000000..9174526 --- /dev/null +++ b/check-blueprint-vs-input/check-blueprint-vs-input @@ -0,0 +1,215 @@ +#!/usr/bin/env python3 +# -*- indent-tabs-mode: nil -*- +# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this code except in compliance +# with the License. You may obtain a copy of the License +# at http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +from __future__ import print_function + +""" + + NAME + check-blueprint-vs-input - given a blueprint and inputs file pair, validate them against each other + + USAGE + check-blueprint-vs-input [-v] [-t] -b BLUEPRINT [-B exclusion-list] -i INPUTS [-B exclusion-list] + + DESCRIPTION +""" +description = """ + Validate a blueprint and inputs file against each other. This looks for the inputs: node of the blueprint + file, the inputs used by {get_input} within the blueprint, and the values found in the inputs file. The + files may be in either YAML or JSON formats. The names default to blueprint.yaml and inputs.yaml. If + a blueprint inputs name has a default value, it is not considered an error if it is not in the inputs file. + + If using a template inputs file, add the -t/--template option. This will look for the inputs under + an "inputs:" node instead of at the top level. + + If there are blueprint nodes or inputs nodes that should not be considered an error, specify them + using the -B/--blueprint-exclusion-list and -I/inputs-exclusion-list parameters. 
+ + "check-blueprint-vs-input --help" will list all of the available options. +""" +epilog = """ + NOTE + Values specified within the inputs file with no value or a value of {} (such as 'openstack: {}') + do not get loaded from the yaml dictionary, and will show up as false positives. It is suggested that + -B/--blueprint-exclusion-list be used for these, and then do a subsequent grep for those variables + being in the inputs file, as in: + + grep 'openstack: {}' $(BLUEPRINT) + +""" + +import yaml +import sys +import argparse + +def main(): + DEF_BLUEPRINT_NAME = "blueprint.yaml" + DEF_INPUTS_NAME = "inputs.yaml" + parser = argparse.ArgumentParser(description=description, epilog=epilog) + parser.add_argument("-b", "--blueprint", type=str, help="Path to blueprint file, defaults to '%s'" % DEF_BLUEPRINT_NAME, + default=DEF_BLUEPRINT_NAME) + parser.add_argument("-i", "--inputs", type=str, help="Port to listen on, defaults to '%s'" % DEF_INPUTS_NAME, + default=DEF_INPUTS_NAME) + parser.add_argument("-B", "--blueprint-exclusion-list", type=str, help="Comma-separated list of names not to warn about not being in the blueprint file", default="") + parser.add_argument("-I", "--inputs-exclusion-list", type=str, help="Comma-separated list of names not to warn about not being in the inputs file", default="") + parser.add_argument("-t", "--inputs-template", help="Treat inputs file as coming from template area", action="store_true") + parser.add_argument("-v", "--verbose", help="Verbose, may be specified multiple times", action="count", default=0) + args = parser.parse_args() + + blueprintExclusionList = args.blueprint_exclusion_list.split(",") + if args.verbose: print("blueprintExclusionList=%s" % blueprintExclusionList) + + inputsExclusionList = args.inputs_exclusion_list.split(",") + if args.verbose: print("inputsExclusionList=%s" % inputsExclusionList) + + def loadYaml(filename): + """ + Load a YAML file + """ + with open(filename, "r") as fd: + try: + y = yaml.safe_load(fd) 
+ if args.verbose > 1: + print("Contents of %s:" % filename) + yaml.dump(y, sys.stdout) + except: + type, value, traceback = sys.exc_info() + print('value=%s' % value, file=sys.stderr) + return y + + blueprint = loadYaml(args.blueprint) + inputs = loadYaml(args.inputs) + + # if inputs file is empty, provide an empty dictionary + if inputs is None: inputs = { } + + # blueprint file has inputs under the inputs: node + blueprintInputs = blueprint['inputs'] + + # inputs file normally has inputs at the top level, + # but templated inputs files have themunder the inputs: node + if args.inputs_template: inputs = inputs['inputs'] + + + exitval = 0 + + def check_blueprint_inputs(blueprintInputs, inputs, inputsExclusionList): + """ + check the blueprint inputs against the inputs file + """ + foundone = False + for input in blueprintInputs: + if args.verbose: print("blueprint input=%s" % input) + if input in inputs: + if args.verbose: print("\tIS in inputs file") + else: + # print("blueprintInputs.get(input)=%s and blueprintInputs[input].get('default')=%s" % (blueprintInputs.get(input), blueprintInputs[input].get('default'))) + if blueprintInputs.get(input) and blueprintInputs[input].get('default'): + if args.verbose: print("\tHAS a default value") + elif input not in inputsExclusionList: + print("<<<<<<<<<<<<<<<< %s not in inputs file" % input) + foundone = True + else: + if args.verbose: print("<<<<<<<<<<<<<<<< %s not in inputs file, but being ignored" % input) + return foundone + + # check the blueprint inputs: against the inputs file + if args.verbose: print("================ check the blueprint inputs: against the inputs file") + foundone = check_blueprint_inputs(blueprintInputs, inputs, inputsExclusionList) + if foundone: print("") + if foundone: exitval = 1 + + def check_get_inputs(blueprint, blueprintInputs, inputs, inputsExclusionList): + """ + check the blueprint get_input values against the inputs file + """ + if args.verbose > 2: print("check_get_inputs(): d=%s" % 
d) + + def findInputs(d): + ret = [ ] + if isinstance(d, dict): + if args.verbose: print("type(d) is dict") + for key in d.keys(): + if args.verbose: print("looking at d[key=%s]" % key) + if key == "get_input": + if args.verbose: print("found get_input, adding '%s'" % d[key]) + ret += [ d[key] ] + return ret + else: + if args.verbose: print("going recursive on '%s'" % d[key]) + ret += findInputs(d[key]) + elif isinstance(d, list): + if args.verbose: print("type(d) is list") + for val in d: + if args.verbose: print("going recursive on '%s'" % val) + ret += findInputs(val) + else: + if args.verbose: print("type(d) is scalar: %s" % d) + return ret + + foundone = False + inputList = findInputs(blueprint) + if args.verbose: print("done looking for get_input, found:\n%s" % inputList) + alreadySeen = { } + for input in inputList: + if input not in alreadySeen: + alreadySeen[input] = True + if args.verbose: print("checking input %s" % input) + if input in inputs: + if args.verbose: print("\tIS in input file") + else: + if blueprintInputs.get(input) and blueprintInputs[input].get('default'): + if args.verbose: print("\tHAS a default value") + elif input not in inputsExclusionList: + print(":::::::::::::::: get_input: %s is NOT in input file" % input) + foundone = True + else: + if args.verbose: print(":::::::::::::::: get_input: %s is NOT in input file, but being ignored" % input) + + return foundone + + + + # check the blueprint's get_input calls against the inputs file + if args.verbose: print("================ check the blueprint's get_input calls against the inputs file ================") + foundone = check_get_inputs(blueprint, blueprintInputs, inputs, inputsExclusionList) + if foundone: print("") + if foundone: exitval = 1 + + def check_inputs(blueprintInputs, inputs, blueprintExclusionList): + """ + check the inputs file against the blueprints inputs list + """ + foundone = False + for key in inputs: + if args.verbose: print("inputs key=%s" % key) + if key in 
blueprintInputs: + if args.verbose: print("\tIS in blueprint") + else: + if key not in blueprintExclusionList: + print(">>>>>>>>>>>>>>>> %s not in blueprint file" % key) + foundone = True + else: + if args.verbose: print(">>>>>>>>>>>>>>>> %s not in blueprint file, but being ignored" % key) + return foundone + + # check the inputs file against the blueprints input: section + if args.verbose: print("================ check the inputs file against the blueprints input: section ================") + foundone = check_inputs(blueprintInputs, inputs, blueprintExclusionList) + if foundone: exitval = 1 + sys.exit(exitval) + +if __name__ == "__main__": + main() diff --git a/check-blueprint-vs-input/check-blueprint-vs-input.1 b/check-blueprint-vs-input/check-blueprint-vs-input.1 new file mode 100644 index 0000000..247fbc6 --- /dev/null +++ b/check-blueprint-vs-input/check-blueprint-vs-input.1 @@ -0,0 +1,50 @@ +check-blueprint-vs-input(1UTIL) ONAP check-blueprint-vs-input(1UTIL) + + + +NNAAMMEE + check-blueprint-vs-input + +SSYYNNOOPPSSIISS + check-blueprint-vs-input [-h] [-b BLUEPRINT] [-i INPUTS] + [-B BLUEPRINT_EXCLUSION_LIST] + [-I INPUTS_EXCLUSION_LIST] [-t] [-v] + +DDEESSCCRRIIPPTTIIOONN + Validate a blueprint and inputs file against each other. This looks for + the inputs: node of the blueprint file, the inputs used by {get_input} + within the blueprint, and the values found in the inputs file. The + files may be in either YAML or JSON formats. The names default to blue‐ + print.yaml and inputs.yaml. If a blueprint inputs name has a default + value, it is not considered an error if it is not in the inputs file. + If using a template inputs file, add the -t/--template option. This + will look for the inputs under an "inputs:" node instead of at the top + level. If there are blueprint nodes or inputs nodes that should not be + considered an error, specify them using the -B/--blueprint- exclusion- + list and -I/inputs-exclusion-list parameters. 
"check-blueprint-vs- + input --help" will list all of the available options. + + OOPPTTIIOONNSS + -h, --help show this help message and exit + + -b BLUEPRINT, --blueprint BLUEPRINT Path to blueprint file, defaults to + 'blueprint.yaml' + + -i INPUTS, --inputs INPUTS Port to listen on, defaults to 'inputs.yaml' + + -B BLUEPRINT_EXCLUSION_LIST, --blueprint-exclusion-list BLUE‐ + PRINT_EXCLUSION_LIST Comma-separated list of names + not to warn about not + being in the blueprint file + + -I INPUTS_EXCLUSION_LIST, --inputs-exclusion-list INPUTS_EXCLUSION_LIST + Comma-separated list of names not to warn about not + being in the inputs file + + -t, --inputs-template Treat inputs file as coming from template area + + -v, --verbose Verbose, may be specified multiple times + + + +ONAP {{DATE}} check-blueprint-vs-input(1UTIL) diff --git a/check-blueprint-vs-input/check-blueprint-vs-input.man b/check-blueprint-vs-input/check-blueprint-vs-input.man new file mode 100644 index 0000000..017d529 --- /dev/null +++ b/check-blueprint-vs-input/check-blueprint-vs-input.man @@ -0,0 +1,55 @@ +'\" Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. +'\" +'\" Licensed under the Apache License, Version 2.0 (the "License"); +'\" you may not use this code except in compliance +'\" with the License. You may obtain a copy of the License +'\" at http://www.apache.org/licenses/LICENSE-2.0 +'\" +'\" Unless required by applicable law or agreed to in writing, software +'\" distributed under the License is distributed on an "AS IS" BASIS, +'\" WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +'\" implied. See the License for the specific language governing +'\" permissions and limitations under the License. 
+.TH check-blueprint-vs-input 1UTIL {{DATE}} ONAP ONAP +.SH NAME +check-blueprint-vs-input +.SH SYNOPSIS +check-blueprint-vs-input [-h] [-b BLUEPRINT] [-i INPUTS] + [-B BLUEPRINT_EXCLUSION_LIST] + [-I INPUTS_EXCLUSION_LIST] [-t] [-v] +.SH DESCRIPTION +Validate a blueprint and inputs file against each other. This looks for the +inputs: node of the blueprint file, the inputs used by {get_input} within the +blueprint, and the values found in the inputs file. The files may be in either +YAML or JSON formats. The names default to blueprint.yaml and inputs.yaml. If +a blueprint inputs name has a default value, it is not considered an error if +it is not in the inputs file. If using a template inputs file, add the +-t/--template option. This will look for the inputs under an "inputs:" node +instead of at the top level. If there are blueprint nodes or inputs nodes that +should not be considered an error, specify them using the -B/--blueprint- +exclusion-list and -I/inputs-exclusion-list parameters. "check-blueprint-vs- +input --help" will list all of the available options. 
+.SS OPTIONS +.HP 20 +-h, --help +show this help message and exit +.HP 20 +-b BLUEPRINT, --blueprint BLUEPRINT +Path to blueprint file, defaults to 'blueprint.yaml' +.HP 20 +-i INPUTS, --inputs INPUTS +Port to listen on, defaults to 'inputs.yaml' +.HP 20 +-B BLUEPRINT_EXCLUSION_LIST, --blueprint-exclusion-list BLUEPRINT_EXCLUSION_LIST +Comma-separated list of names not to warn about not + being in the blueprint file +.HP 20 +-I INPUTS_EXCLUSION_LIST, --inputs-exclusion-list INPUTS_EXCLUSION_LIST +Comma-separated list of names not to warn about not + being in the inputs file +.HP 20 +-t, --inputs-template +Treat inputs file as coming from template area +.HP 20 +-v, --verbose +Verbose, may be specified multiple times diff --git a/repackage/bin/repackage b/repackage/bin/repackage new file mode 100755 index 0000000..389be54 --- /dev/null +++ b/repackage/bin/repackage @@ -0,0 +1,680 @@ +#!/usr/bin/env python3 +# -*- indent-tabs-mode: nil -*- vi: set expandtab: + +import sys, os, argparse, time, re, posix, atexit, binascii +from pathlib import Path + +yamlOk = True +try: + import yaml +except: + yamlOk = False +jsonOk = True +try: + import simplejson as json +except: + try: + import json + except: + jsonOk = False + +def date(): + """ return a datestamp """ + return time.strftime("%Y-%m-%d %H:%M:%S") + +def infoMsg(msg): + """ generate an informational message to stdout """ + print("%s:INFO:%s" % (date(), msg)) + +def traceMsg(msg): + """ if verbose flag is on, generate an informational message to stdout """ + global args + if args.verbose: + infoMsg(msg) + +def warnMsg(msg): + """ generate a warning message to stdout """ + print("%s:WARNING:%s" % (date(), msg)) + +def die(msg): + """ generate a FATAL message to stdout and exit """ + print("%s:FATAL:%s" % (date(), msg)) + sys.exit(2) + +def displayCwd(): + """ display the working directory """ + infoMsg("working directory '" + os.getcwd() + "'") + +def cdCheck(dir): + """ cd to a new directory and die if we cannot """ + 
def removeDirPath(path, prmsg = True, gone_ok = False):
    """
    Recursively remove a directory tree, warning (never dying) about
    anything that cannot be removed.

    path    - directory to remove
    prmsg   - print an informational message before proceeding
    gone_ok - do not warn if the path does not exist
    """
    if prmsg:
        infoMsg("Removing path '%s'" % path)
    try:
        nmlist = os.listdir(path)
    except FileNotFoundError:
        if not gone_ok:
            warnMsg("path no longer exists: %s" % path)
        return
    except Exception as e:
        # narrowed from a bare "except:" so ^C and real bugs still propagate
        warnMsg("removing path (%s) gave this error: %s" % (path, e))
        return

    # os.listdir never yields "." or "..", so no need to filter them out
    for nm in nmlist:
        pathnm = path + "/" + nm
        if os.path.isdir(pathnm):
            removeDirPath(pathnm, prmsg = False)
        else:
            try:
                os.remove(pathnm)
            except Exception as e:
                warnMsg("Could not remove file (%s) because of %s" % (pathnm, e))

    # children are gone (or warned about); now remove the directory itself
    try:
        os.rmdir(path)
    except FileNotFoundError:
        if not gone_ok:
            warnMsg("Could not remove directory (%s) because of FileNotFound" % path)
    except Exception as e:
        warnMsg("Could not remove directory (%s) because of %s" % (path, e))

def verboseOsSystem(cmd):
    """ execute a shell command, printing a trace message first """
    traceMsg("About to execute '%s'" % cmd)
    os.system(cmd)

def lndir(fr, to):
    """ create a copy of a tree structure, using hard links where possible """
    global args
    removeDirPath(to + "/" + fr, prmsg = args.verbose, gone_ok = True)
    # NOTE(review): fr/to are interpolated into a shell pipeline; paths
    # containing single quotes would break this command.
    verboseOsSystem("find '%s' -print0 | cpio -pdml0 '%s'" % ( fr, to ))

# cached parse tree of the repackage.yaml / repackage.json control file
y = None

def getParam(name, dflt = None):
    """
    Retrieve a parameter from the repackage.yaml/.json control file,
    loading and caching the file on first use.

    name - a subscript expression such as '["docker"]["tag"]'
    dflt - value to return when the parameter is absent
    Dies when the file is unparseable, or when the parameter is missing
    and no default was supplied.
    """
    global y, args
    if y is None:
        fname = args.directory + "/" + args.repackageyaml
        if args.repackageyaml.endswith(".yaml"):
            if not yamlOk:
                die("YAML not available on this machine")
            with open(fname, "r") as fd:
                try:
                    contents = fd.read()
                    # leading tabs are not legal YAML indentation; soften them
                    contents = re.sub("^\t+", " ", contents, flags=re.M)
                    y = yaml.safe_load(contents)
                except Exception:
                    die("Invalid yaml in '%s'" % fname)
        elif args.repackageyaml.endswith(".json"):
            if not jsonOk:
                die("JSON not available on this machine")
            with open(fname, "r") as fd:
                try:
                    y = json.loads(fd.read())
                except Exception:
                    value = sys.exc_info()[1]
                    die("Invalid json in '%s': %s" % (fname, value))
        else:
            # BUG FIX: this used to reference the undefined bare name
            # 'repackageyaml' and raised NameError instead of the message
            die("%s must end either in .yaml or .json" % args.repackageyaml)

    # Evaluate the subscript expression against the cached tree with
    # builtins disabled; 'name' always comes from this program, not users.
    inp = None
    try:
        inp = eval("y" + name, {"__builtins__": None}, {"y": y})
    except KeyError:
        if dflt is not None:
            return dflt
    if inp is None:
        # BUG FIX: message read "must be be set"
        die("The %s must be set in %s" % (name, args.repackageyaml))
    return inp

def cleanupTmpRoot():
    """ clean out the tmp directory (registered as an atexit handler) """
    global TMPROOT
    removeDirPath(TMPROOT, prmsg = args.verbose, gone_ok = True)

def genDebianChangelog(fname):
    """ generate a Debian change log, hard-coded to this for now """
    with open(fname, "w") as fd:
        fd.write("OpenECOMP 1701 Demo\n")

def uploadDocker(name, tag):
    """ tag & push a Docker image to every configured registry:
    $DOCKERREGISTRY, then $DOCKERREGISTRY2, $DOCKERREGISTRY3, ... """
    ns = getParam( '["docker"]["namespace"]' )
    # read (and thereby require) the registry setting, even though the
    # environment variables are what is actually pushed to
    registry = getParam( '["docker"]["registry"]' )
    image = name + ":" + tag
    # BUG FIX: this assignment was duplicated on two consecutive lines
    repo = os.environ.get("DOCKERREGISTRY") + "/" + ns + "/" + image
    verboseOsSystem("docker tag " + image + " " + repo)
    verboseOsSystem("docker push " + repo)
    i = 2
    while os.environ.get("DOCKERREGISTRY" + str(i)):
        repo = os.environ.get("DOCKERREGISTRY" + str(i)) + "/" + ns + "/" + image
        verboseOsSystem("docker tag " + image + " " + repo)
        verboseOsSystem("docker push " + repo)
        i += 1
1 + +# The Debian control archive contents can include the following files: +# +# control: A list of dependencies, and other useful information to indentify the package, such as +# a brief description of the package. +# md5sums: contains MD5 checksums of all files in the package in order to detect corrupt or incomplete files. +# preinst, postinst, prerm and postrm are optional scripts that are executed before or after installing, +# updating or removing the package. +# copyright: any needed copyright notice +# changelog: +# conffiles: Lists the files of the package that should be treated as configuration files. +# Configuration files are not overwritten during an update unless specified. +# debian-binary: contains the deb-package version, currently 2.0 +# templates: A file with error descriptions and dialogs during installation +# config: is an optional script that supports the debconf configuration mechanism. +# shlibs: list of shared library dependencies. + +def genDebianControl(fname): + """ generate a Debian control file """ + with open(fname, "w") as fd: + global APPL, VER, BNBR, MAINTAINER + fd.write("Package: %s\n" % APPL) + fd.write("Version: %s-%s\n" % (VER, BNBR)) + fd.write("Section: utils\n") + fd.write("Priority: optional\n") + fd.write("Architecture: all\n") + fd.write("Maintainer: %s\n" % MAINTAINER) + deps = getParam('["debian"]["externalDependencies"]') + depends = "" + sep = " " + if deps: + for dep in deps: + for d, v in dep.items(): + depends += sep + d + " (" + v + ")" + sep = ", " + fd.write("Depends:%s\n" % depends) + fd.write("Conflicts:\n") + fd.write("Replaces:\n") + desc = getParam( '["description"]' ) + desc = re.sub("^[ \t]*$", ".", desc, flags=re.M) + desc = re.sub("^[ \t]*", " ", desc, flags=re.M) + fd.write("Description:%s\n" % desc) + +def genDebianMd5sums(fname): + """ generate an MD5 listing of all of the staged files """ + global ROOTDIR + verboseOsSystem("cd '%s/stage' && find * -type f -exec md5sum -b {} + > %s" % (ROOTDIR, 
def genCopyright(fname, prefix = ""):
    """ generate a copyright statement, with the given prefix on each line """
    with open(fname, "w") as fd:
        fd.write(prefix + "Copyright (C) 2016 AT&T Intellectual Property. All rights reserved.\n")
        fd.write(prefix + "\n")
        fd.write(prefix + "This code is licensed under the Apache License, Version 2.0;\n")
        fd.write(prefix + "you may not use this code for any purpose except in compliance\n")
        fd.write(prefix + "with the Apache License. You may obtain a copy of the License\n")
        fd.write(prefix + "at http://www.att.com/openecomp.html.\n")

def isExe(fname):
    """ check if a path exists (file or directory) and is executable """
    return os.path.exists(fname) and os.access(fname, os.X_OK)

def isFileExe(fname):
    """ check if a path exists as a regular file and is executable """
    return os.path.isfile(fname) and os.access(fname, os.X_OK)

def genFileList(path, testFn):
    """
    Return the list of file paths under 'path' (recursively) for which
    testFn(filepath) is true.  A missing root yields an empty list;
    other listing errors warn and yield what was gathered so far.
    """
    ret = []
    try:
        nmlist = os.listdir(path)
    except FileNotFoundError:
        return ret
    except Exception as e:
        # narrowed from a bare "except:" so ^C and real bugs still propagate
        warnMsg("error while listing path (%s): %s" % (path, e))
        return ret

    # os.listdir never yields "." or "..", so no need to filter them out
    for nm in nmlist:
        pathnm = path + "/" + nm
        if os.path.isdir(pathnm):
            ret.extend(genFileList(pathnm, testFn))
        elif testFn(pathnm):
            ret.append(pathnm)
    return ret


def createDockerTempFiles(L):
    """ create the temp file structure needed to create a docker image:
    a hard-linked copy of everything under stage/ plus the Dockerfile """
    global args, ROOTDIR
    removeDirPath(L, prmsg = args.verbose, gone_ok = True)
    os.makedirs(L, exist_ok = True)

    cdCheck(ROOTDIR + "/stage")
    # (removed an unused 'copyList' accumulator that was filled but never read)
    for i in os.listdir():
        if not i.startswith("."):
            lndir(i, L)

    posix.link(ROOTDIR + "/Dockerfile", L + "/Dockerfile")

def genApplVerBnbrSuffix(suffix, whichBuildNumber):
    """
    Generate the package-naming constants used in building a package.

    suffix           - package file suffix (deb / tar / tgz)
    whichBuildNumber - '{buildnumber}' (use BNBR), '{datetime}' (use
                       TIMESTAMP), or a literal build-number string
    Returns (applVer, applVerBnbr, applVerBnbrSuffix, applVerSuffix,
             outdirApplVerBnbrSuffix).
    """
    global APPL, VER, BNBR, TIMESTAMP
    applVer = APPL + "_" + VER
    # expanded from a doubly-nested conditional expression for readability
    if whichBuildNumber == '{buildnumber}':
        buildNumber = BNBR
    elif whichBuildNumber == '{datetime}':
        buildNumber = TIMESTAMP
    else:
        buildNumber = whichBuildNumber
    if buildNumber.startswith("{") and buildNumber.endswith("}"):
        die("Unrecognized buildnumber macro name: %s" % buildNumber)
    applVerBnbr = applVer + "-" + buildNumber
    applVerBnbrSuffix = applVerBnbr + "." + suffix
    applVerSuffix = applVer + "." + suffix
    outdirApplVerBnbrSuffix = args.outputdirectory + "/" + applVerBnbrSuffix
    return applVer, applVerBnbr, applVerBnbrSuffix, applVerSuffix, outdirApplVerBnbrSuffix
+ Take into account args.multipleuploadversions + """ + for buildNumber in args.allUploadVersions: + ignored1, ignored2, applVerBnbrSuffix, ignored3, ignored4 = genApplVerBnbrSuffix(suffix, buildNumber) + verboseOsSystem(os.environ.get(envName).format(outdirApplVerBnbrSuffix, applVerBnbrSuffix, groupId, applVerSuffix, applVer)) + i = 2 + while os.environ.get(envName + str(i)): + verboseOsSystem(os.environ.get(envName + str(i)).format(outdirApplVerBnbrSuffix, applVerBnbrSuffix, groupId, applVerSuffix, applVer)) + i += 1 + +def buildDebian(): + """ Build a local debian formatted package """ + infoMsg( 'Building a Debian package ...' ) + global args, TMPROOT, ROOTDIR + if args.skipexecution: + return + + suffix = "deb" + applVer, applVerBnbr, applVerBnbrSuffix, applVerSuffix, outdirApplVerBnbrSuffix = genApplVerBnbrSuffix(suffix, '{buildnumber}') + + if args.usecache and os.path.exists(outdirApplVerBnbrSuffix): + infoMsg( "Already built %s" % applVerBnbrSuffix) + + else: + L = TMPROOT + "/debian" + LD = TMPROOT + "/debian/DEBIAN" + removeDirPath(L, prmsg = args.verbose, gone_ok = True) + os.makedirs(LD, exist_ok = True) + + cdCheck(ROOTDIR + "/stage") + for i in os.listdir(): + if not i.startswith("."): + lndir(i, L) + + genCopyright(LD + "/copyright") + genDebianControl(LD + "/control") + genDebianChangelog(LD + "/changelog") + genDebianMd5sums(LD + "/md5sums") + + cdCheck(ROOTDIR) + execUser = getParam('["executionUser"]') + fileUser = getParam('["fileUser"]') + fileGroup = getParam('["fileGroup"]') + isRoot = execUser == "root" + for cname in [ "preinst", "postinst", "prerm", "postrm" ]: + comCname = "common/" + cname + ldName = LD + "/" + cname + if isExe(comCname) or cname == "postinst": + traceMsg("linking %s to %s" % (comCname, ldName)) + if isRoot and isExe(comCname): + posix.link(comCname, ldName) + else: + with open(ldName, "w") as out: + if cname == "postinst" and fileUser != "root": + for nm in os.listdir("stage"): + t = getParam( 
'["directoryTreeTops"]["/' + nm + '"]', "n/a" ) + if t == "n/a": + t = "/" + nm + print("chown -R '%s:%s' '%s'" % (fileUser, fileGroup, t), file=out) + print("find '%s' -type d -exec chmod 755 {} +" % t, file=out) + print("find '%s' ! -type d -exec chmod 644 {} +" % t, file=out) + # list each executable file separately + for fname in genFileList("stage", isFileExe): + fname = fname[6:] # remove 'stage/' from beginning + print("chmod 755 '/%s'" % fname, file=out) + + if isExe(comCname): + with open(comCname, "r") as inp: + print("gawk '{\n" + + " f = $0\n" + + " for (i = 1; i <= length(f); i+=2) {\n" + + " printf(\"%c\", strtonum(\"0X\" substr(f,i,2)))\n" + + " }\n" + + "}' > /tmp/rep.$$ <<EOF", file=out) + for line in inp: + for c in line: + # print(">>%02x<<" % ord(c)) + print("%02x" % ord(c), file=out, end="") + print("", file=out) + print("EOF\n" + + "chmod a+x /tmp/rep.$$\n" + + "su " + execUser + " -c /tmp/rep.$$\n" + + "rm -f /tmp/rep.$$\n", file=out) + verboseOsSystem("chmod a+x " + ldName) + + elif os.path.exists(comCname): + die(comCname + " must be executable") + + cdCheck(TMPROOT) + + if args.skipbuild: + traceMsg('Skipping final build') + return + + verboseOsSystem(". '%s'; fakeroot -- dpkg-deb --verbose --build '%s'" % (args.environfile, L)) + os.makedirs(args.outputdirectory, exist_ok = True) + os.rename("debian.deb", outdirApplVerBnbrSuffix) + + if not os.path.exists(outdirApplVerBnbrSuffix): + infoMsg( "Unsuccesful in building %s" % applVerBnbrSuffix) + return + + infoMsg( "Successfully built %s" % applVerBnbrSuffix) + + if args.upload: + envName = "REPACKAGEDEBIANUPLOAD" + groupId = getParam('["debian"]["groupId"]', getParam('["groupId"]')) + uploadAll(envName, groupId, outdirApplVerBnbrSuffix, suffix, applVer, applVerSuffix) + +def buildTar(useGzip): + """ Build a local tarball formatted package """ + infoMsg( 'Building a tar package ...' 
) + global args, TMPROOT, ROOTDIR + if args.skipexecution: + return + + suffix = "tgz" if useGzip else "tar" + applVer, applVerBnbr, applVerBnbrSuffix, applVerSuffix, outdirApplVerBnbrSuffix = genApplVerBnbrSuffix(suffix, '{buildnumber}') + + if args.usecache and os.path.isfile(outdirApplVerBnbrSuffix): + infoMsg( "Already built %s" % applVerBnbrSuffix) + + else: + L = TMPROOT + "/" + suffix + LD = L + "/" + applVerBnbr + removeDirPath(L, prmsg = args.verbose, gone_ok = True) + os.makedirs(LD, exist_ok = True) + + cdCheck(ROOTDIR + "/stage") + for i in os.listdir(): + if not i.startswith("."): + lndir(i, LD) + + cdCheck(L) + + if args.skipbuild: + traceMsg('Skipping final build') + return + + taropts = "-zc" if useGzip else "-c" + if args.verbose: taropts += "v" + taropts += "f" + verboseOsSystem(". '%s'; fakeroot -- tar %s tar.%s %s" % (args.environfile, taropts, suffix, applVerBnbr)) + os.makedirs(args.outputdirectory, exist_ok = True) + if args.verbose: + print("renaming tar.%s to %s" % (suffix, outdirApplVerBnbrSuffix)) + os.rename("tar.%s" % suffix, outdirApplVerBnbrSuffix) + + if not os.path.exists(outdirApplVerBnbrSuffix): + infoMsg( "Unsuccesful in building %s" % applVerBnbrSuffix) + return + + infoMsg( "Successfully built %s" % applVerBnbrSuffix) + + + if args.upload: + envName = "REPACKAGETGZUPLOAD" if useGzip else "REPACKAGETARUPLOAD" + groupId = getParam('["%s"]["groupId"]' % suffix, getParam('["groupId"]')) + uploadAll(envName, groupId, outdirApplVerBnbrSuffix, suffix, applVer, applVerSuffix) + +def buildDocker(): + """ Build a DOCKER image """ + image = getParam( '["docker"]["image"]', "n/a" ) + if image == "n/a": + global APPL + image = APPL + tag = getParam( '["docker"]["tag"]' ) + + infoMsg( 'Building a (local) docker image ...' ) + global args, TMPROOT + if args.skipexecution: + return + + L = TMPROOT + "/docker" + createDockerTempFiles(L) + + if args.skipbuild: + traceMsg('Skipping final build') + return + + cdCheck(L) + verboseOsSystem(". 
def strToBool(string):
    """Map the literal strings "true"/"false" (as delivered by the Jenkins
    positional-argument path) to real booleans.

    Any other value -- non-strings included -- is returned unchanged.
    """
    # isinstance() instead of "type(string) is str": the conventional
    # idiom, and it also accepts str subclasses.  Expanded from a
    # doubly-nested conditional expression for readability.
    if isinstance(string, str):
        if string == "true":
            return True
        if string == "false":
            return False
    return string
Defaults to 'output' under --directory path.", default=None, nargs='?') + parser.add_argument("verbose",help="turn on verbosity",nargs='?',default="true") + parser.add_argument("skipexecution",help="indcate packages and exit ",nargs='?',default="false") + parser.add_argument("skipbuild",help="skip actually bulding the packages",nargs='?',default="false") + parser.add_argument("usecache",help="if debian/tar/tgz artifact already exists use it",nargs='?',default="false") + parser.add_argument("keeptempfiles",help="keep temp files at exit",nargs='?',default="false") + else: + parser.add_argument("-n", "--skipexecution", help="indicate the packages and exit", action="store_true") + parser.add_argument("-c", "--usecache", help="if a debian/tar/tgz artifact already exists use it", action="store_true") + parser.add_argument("-N", "--skipbuild", help="skip actually building the packages", action="store_true") + parser.add_argument("-K", "--keeptempfiles", help="keep temp files at exit", action="store_true") + parser.add_argument("-v", "--verbose", help="turn on verbosity", action="store_true") + parser.add_argument("-b", "--packagetype", type=str, help="""The package-type may be specified multiple times or may use a ','-separated + or space-separated list. 'all' is an alias for all of them. Potential values are debian, docker""" + + ", tar or tgz", required=True) + parser.add_argument("-u", "--upload", action="store_true", help="""Depending on package type -- docker, debian, tar or tgz -- uploads the artifact to a remote repository. + For Docker, uses $DOCKERREGISTRY as the remote repository to push the image. + + For Debian, uses $REPACKAGEDEBIANUPLOAD as the command, with {0} as the local path to the debian image, {1} as the image name with build number, + and optionally {2} as groupId (may be used as part of the directory path), {3} as the image name without the build number, and {4} + as the image name with no build number and no .deb suffix. 
+ For additional uploads, this will also look for $REPACKAGEDEBIANUPLOAD2, $REPACKAGEDEBIANUPLOAD3, etc., and repeat the upload. + + For tar, uses $REPACKAGETARUPLOAD as the command. Everything said about $REPACKAGEDEBIANUPLOAD applies to $REPACKAGETARUPLOAD. + For tgz, uses $REPACKAGETGZUPLOAD as the command. Everything said about $REPACKAGEDEBIANUPLOAD applies to $REPACKAGETGZUPLOAD. + + In addition, if --multipleuploadversions is used, the above will be executed using the list of upload version numbers specified there. + + This is typically used to create multiple versions (using --multipleuploadversions) on multiple remote repositories (using $REPACKAGE*UPLOAD). + """) + # For additional uploads, repackage will also look for $DOCKERREGISTRY2, $DOCKERREGISTRY3, etc. + parser.add_argument("-d", "--directory", type=str, help="where to find the stage directory and %s. Defaults to '.'" % REPACKAGEYAML, default=".") + parser.add_argument("-e", "--environfile", type=str, help="Optional environment file. Overrides $REPACKAGEENVFILE, defaults to /dev/null", default="/dev/null") + parser.add_argument("-o", "--outputdirectory", type=str, help="Output directory. Defaults to 'output' under --directory path.", default=None) + parser.add_argument("-y", "--repackageyaml", type=str, help="Name of parameter file. Defaults to '" + REPACKAGEYAML + "' or '" + REPACKAGEJSON + "' under --directory path.", default=REPACKAGEYAML) + parser.add_argument("-B", "--buildnumber", type=str, help="Build number. Defaults to $BUILD_NUMBER, which defaults to a date-based string.", default="") + parser.add_argument("-D", "--define", type=str, action='append', help="define an argument at runtime in key=value format") + parser.add_argument("-M", "--multipleuploadversions", type=str, help="Use multiple versions for upload. Comma-separated list of {datetime}, {buildnumber} or arbitrary strings. 
Defaults to {buildnumber}, which is the value from --buildnumber.", default="{buildnumber}") + + global args + args = parser.parse_args() + + # for some reason, the Jenkins branch leaves these as strings instead of the proper boolean values + args.upload = strToBool(args.upload) + args.verbose = strToBool(args.verbose) + args.skipexecution = strToBool(args.skipexecution) + args.skipbuild = strToBool(args.skipbuild) + args.usecache = strToBool(args.usecache) + args.keeptempfiles = strToBool(args.keeptempfiles) + + # arguments defined at runtime as key=value pairs + global rtdef + rtdef = {} + + if args.define: + for k in args.define: + tag, val = k.split("=") + rtdef[tag] = val + + for k, v in rtdef.items(): + traceMsg("runtime defined %s <- %s" % (k,v)) + + # check the -e/$REPACKAGEENVFILE value + if args.environfile == "": + if os.environ.get("REPACKAGEENVFILE") is not None: + args.environfile = os.environ["REPACKAGEENVFILE"] + if not os.path.isfile(args.environfile) and args.environfile != "/dev/null": + die("-e / $REPACKAGEENVFILE must be a file that can be sourced by the shell") + if not args.environfile.startswith("/"): + args.environfile = os.getcwd() + "/" + args.environfile + + allPackages = [ "debian", "tar", "tgz", + "docker" ] + args.builds = { } + for pkg in allPackages: + args.builds[pkg] = False + if args.packagetype == "all": + args.packagetype = ",".join(allPackages) + for build in re.split("[, \t]", args.packagetype): + args.builds[build] = True + + args.allUploadVersions = args.multipleuploadversions.split(",") + + if args.upload and args.builds["debian"]: + if os.environ.get("REPACKAGEDEBIANUPLOAD") is None: + die("-u requires $REPACKAGEDEBIANUPLOAD to be set when building debian") + elif not re.search("[{]0[}]", os.environ.get("REPACKAGEDEBIANUPLOAD")): + die("$REPACKAGEDEBIANUPLOAD is missing {0}") + elif not re.search("[{][13][}]", os.environ.get("REPACKAGEDEBIANUPLOAD")): + die("$REPACKAGEDEBIANUPLOAD is missing either {1}, {3} or {4}") + + 
if args.upload and args.builds["tar"]: + if os.environ.get("REPACKAGETARUPLOAD") is None: + die("-u requires $REPACKAGETARUPLOAD to be set when building tar") + elif not re.search("[{]0[}]", os.environ.get("REPACKAGETARUPLOAD")): + die("$REPACKAGETARUPLOAD is missing {0}") + elif not re.search("[{][134][}]", os.environ.get("REPACKAGETARUPLOAD")): + die("$REPACKAGETARUPLOAD is missing either {1}, {3} or {4}") + + if args.upload and args.builds["tgz"]: + if os.environ.get("REPACKAGETGZUPLOAD") is None: + die("-u requires $REPACKAGETGZUPLOAD to be set when building tgz") + elif not re.search("[{]0[}]", os.environ.get("REPACKAGETGZUPLOAD")): + die("$REPACKAGETGZUPLOAD is missing {0}") + elif not re.search("[{][134][}]", os.environ.get("REPACKAGETGZUPLOAD")): + die("$REPACKAGETGZUPLOAD is missing either {1}, {3} or {4}") + + if args.upload and args.builds["docker"] and os.environ.get("DOCKERREGISTRY") is None: + die("-u requires $DOCKERREGISTRY to be set when building docker") + + if not os.path.isdir(args.directory): + die("The root directory %s does not exist" % args.directory) + if not args.directory.startswith("/"): + args.directory = os.getcwd() + "/" + args.directory + if args.repackageyaml != REPACKAGEYAML: + if not os.path.exists(args.directory + "/" + args.repackageyaml): + die("The file %s/%s does not exist" % (args.directory, args.repackageyaml)) + else: + if os.path.exists(args.directory + "/" + REPACKAGEYAML): + args.repackageyaml = REPACKAGEYAML + elif os.path.exists(args.directory + "/" + REPACKAGEJSON): + args.repackageyaml = REPACKAGEJSON + else: + die("Either %s/%s or %s/%s must exist" % (args.directory, args.repackageyaml, args.directory, args.repackagejson)) + + if args.outputdirectory is None: + args.outputdirectory = args.directory + "/output" + else: + if not args.outputdirectory.startswith("/"): + args.outputdirectory = os.getcwd() + "/" + args.outputdirectory + if not os.path.isdir(args.outputdirectory): + die("The specified --outputdirectory %s 
does not exist" % args.outputdirectory) + + # establish some global variables used everywhere + global ROOTDIR, TMPROOT + ROOTDIR = args.directory + TMPROOT = args.directory + "/tmp" + + # and cd to our ROOTDIR + cdCheck(ROOTDIR) + + # unless -K is specified, remove any temp files at the end + if not args.keeptempfiles: + atexit.register(cleanupTmpRoot) + + # grab and share some variables that are used by lots of build functions + global APPL, MAINTAINER, VER, BNBR, TIMESTAMP + APPL = getParam( '["applicationName"]' ) + MAINTAINER = getParam( '["maintainer"]' ) + VER = getParam( '["version"]' ) + TIMESTAMP = time.strftime("%Y%m%d%H%M%S") + BNBR = args.buildnumber if args.buildnumber != "" else os.environ.get("BUILD_NUMBER") if os.environ.get("BUILD_NUMBER") is not None else TIMESTAMP + + # build whatever was requested + if args.builds["docker"]: + buildDocker() + if args.builds["debian"]: + buildDebian() + if args.builds["tar"]: + buildTar(False) + if args.builds["tgz"]: + buildTar(True) + +if __name__ == "__main__": + main() diff --git a/repackage/bin/yamltojson b/repackage/bin/yamltojson new file mode 100755 index 0000000..0dc85fc --- /dev/null +++ b/repackage/bin/yamltojson @@ -0,0 +1,39 @@ +#!/usr/bin/env python3 +# -*- indent-tabs-mode: nil -*- vi: set expandtab: + +""" +NAME + yamltojson - convert a yaml file to a json file + +SYNOPSIS + yamltojson file.yaml ... + +DESCRIPTION + Read in a yaml file (whose name must end with ".yaml") and create cor‐ + responding json files, whose names will end with ".json". 
+""" + +import sys, re, yaml +try: + import simplejson as json +except: + import json + +def die(msg): + """ generate a FATAL message to stdout and exit """ + print("%s:FATAL:%s" % (date(), msg)) + sys.exit(2) + +for fname in sys.argv[1:]: + if fname.endswith(".yaml"): + y = None + with open(fname, "r") as fd: + try: + contents = fd.read() + contents = re.sub("^\t+", " ", contents, flags=re.M) + y = yaml.safe_load(contents) + except: + die("Invalid yaml in '%s'" % fname) + jsonfname = fname[:-5] + ".json" + with open(jsonfname, "w") as fd: + json.dump(y, fd, indent=4, sort_keys=True) diff --git a/repackage/man/repackage.1 b/repackage/man/repackage.1 new file mode 100644 index 0000000..aa5f411 --- /dev/null +++ b/repackage/man/repackage.1 @@ -0,0 +1,123 @@ +repackage(1) OpenECOMP repackage(1) + + + +NNAAMMEE + repackage - package an set of files into debian, or docker packages + +SSYYNNOOPPSSIISS + repackage [-h] [-n] [-c] [-N] [-K] [-v] -b PACKAGETYPE [-u] [-d DIREC‐ + TORY] [-e ENVIRONFILE] [-o OUTPUTDIRECTORY] + +DDEESSCCRRIIPPTTIIOONN + Build the specified packages. 'package-type' is one or more of docker, + debian, (comma-separated), or 'all' to build all of them. + +OOPPTTIIOONNSS + -h, --help show a help message and exit + + -n, --skipexecution indicate the packages and exit + + -c, --usecache if a debian/tar/tgz artifact already exists use it + + -N, --skipbuild skip actually building the packages + + -K, --keeptempfiles keep temp files at exit + + -v, --verbose turn on verbosity + + -b PACKAGETYPE, --packagetype PACKAGETYPE + The package-type may be specified multiple times or + may use a ','-separated or space-separated list. + 'all' is an alias for all of them. Potential val‐ + ues are debian, docker, tar or tgz + + -u, --upload Depending on package type -- docker, debian, tar or + tgz -- uploads the artifact to a remote repository. + For Docker, uses $DOCKERREGISTRY as the remote + repository to push the image. 
For Debian, uses + $REPACKAGEDEBIANUPLOAD as the command, with {0} as + the local path to the debian image, {1} as the + image name with build number, and optionally {2} as + groupId (may be used as part of the directory + path), {3} as the image name without the build num‐ + ber, and {4} as the image name with no build number + and no .deb suffix. For additional uploads, this + will also look for $REPACKAGEDEBIANUPLOAD2, + $REPACKAGEDEBIANUPLOAD3, etc., and repeat the + upload. For tar, uses $REPACKAGETARUPLOAD as the + command. Everything said about $REPACKAGEDEBIANU‐ + PLOAD applies to $REPACKAGETARUPLOAD. For tgz, uses + $REPACKAGETGZUPLOAD as the command. Everything said + about $REPACKAGEDEBIANUPLOAD applies to $REPACK‐ + AGETGZUPLOAD. In addition, if --multipleuploadver‐ + sions is used, the above will be executed using the + list of upload version numbers specified there. + This is typically used to create multiple versions + (using --multipleuploadversions) on multiple remote + repositories (using $REPACKAGE*UPLOAD). + + -d DIRECTORY, --directory DIRECTORY + where to find the stage directory and repack‐ + age.yaml. Defaults to '.' + + -e ENVIRONFILE, --environfile ENVIRONFILE + Optional environment file. Overrides $REPACKAGEEN‐ + VFILE, defaults to /dev/null + + -o OUTPUTDIRECTORY, --outputdirectory OUTPUTDIRECTORY + Output directory. Defaults to 'output' under + --directory path. + + -y FILE, --repackageyaml FILE + Name of parameter file. Defaults to 'repack‐ + age.yaml' or 'repackage.json' under --directory + path. + + -B buildnumber, --buildnumber BUILD-NUMBER + Build number. Defaults to $BUILD_NUMBER, which + defaults to a date-based string. + + -M MULTIPLEUPLOADVERSIONS, --multipleuploadversions MULTIPLEUPLOADVER‐ + SIONS + Use multiple versions for upload. Comma-separated + list of {datetime}, {buildnumber} or arbitrary + strings. Defaults to {buildnumber}, which is the + value from --buildnumber. 
+ +DDIIRREECCTTOORRYY SSTTRRUUCCTTUURREE + Repackage requires as its input four items: + + stage/* A directory structure filled with files laid out + exactly as they are to appear on the end system, as + if "stage" were the root of the filesystem. For + DCAE applications, it is recommended that you use + the path opt/app/PACKAGENAME for all files associ‐ + ated with PACKAGENAME. + + repackage.yaml or repackage.json + A configuration file with information about the + packages, such as the package's name, version + information, dependencies, etc. More information + will be provided below. + + Dockerfile This file is only used for creating Docker images. + It contains the docker creation script. + + common/* If needed, any package installation or package + removal scripts would be placed here. They must be + named preinst, postinst, prerm or postrm. + + If the installation scripts need some temporary files, it is recom‐ + mended that you place them into stage/opt/app/PACKAGENAME/pkg. Your + installation script can then remove that directory if necessary after + successful installation. + +FFIILLEESS + /opt/app/repackage/bin/repackage + + /opt/app/repackage/man/repackage.1 + + + +OpenECOMP 2017-09-13 repackage(1) diff --git a/repackage/man/yamltojson.1 b/repackage/man/yamltojson.1 new file mode 100644 index 0000000..55a2c4d --- /dev/null +++ b/repackage/man/yamltojson.1 @@ -0,0 +1,22 @@ +yamltojson(1) OpenECOMP yamltojson(1) + + + +NNAAMMEE + yamltojson - convert a yaml file to a json file + +SSYYNNOOPPSSIISS + yamltojson file.yaml ... + +DDEESSCCRRIIPPTTIIOONN + Read in a yaml file (whose name must end with ".yaml") and create cor‐ + responding json files, whose names will end with ".json". + +FFIILLEESS + /opt/app/repackage/bin/yamltojson + + /opt/app/repackage/man/yamltojson.1 + + + +OpenECOMP 2017-01-26 yamltojson(1) |