diff options
-rw-r--r-- | Dockerfile_UT | 2 | ||||
-rw-r--r-- | LICENSE.txt | 6 | ||||
-rw-r--r-- | deployment-handler-API.yaml | 93 | ||||
-rw-r--r-- | deployment-handler.js | 29 | ||||
-rw-r--r-- | lib/auth.js | 2 | ||||
-rw-r--r-- | lib/cloudify.js | 137 | ||||
-rw-r--r-- | lib/config.js | 6 | ||||
-rw-r--r-- | lib/consul.js | 8 | ||||
-rw-r--r-- | lib/dcae-deployments.js | 109 | ||||
-rw-r--r-- | lib/deploy.js | 144 | ||||
-rw-r--r-- | lib/info.js | 40 | ||||
-rw-r--r-- | lib/inventory.js | 24 | ||||
-rw-r--r-- | lib/logging.js | 8 | ||||
-rw-r--r-- | lib/middleware.js | 29 | ||||
-rw-r--r-- | lib/policy.js | 545 | ||||
-rw-r--r-- | lib/promise_request.js | 22 | ||||
-rwxr-xr-x | mvn-phase-lib.sh | 4 | ||||
-rw-r--r-- | package.json | 4 | ||||
-rw-r--r-- | pom.xml | 8 | ||||
-rwxr-xr-x | set_version.sh | 17 | ||||
-rw-r--r-- | tests/mock_deployment_handler.js | 103 | ||||
-rw-r--r-- | tests/mock_utils.js | 36 | ||||
-rw-r--r-- | tests/test_dcae-deployments.js | 633 | ||||
-rw-r--r-- | tests/test_info.js | 59 | ||||
-rw-r--r-- | tests/test_policy.js | 589 | ||||
-rw-r--r-- | tests/test_zzz_run.js | 21 | ||||
-rw-r--r-- | version.js | 20 | ||||
-rw-r--r-- | version.properties | 4 |
28 files changed, 2085 insertions, 617 deletions
diff --git a/Dockerfile_UT b/Dockerfile_UT index 265e1bd..df4ddac 100644 --- a/Dockerfile_UT +++ b/Dockerfile_UT @@ -1,5 +1,5 @@ FROM node:6.10.3 -MAINTAINER maintainer + ENV INSROOT /opt/app ENV APPUSER dh ENV APPDIR ${INSROOT}/${APPUSER} diff --git a/LICENSE.txt b/LICENSE.txt index 69d5fc1..14cb17c 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,11 +1,11 @@ /* * ============LICENSE_START========================================== * =================================================================== -* Copyright © 2017 AT&T Intellectual Property. All rights reserved. +* Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. * =================================================================== * * Unless otherwise specified, all software contained herein is licensed -* under the Apache License, Version 2.0 (the “License”); +* under the Apache License, Version 2.0 (the "License"); * you may not use this software except in compliance with the License. * You may obtain a copy of the License at * @@ -20,7 +20,7 @@ * * * Unless otherwise specified, all documentation contained herein is licensed -* under the Creative Commons License, Attribution 4.0 Intl. (the “License”); +* under the Creative Commons License, Attribution 4.0 Intl. (the "License"); * you may not use this documentation except in compliance with the License. * You may obtain a copy of the License at * diff --git a/deployment-handler-API.yaml b/deployment-handler-API.yaml index 31395a5..b85a554 100644 --- a/deployment-handler-API.yaml +++ b/deployment-handler-API.yaml @@ -1,9 +1,26 @@ +# ================================================================================ +# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. --- swagger: '2.0' info: - version: "4.1.0" + version: "4.3.0" title: "deployment-handler API" license: name: "Apache 2.0" @@ -120,6 +137,12 @@ paths: in: path type: string required: true + - name: cfy_tenant_name + description: | + Tenant Name in Cloudify. Optional, if not specified, "default_tenant" will be used. + in: path + type: string + required: false - name: body in: body @@ -241,6 +264,12 @@ paths: in: path type: string required: true + - name: cfy_tenant_name + description: | + Tenant Name in Cloudify. Optional, if not specified, "default_tenant" will be used. + in: path + type: string + required: false responses: @@ -287,6 +316,12 @@ paths: - application/json parameters: + - name: cfy_tenant_name + description: | + Tenant Name in Cloudify. Optional, if not specified, "default_tenant" will be used. + in: path + type: string + required: false - name: body in: body schema: @@ -305,6 +340,14 @@ paths: produces: - application/json + parameters: + - name: cfy_tenant_name + description: | + Tenant Name in Cloudify. Optional, if not specified, "default_tenant" will be used. + in: path + type: string + required: false + responses: 200: description: deployment-handler found components with or without policies in cloudify @@ -469,11 +512,57 @@ definitions: description: request to update policies on DCAE components. 
type: object required: + - catch_up - latest_policies + - removed_policies properties: + catch_up: + description: flag to indicate whether the request contains all the policies in PDP or not + type: boolean + default: false + latest_policies: - description: "dictionary of (policy_id -> Policy object). In example: replace additionalProp1,2,3 with policy_id1,2,3 values" + description: | + dictionary of (policy_id -> DCAEPolicy object). + In example: replace additionalProp1,2,3 with policy_id1,2,3 values type: object + default: {} additionalProperties: $ref: "#/definitions/DCAEPolicy" + + removed_policies: + description: | + whether policy was removed from policy-engine. + dictionary of (policy_id -> true). + In example: replace additionalProp1,2,3 with policy_id1,2,3 values + type: object + default: {} + additionalProperties: + type: boolean + + errored_policies: + description: | + whether policy-engine returned an error on the policy. + dictionary of (policy_id -> true). + In example: replace additionalProp1,2,3 with policy_id1,2,3 values + type: object + default: {} + additionalProperties: + type: boolean + + errored_scopes: + description: > + on cartchup - list of policy scope_prefix values on wchich + the policy-engine experienced an error other than not-found data. + type: array + items: + type: string + + scope_prefixes: + description: > + on catchup - list of all scope_prefixes used by the policy-handler + to retrieve the policies from policy-engine. + type: array + items: + type: string diff --git a/deployment-handler.js b/deployment-handler.js index 1d59733..2ae1391 100644 --- a/deployment-handler.js +++ b/deployment-handler.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -18,7 +18,7 @@ See the License for the specific language governing permissions and limitations "use strict"; -const API_VERSION = "4.1.0"; +const API_VERSION = "4.3.0"; const fs = require('fs'); const util = require('util'); @@ -75,9 +75,10 @@ const start = function(config) { "policy": POLICY_PATH, "swagger-ui": SWAGGER_UI_PATH }; - exports.config = config; + process.mainModule.exports.config = config; - log.debug(null, "Configuration: " + JSON.stringify(config)); + log.info(null, "Configuration: " + JSON.stringify(config)); + console.log( (new Date()) + ": Configuration: " + JSON.stringify(config, undefined, 2) ); set_app(); @@ -103,7 +104,7 @@ const start = function(config) { } catch (e) { throw (createError('Could not create http(s) server--exiting: ' - + e.message, 500, 'system', 551)); + + (e.message || "") + " " + (e.stack || "").replace(/\n/g, " "), 500, 'system', 551)); } server.setTimeout(0); @@ -119,9 +120,11 @@ const start = function(config) { /* Set up handling for terminate signal */ process.on('SIGTERM', function() { var startTime = new Date(); - log.metrics(null, {startTime: startTime, complete: true}, "Deployment Handler API shutting down.") + log.metrics(null, {startTime: startTime, complete: true}, "Deployment Handler API shutting down."); + console.log( "startTime: " + startTime + ": Deployment Handler API shutting down." 
) server.close(function() { - log.metrics(null, {startTime: startTime, complete: true}, "Deployment Handler API server shut down.") + log.metrics(null, {startTime: startTime, complete: true}, "Deployment Handler API server shut down."); + console.log( "startTime: " + startTime + ": Deployment Handler API shutting down" ) }); }); @@ -134,7 +137,8 @@ const start = function(config) { process.on('beforeExit', function() { if (!loggedExit) { loggedExit = true; - log.metrics(null, {startTime: startTime, complete: true}, "Deployment Handler process exiting.") + log.metrics(null, {startTime: startTime, complete: true}, "Deployment Handler process exiting."); + console.log( "startTime: " + startTime + ": Deployment Handler process exiting." ) } }); }; @@ -147,11 +151,10 @@ const log = logging.getLogger(); conf.configure() .then(start) .catch(function(e) { - log.error(e.logCode ? e : createError( - 'Deployment-handler exiting due to start-up problem: ' + e.message, 500, - 'system', 552)); - console.error("Deployment-handler exiting due to startup problem: " + e.message); + const fatal_msg = 'Deployment-handler exiting due to start-up problem: ' + (e.message || "") + + " " + (e.stack || "").replace(/\n/g, " "); + log.error(e.logCode ? e : createError(fatal_msg, 500, 'system', 552)); + console.error(fatal_msg); }); module.exports.app = app; -module.exports.set_app = set_app; diff --git a/lib/auth.js b/lib/auth.js index 9ddd7b3..901f318 100644 --- a/lib/auth.js +++ b/lib/auth.js @@ -62,4 +62,4 @@ exports.checkAuth = function(req, res, next) { else { next(); // Nothing to do, no authentication required } -};
\ No newline at end of file +}; diff --git a/lib/cloudify.js b/lib/cloudify.js index 23e779a..2db460a 100644 --- a/lib/cloudify.js +++ b/lib/cloudify.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -22,7 +22,7 @@ const CLOUDIFY = "cloudify-manager"; const FINISHED = [ "terminated", "cancelled", "failed" ]; const RETRY_INTERVAL = 5000; // Every 5 seconds const MAX_TRIES = 720; // Up to 1 hour - +const DEFAULT_TENANT = "default_tenant"; const doRequest = require('./promise_request').doRequest; const repeat = require('./repeat'); const admzip = require('adm-zip'); @@ -64,7 +64,8 @@ ExeQueue.prototype.nextExecution = function(deployment_id) { } return depl.exe_queue[0]; }; -var exeQueue = new ExeQueue(); +const exeQueue = new ExeQueue(); +exports.exeQueue = exeQueue; // Delay function--returns a promise that's resolved after 'dtime' // milliseconds.` @@ -75,32 +76,32 @@ var delay = function(dtime) { }; // Get current status of a workflow execution -const getExecutionStatus = function(execution_id, mainReq) { - /* Defense: Some callers do not supply mainReq */ - mainReq = mainReq || {}; +const getExecutionStatus = function(req, execution_id) { var reqOptions = { method : "GET", uri : cfyAPI + "/executions/" + execution_id }; - addAuthToOptions(reqOptions); - return doRequest(reqOptions, null, CLOUDIFY, mainReq); + + addAuthToOptions(reqOptions, req); + + return doRequest(req, reqOptions, null, CLOUDIFY); }; // Poll for the result of a workflow execution until it's done -var getWorkflowResult = function(execution_id, mainReq) { +const getWorkflowResult = function(mainReq, execution_id) { /* Defense: Some callers do not supply mainReq */ mainReq = mainReq || {}; - logger.debug(mainReq.dcaeReqId, "Getting workflow result for 
execution id: " + execution_id); + logger.info(mainReq.dcaeReqId, "Getting workflow result for execution id: " + execution_id); // Function for testing if workflow is finished // Expects the result of getExecStatus var checkStatus = function(res) { - logger.debug(mainReq.dcaeReqId, "Checking result: " + JSON.stringify(res) + " ==> " + (res.json && res.json.status && FINISHED.indexOf(res.json.status) < 0)); + logger.info(mainReq.dcaeReqId, "Checking result: " + JSON.stringify(res) + " ==> " + (res.json && res.json.status && FINISHED.indexOf(res.json.status) < 0)); return res.json && res.json.status && FINISHED.indexOf(res.json.status) < 0; }; // Create execution status checker function - var getExecStatus = function() {return getExecutionStatus(execution_id, mainReq);}; + var getExecStatus = function() {return getExecutionStatus(mainReq, execution_id);}; return repeat.repeatWhile(getExecStatus, checkStatus, MAX_TRIES, RETRY_INTERVAL) .then( @@ -108,7 +109,7 @@ var getWorkflowResult = function(execution_id, mainReq) { /* Handle fulfilled promise from repeatWhile */ function(res) { - logger.debug(mainReq.dcaeReqId, 'workflow result: ' + JSON.stringify(res)); + logger.info(mainReq.dcaeReqId, 'workflow result: ' + JSON.stringify(res)); /* Successful completion */ if (res.json && res.json.status && res.json.status === 'terminated') { @@ -173,7 +174,9 @@ const startWorkflowExecution = function(mainReq, deployment_id, workflow_id, par "Accept" : "*/*" } }; - addAuthToOptions(reqOptions); + + addAuthToOptions(reqOptions, mainReq); + var body = { "deployment_id" : deployment_id, "workflow_id" : workflow_id @@ -181,18 +184,18 @@ const startWorkflowExecution = function(mainReq, deployment_id, workflow_id, par if (parameters) {body.parameters = parameters;} // Make the POST request - return doRequest(reqOptions, JSON.stringify(body), CLOUDIFY, mainReq); + return doRequest(mainReq, reqOptions, JSON.stringify(body), CLOUDIFY); }; //Initiate a workflow execution against a 
deployment -const initiateWorkflowExecution = function(deployment_id, workflow_id, parameters) { - return startWorkflowExecution(null, deployment_id, workflow_id, parameters) +const initiateWorkflowExecution = function(req, deployment_id, workflow_id, parameters) { + return startWorkflowExecution(req, deployment_id, workflow_id, parameters) .then(function(result) { - logger.debug(null, "Result from POSTing workflow execution start: " + JSON.stringify(result)); + logger.info(req.dcaeReqId, "Result from POSTing workflow execution start: " + JSON.stringify(result)); if (result.json && result.json.id) { return {deploymentId: deployment_id, workflowType: workflow_id, executionId: result.json.id}; } - logger.debug(null,"Did not get expected JSON body from POST to start workflow"); + logger.info(req.dcaeReqId,"Did not get expected JSON body from POST to start workflow"); var err = new Error("POST to start workflow got success response but no body"); err.status = err.code = 502; throw err; @@ -200,31 +203,32 @@ const initiateWorkflowExecution = function(deployment_id, workflow_id, parameter }; // Uploads a blueprint via the Cloudify API -exports.uploadBlueprint = function(bpid, blueprint) { +exports.uploadBlueprint = function(req, bpid, blueprint) { + logger.info(req.dcaeReqId, "uploadBlueprint " + bpid); // Cloudify API wants a gzipped tar of a directory, not the blueprint text - var zip = new admzip(); + const zip = new admzip(); zip.addFile('work/', new Buffer(0)); zip.addFile('work/blueprint.yaml', new Buffer(blueprint, 'utf8')); - var src = (zip.toBuffer()); + const zip_buffer = zip.toBuffer(); // Set up the HTTP PUT request - var reqOptions = { - method : "PUT", - uri : cfyAPI + "/blueprints/" + bpid, - headers : { - "Content-Type" : "application/octet-stream", - "Accept" : "*/*" - } + const reqOptions = { + method : "PUT", + uri : cfyAPI + "/blueprints/" + bpid, + headers : { + "Content-Type" : "application/octet-stream", + "Accept" : "*/*" + } }; - 
addAuthToOptions(reqOptions); + addAuthToOptions(reqOptions, req); // Initiate PUT request and return the promise for a result - return doRequest(reqOptions, src, CLOUDIFY); + return doRequest(req, reqOptions, zip_buffer, CLOUDIFY); }; // Creates a deployment from a blueprint -exports.createDeployment = function(dpid, bpid, inputs) { +exports.createDeployment = function(req, dpid, bpid, inputs) { // Set up the HTTP PUT request var reqOptions = { @@ -235,7 +239,7 @@ exports.createDeployment = function(dpid, bpid, inputs) { "Accept" : "*/*" } }; - addAuthToOptions(reqOptions); + addAuthToOptions(reqOptions, req); var body = { blueprint_id : bpid @@ -245,7 +249,7 @@ exports.createDeployment = function(dpid, bpid, inputs) { } // Make the PUT request to create the deployment - return doRequest(reqOptions, JSON.stringify(body), CLOUDIFY); + return doRequest(req, reqOptions, JSON.stringify(body), CLOUDIFY); }; // Initiate a workflow execution against a deployment @@ -258,19 +262,19 @@ exports.getWorkflowExecutionStatus = getExecutionStatus; exports.getWorkflowResult = getWorkflowResult; // Executes a workflow against a deployment and returns a promise for final result -exports.executeWorkflow = function(deployment_id, workflow_id, parameters) { - return initiateWorkflowExecution(deployment_id, workflow_id, parameters) +exports.executeWorkflow = function(req, deployment_id, workflow_id, parameters) { + return initiateWorkflowExecution(req, deployment_id, workflow_id, parameters) // Wait for the result .then (function(result) { - logger.debug(null, "Result from initiating workflow: " + JSON.stringify(result)); - return getWorkflowResult(result.executionId); + logger.info(req.dcaeReqId, "Result from initiating workflow: " + JSON.stringify(result)); + return getWorkflowResult(req, result.executionId); }); }; // Retrieves outputs for a deployment -exports.getOutputs = function(dpid) { +exports.getOutputs = function(req, dpid) { var reqOptions = { method : "GET", uri : cfyAPI + 
"/deployments/" + dpid + "/outputs", @@ -278,13 +282,14 @@ exports.getOutputs = function(dpid) { "Accept" : "*/*" } }; - addAuthToOptions(reqOptions); - return doRequest(reqOptions, null, CLOUDIFY); + addAuthToOptions(reqOptions, req); + + return doRequest(req, reqOptions, null, CLOUDIFY); }; // Get the output descriptions for a deployment -exports.getOutputDescriptions = function(dpid) { +exports.getOutputDescriptions = function(req, dpid) { var reqOptions = { method : "GET", uri : cfyAPI + "/deployments/" + dpid + "?include=outputs", @@ -292,31 +297,34 @@ exports.getOutputDescriptions = function(dpid) { "Accept" : "*/*" } }; - addAuthToOptions(reqOptions); - return doRequest(reqOptions, null, CLOUDIFY); + addAuthToOptions(reqOptions, req); + + return doRequest(req, reqOptions, null, CLOUDIFY); }; // Deletes a deployment -exports.deleteDeployment = function(dpid) { +exports.deleteDeployment = function(req, dpid) { var reqOptions = { method : "DELETE", uri : cfyAPI + "/deployments/" + dpid }; - addAuthToOptions(reqOptions); - return doRequest(reqOptions, null, CLOUDIFY); + addAuthToOptions(reqOptions, req); + + return doRequest(req, reqOptions, null, CLOUDIFY); }; // Deletes a blueprint -exports.deleteBlueprint = function(bpid) { +exports.deleteBlueprint = function(req, bpid) { var reqOptions = { method : "DELETE", uri : cfyAPI + "/blueprints/" + bpid }; - addAuthToOptions(reqOptions); - return doRequest(reqOptions, null, CLOUDIFY); + addAuthToOptions(reqOptions, req); + + return doRequest(req, reqOptions, null, CLOUDIFY); }; // Allow client to set the Cloudify API root address @@ -329,10 +337,16 @@ exports.setCredentials = function(user, password) { cfyAuth = cfyAuth || (user + ':' + password); }; -function addAuthToOptions(reqOptions) { +function addAuthToOptions(reqOptions, req) { + if (!!cfyAuth && cfyAuth !== "undefined:undefined") { reqOptions.auth = cfyAuth; } + reqOptions.headers = reqOptions.headers || {}; + reqOptions.headers.Tenant = 
req.query.cfy_tenant_name || DEFAULT_TENANT; + + logger.debug(req.dcaeReqId, "Calling " + reqOptions.uri + " with Tenant: " + reqOptions.headers.Tenant ); + } // Set a logger @@ -346,12 +360,13 @@ exports.getNodeInstances = function (mainReq, on_next_node_instances, offset) { method : "GET", uri : cfyAPI + "/node-instances?_include=id,deployment_id,runtime_properties&_offset=" + offset }; - addAuthToOptions(reqOptions); - logger.debug(mainReq.dcaeReqId, "getNodeInstances: " + JSON.stringify(reqOptions)); - return doRequest(reqOptions, null, CLOUDIFY, mainReq) + addAuthToOptions(reqOptions, mainReq); + + logger.info(mainReq.dcaeReqId, "getNodeInstances: " + JSON.stringify(reqOptions)); + return doRequest(mainReq, reqOptions, null, CLOUDIFY) .then(function(cloudify_response) { - logger.debug(mainReq.dcaeReqId, "getNodeInstances response: " + JSON.stringify(cloudify_response)); + logger.info(mainReq.dcaeReqId, "getNodeInstances response: " + JSON.stringify(cloudify_response)); var response = {}; cloudify_response = cloudify_response && cloudify_response.json; if (!cloudify_response || !Array.isArray(cloudify_response.items)) { @@ -364,7 +379,7 @@ exports.getNodeInstances = function (mainReq, on_next_node_instances, offset) { response.message = 'got no more node_instances'; return response; } - logger.debug(mainReq.dcaeReqId, 'getNodeInstances got node_instances ' + cloudify_response.items.length); + logger.info(mainReq.dcaeReqId, 'getNodeInstances got node_instances ' + cloudify_response.items.length); if (typeof on_next_node_instances === 'function') { on_next_node_instances(cloudify_response.items); } @@ -396,20 +411,20 @@ const runQueuedExecution = function(mainReq, deployment_id, workflow_id, paramet + " with params(" + JSON.stringify(parameters || {}) + ")"; startWorkflowExecution(mainReq, deployment_id, workflow_id, parameters) .then(function(result) { - logger.debug(mainReq.dcaeReqId, "result of start the execution for" + exe_deployment_str + ": " + 
JSON.stringify(result)); + logger.info(mainReq.dcaeReqId, "result of start the execution for" + exe_deployment_str + ": " + JSON.stringify(result)); execution_id = result.json && result.json.id; if (!execution_id) { throw createError("failed to start execution - no execution_id for" + exe_deployment_str, 553, "api", 553, CLOUDIFY); } exeQueue.setExecutionId(deployment_id, execution_id); - return getWorkflowResult(execution_id, mainReq); + return getWorkflowResult(mainReq, execution_id); }) .then(function(result) { - logger.debug(mainReq.dcaeReqId, 'successfully finished execution: ' + execution_id + " for" + exe_deployment_str); + logger.info(mainReq.dcaeReqId, 'successfully finished execution: ' + execution_id + " for" + exe_deployment_str); var nextExecution = exeQueue.nextExecution(deployment_id); if (nextExecution) { - logger.debug(nextExecution.mainReq.dcaeReqId, "next execution for deployment_id " + deployment_id + logger.info(nextExecution.mainReq.dcaeReqId, "next execution for deployment_id " + deployment_id + " to " + nextExecution.workflow_id + " with params(" + JSON.stringify(nextExecution.parameters || {}) + ")"); runQueuedExecution(nextExecution.mainReq, deployment_id, nextExecution.workflow_id, nextExecution.parameters); @@ -455,7 +470,7 @@ exports.executeOperation = function (mainReq, deployment_id, operation, operatio if (exeQueue.isDeploymentBusy(deployment_id)) { exeQueue.queueUpExecution(mainReq, deployment_id, workflow_id, parameters); - logger.debug(mainReq.dcaeReqId, "deployment busy - queue up execution for deployment_id " + deployment_id + logger.info(mainReq.dcaeReqId, "deployment busy - queue up execution for deployment_id " + deployment_id + " to " + workflow_id + " with params(" + JSON.stringify(parameters || {}) + ")"); return; } diff --git a/lib/config.js b/lib/config.js index e44e9b5..fd7d38c 100644 --- a/lib/config.js +++ b/lib/config.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. 
+Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -148,7 +148,8 @@ const getTLSCredentials = function() { } exports.configure = function() { - var config = {}; + const config = {}; + config.server_instance_uuid = utils.generateId(); /* Get configuration from configuration store */ return getFileContents(PACKAGE_JSON_FILE) @@ -213,6 +214,7 @@ exports.configure = function() { throw new Error ("Required configuration elements missing: " + missing.join(',')); config = null; } + console.log( (new Date()) + ": config -> " + JSON.stringify(config, undefined, 2)); return config; }); }; diff --git a/lib/consul.js b/lib/consul.js index 3a3257b..40de84b 100644 --- a/lib/consul.js +++ b/lib/consul.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -29,7 +29,7 @@ module.exports = { * If there is no such key, resolve to null. */ getKey: function(key) { - return doRequest({method: 'GET', uri: CONSUL_URL + KEY + key + '?raw'}, null, CONSUL) + return doRequest(null, {method: 'GET', uri: CONSUL_URL + KEY + key + '?raw'}, null, CONSUL) .then(function(res) { return res.json || res.body; }) @@ -51,7 +51,7 @@ module.exports = { * If the service is not found, returns a zero-length array. */ getService: function(serviceId) { - return doRequest({method: 'GET', uri: CONSUL_URL + SERVICE + serviceId}, null, CONSUL) + return doRequest(null, {method: 'GET', uri: CONSUL_URL + SERVICE + serviceId}, null, CONSUL) .then(function(res){ return res.json.map(function(r) { /* Address for external service is in r.Address with r.ServiceAddress empty */ @@ -59,4 +59,4 @@ module.exports = { }); }); } -};
\ No newline at end of file +}; diff --git a/lib/dcae-deployments.js b/lib/dcae-deployments.js index 38dc3c4..193f6b9 100644 --- a/lib/dcae-deployments.js +++ b/lib/dcae-deployments.js @@ -1,16 +1,16 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved. -Licensed under the Apache License, Version 2.0 (the "License"); +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -Unless required by applicable law or agreed to in writing, +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -CONDITIONS OF ANY KIND, either express or implied. +CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ @@ -36,11 +36,18 @@ const inventory = inv({url: config.inventory.url}); /* Set up middleware stack for initial processing of request */ app.use(middleware.checkType('application/json')); // Validate type app.use(bodyParser.json({strict: true})); // Parse body as JSON +app.use(function(req, res, next) { + log.info(req.dcaeReqId, + "new req: " + req.method + " " + req.originalUrl + + " from: " + req.ip + " body: " + JSON.stringify(req.body) + ); + next(); +}); /* Return a promise for a blueprint for the given service type ID */ -const getBlueprint = function(serviceTypeId) { - return inventory.getBlueprintByType(serviceTypeId) +const getBlueprint = function(req, serviceTypeId) { + return inventory.getBlueprintByType(req, serviceTypeId) .then(function (blueprintInfo) { if (!blueprintInfo.blueprint) { var e = new Error("No service type with ID " + serviceTypeId); @@ -48,7 +55,7 @@ const getBlueprint = function(serviceTypeId) { throw e; } return blueprintInfo; - }) + }) }; /* Generate self and status links object for responses */ @@ -57,7 +64,7 @@ const createLinks = function(req, deploymentId, executionId) { return { self: baseURL, status: baseURL + '/operation/' + executionId - }; + }; }; /* Generate a success response body for PUT and DELETE operations */ @@ -71,13 +78,11 @@ const createResponse = function(req, result) { /* Look up running (or in process of deploying) instances of the given service type */ app.get('/', function (req, res, next) { var services = [] - - - var searchTerm = {}; + var searchTerm; req.query['serviceTypeId'] && (searchTerm = {typeId: req.query['serviceTypeId']}); - - inventory.getServicesByType(searchTerm) + + inventory.getServicesByType(req, searchTerm) .then(function (result) { var deployments = result.map(function(service){ return { @@ -92,123 +97,123 @@ app.get('/', function (req, res, next) { /* Accept an incoming deployment request */ app.put('/:deploymentId', function(req, res, next) { - - log.debug(req.dcaeReqId, "body: " + 
JSON.stringify(req.body)); - + /* Make sure there's a serviceTypeId in the body */ if (!req.body['serviceTypeId']) { var e = new Error ('Missing required parameter serviceTypeId'); e.status = 400; throw e; } - + /* Make sure the deploymentId doesn't already exist */ - inventory.verifyUniqueDeploymentId(req.params['deploymentId']) + inventory.verifyUniqueDeploymentId(req, req.params['deploymentId']) /* Get the blueprint for this service type */ .then(function(res) { - return getBlueprint(req.body['serviceTypeId']); + return getBlueprint(req, req.body['serviceTypeId']); }) - - /* Add this new service instance to inventory - * Easier to remove from inventory if deployment fails than vice versa + + /* Add this new service instance to inventory + * Easier to remove from inventory if deployment fails than vice versa * Also lets client check for deployed/deploying instances if client wants to limit number of instances */ .then(function (blueprintInfo) { req.dcaeBlueprint = blueprintInfo.blueprint; - return inventory.addService(req.params['deploymentId'], blueprintInfo.typeId, "dummyVnfId", "dummyVnfType", "dummyLocation"); + return inventory.addService(req, req.params['deploymentId'], blueprintInfo.typeId, "dummyVnfId", "dummyVnfType", "dummyLocation"); }) - + /* Upload blueprint, create deployment and start install workflow (but don't wait for completion */ .then (function() { req.dcaeAddedToInventory = true; - return deploy.launchBlueprint(req.params['deploymentId'], req.dcaeBlueprint, req.body['inputs']); + return deploy.launchBlueprint(req, req.params['deploymentId'], req.dcaeBlueprint, req.body['inputs']); }) - + /* Send the HTTP response indicating workflow has started */ .then(function(result) { res.status(202).json(createResponse(req, result)); log.audit(req, 202, "Execution ID: " + result.executionId); return result; }) - + /* Finish deployment--wait for the install workflow to complete, retrieve and annotate outputs */ .then(function(result) { - return 
deploy.finishInstallation(result.deploymentId, result.executionId); + return deploy.finishInstallation(req, result.deploymentId, result.executionId); }) - + /* Log completion in audit log */ .then (function(result) { log.audit(req, 200, "Deployed id: " + req.params['deploymentId']); }) - + /* All errors show up here */ - .catch(function(error) { - + .catch(function(error) { + /* If we haven't already sent a response, let the error handler send response and log the error */ if (!res.headersSent) { - + /* If we made an inventory entry, remove it */ if (req.dcaeAddedToInventory) { - inventory.deleteService(req.params['deploymentId']) + inventory.deleteService(req, req.params['deploymentId']) .catch(function(error) { log.error(error, req); }); } - + next(error); } else { /* Already sent the response, so just log error */ /* Don't remove from inventory, because there is a deployment on CM that might need to be removed */ error.message = "Error deploying deploymentId " + req.params['deploymentId'] + ": " + error.message + + " " + (error.stack || "").replace(/\n/g, " "); log.error(error, req); log.audit(req, 500, error.message); - } + } }); }); /* Delete a running service instance */ app.delete('/:deploymentId', function(req, res, next) { - + /* Launch the uninstall workflow */ - deploy.launchUninstall(req.params['deploymentId']) - + deploy.launchUninstall(req, req.params['deploymentId']) + /* Delete the service from inventory */ .then(function(result) { - return inventory.deleteService(req.params['deploymentId']) + return inventory.deleteService(req, req.params['deploymentId']) .then (function() { return result; }); }) - + /* Send the HTTP response indicating workflow has started */ .then(function(result) { res.status(202).send(createResponse(req, result)); log.audit(req, 202, "ExecutionId: " + result.executionId); return result; }) - + /* Finish the delete processing--wait for the uninstall to complete, delete deployment, delete blueprint */ .then(function(result) { - 
return deploy.finishUninstall(result.deploymentId, result.executionId); + return deploy.finishUninstall(req, result.deploymentId, result.executionId); }) - + /* Log completion in audit log */ .then(function(result) { - log.audit(req, 200, "Undeployed id: " + req.params['deploymentId']); + log.audit(req, 200, "Undeployed id: " + req.params['deploymentId']); }) - + /* All errors show up here */ .catch(function(error) { /* If we haven't already sent a response, give it to the error handler to send response */ - if (!res.headersSent) { + if (!res.headersSent) { next(error); } else { /* Error happened after we sent the response--log it */ error.message = "Error undeploying deploymentId " + req.params['deploymentId'] + ": " + error.message + + " " + (error.stack || "").replace(/\n/g, " "); log.error(error, req); log.audit(req, 500, error.message); } @@ -217,8 +222,8 @@ app.delete('/:deploymentId', function(req, res, next) { /* Get the status of a workflow execution */ app.get('/:deploymentId/operation/:executionId', function(req, res, next){ - deploy.getExecutionStatus(req.params['executionId']) - + deploy.getExecutionStatus(req, req.params['executionId']) + /* Send success response */ .then(function(result) { result.requestId = req.dcaeReqId; @@ -226,9 +231,9 @@ app.get('/:deploymentId/operation/:executionId', function(req, res, next){ res.status(200).json(result); log.audit(req, 200, "Workflow type: " + result.operationType + " -- execution status: " + result.status); }) - + .catch(next); /* Let the error handler send the response and log the error */ - + }); -module.exports = app;
\ No newline at end of file +module.exports = app; diff --git a/lib/deploy.js b/lib/deploy.js index 7f83620..ee31fd3 100644 --- a/lib/deploy.js +++ b/lib/deploy.js @@ -1,16 +1,16 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved. -Licensed under the Apache License, Version 2.0 (the "License"); +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -Unless required by applicable law or agreed to in writing, +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -CONDITIONS OF ANY KIND, either express or implied. +CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ @@ -58,7 +58,7 @@ var parseContent = function(input) { // create a normalized representation of errors, whether they're a node.js Error or a Cloudify API error var normalizeError = function (err) { var e; - + if (err instanceof Error) { /* node.js system error */ e = createError("Error communicating with CM: " + err.message, 504, "system", 202, 'cloudify-manager'); @@ -71,7 +71,7 @@ var normalizeError = function (err) { var status = err.status || 502; var cfyCode = "UNKNOWN"; var cfyMessage; - + if (err.body) { var p = parseContent(err.body); if (p.json) { @@ -84,28 +84,28 @@ var normalizeError = function (err) { } message = "Status " + status + " from CM API -- error code: " + cfyCode + " -- message: " + cfyMessage; } - + /* Pass through 400-level status, recast 500-level */ var returnStatus = (err.status > 499) ? 
502 : err.status; e = createError(message, returnStatus, "api", 502, 'cloudify-manager'); } - + return e; }; // Augment the raw outputs from a deployment with the descriptions from the blueprint -var annotateOutputs = function (id, rawOutputs) { +var annotateOutputs = function (req, id, rawOutputs) { return new Promise(function(resolve, reject) { - + var outItems = Object.keys(rawOutputs); - + if (outItems.length < 1) { // No output items, so obviously no descriptions, just return empty object resolve({}); } else { // Call Cloudify to get the descriptions - cfy.getOutputDescriptions(id) + cfy.getOutputDescriptions(req, id) .then(function(res) { // Assemble an outputs object with values from raw output and descriptions just obtained var p = parseContent(res.body); @@ -115,16 +115,16 @@ var annotateOutputs = function (id, rawOutputs) { outs[i] = {value: rawOutputs[i]}; if (p.content.outputs[i] && p.content.outputs[i].description) { outs[i].description = p.content.outputs[i].description; - } + } }); resolve(outs); } else { reject({code: "API_INVALID_RESPONSE", message: "Invalid response for output descriptions query"}); - } + } }); } - + }); }; @@ -137,41 +137,43 @@ var delay = function(dtime) { // Go through the Cloudify API call sequence to upload blueprint, create deployment, and launch install workflow // (but don't wait for the workflow to finish) -const launchBlueprint = function(id, blueprint, inputs) { - logger.debug(null, "deploymentId: " + id + " starting blueprint upload"); +const launchBlueprint = function(req, id, blueprint, inputs) { + logger.info(req.dcaeReqId, "deploymentId: " + id + " starting blueprint upload"); // Upload blueprint - return cfy.uploadBlueprint(id, blueprint) - + return cfy.uploadBlueprint(req, id, blueprint) + // Create deployment .then (function(result) { - logger.debug(null, "deploymentId: " + id + " blueprint uploaded"); + logger.info(req.dcaeReqId, "deploymentId: " + id + " blueprint uploaded"); // Create deployment - return 
cfy.createDeployment(id, id, inputs); + return cfy.createDeployment(req, id, id, inputs); }) - + // Launch the workflow, but don't wait for it to complete .then(function(result){ - logger.debug(null, "deploymentId: " + id + " deployment created"); + logger.info(req.dcaeReqId, "deploymentId: " + id + " deployment created"); return delay(DELAY_INSTALL_WORKFLOW) - .then(function(){ - return cfy.initiateWorkflowExecution(id, 'install'); + .then(function(){ + return cfy.initiateWorkflowExecution(req, id, 'install'); }); }) .catch(function(error) { - logger.debug(null, "Error: " + error + " for launch blueprint for deploymentId " + id); + logger.info(req.dcaeReqId, "Error: " + JSON.stringify(error) + " for launch blueprint for deploymentId " + id); throw normalizeError(error); }); }; exports.launchBlueprint = launchBlueprint; // Finish installation launched with launchBlueprint -const finishInstallation = function(deploymentId, executionId) { - logger.debug(null, "finishInstallation: " + deploymentId + " -- executionId: " + executionId); - return cfy.getWorkflowResult(executionId) +const finishInstallation = function(req, deploymentId, executionId) { + logger.info(req.dcaeReqId, "finishInstallation: " + deploymentId + " -- executionId: " + executionId); + return cfy.getWorkflowResult(req, executionId) .then (function(result){ - logger.debug(null, "deploymentId: " + deploymentId + " install workflow successfully executed"); + logger.info(req.dcaeReqId, "deploymentId: " + deploymentId + " install workflow successfully executed"); // Retrieve the outputs from the deployment, as specified in the blueprint - return delay(DELAY_RETRIEVE_OUTPUTS).then(function() { return cfy.getOutputs(deploymentId); }); + return delay(DELAY_RETRIEVE_OUTPUTS).then(function() { + return cfy.getOutputs(req, deploymentId); + }); }) .then(function(result) { // We have the raw outputs from the deployment but not annotated with the descriptions @@ -182,45 +184,49 @@ const finishInstallation = 
function(deploymentId, executionId) { if (p.content.outputs) { rawOutputs = p.content.outputs; } - } + } } - logger.debug(null, "output retrieval result for " + deploymentId + ": " + JSON.stringify(result)); - return annotateOutputs(deploymentId, rawOutputs); + logger.info(req.dcaeReqId, "output retrieval result for " + deploymentId + ": " + JSON.stringify(result)); + return annotateOutputs(req, deploymentId, rawOutputs); }) .catch(function(err) { - logger.debug(null, "Error finishing install workflow: " + err + " -- " + JSON.stringify(err)); + logger.info(req.dcaeReqId, "Error finishing install workflow: " + err + " -- " + JSON.stringify(err)); throw normalizeError(err); }); }; exports.finishInstallation = finishInstallation; // Initiate uninstall workflow against a deployment, but don't wait for workflow to finish -const launchUninstall = function(deploymentId) { - logger.debug(null, "deploymentId: " + deploymentId + " starting uninstall workflow"); +const launchUninstall = function(req, deploymentId) { + logger.info(req.dcaeReqId, "deploymentId: " + deploymentId + " starting uninstall workflow"); // Run uninstall workflow - return cfy.initiateWorkflowExecution(deploymentId, 'uninstall') + return cfy.initiateWorkflowExecution(req, deploymentId, 'uninstall') .then(function(result) { return result; }) .catch(function(err) { - logger.debug(null, "Error initiating uninstall workflow: " + err + " -- " + JSON.stringify(err)); + logger.info(req.dcaeReqId, "Error initiating uninstall workflow: " + err + " -- " + JSON.stringify(err)); throw normalizeError(err); - }); + }); }; exports.launchUninstall = launchUninstall; -const finishUninstall = function(deploymentId, executionId) { - logger.debug(null, "finishUninstall: " + deploymentId + " -- executionId: " + executionId); - return cfy.getWorkflowResult(executionId) +const finishUninstall = function(req, deploymentId, executionId) { + logger.info(req.dcaeReqId, "finishUninstall: " + deploymentId + " -- executionId: " + 
executionId); + return cfy.getWorkflowResult(req, executionId) .then (function(result){ - logger.debug(null, "deploymentId: " + deploymentId + " uninstall workflow successfully executed"); + logger.info(req.dcaeReqId, "deploymentId: " + deploymentId + " uninstall workflow successfully executed"); // Delete the deployment - return delay(DELAY_DELETE_DEPLOYMENT).then(function() {return cfy.deleteDeployment(deploymentId);}); + return delay(DELAY_DELETE_DEPLOYMENT).then(function() { + return cfy.deleteDeployment(req, deploymentId); + }); }) .then (function(result){ - logger.debug(null, "deploymentId: " + deploymentId + " deployment deleted"); + logger.info(req.dcaeReqId, "deploymentId: " + deploymentId + " deployment deleted"); // Delete the blueprint - return delay(DELAY_DELETE_BLUEPRINT).then(function() {return cfy.deleteBlueprint(deploymentId);}); + return delay(DELAY_DELETE_BLUEPRINT).then(function() { + return cfy.deleteBlueprint(req, deploymentId); + }); }) .then (function(result){ return result; @@ -228,19 +234,19 @@ const finishUninstall = function(deploymentId, executionId) { .catch (function(err){ throw normalizeError(err); }); - + }; exports.finishUninstall = finishUninstall; // Get the status of a workflow execution -exports.getExecutionStatus = function (exid) { - return cfy.getWorkflowExecutionStatus(exid) +exports.getExecutionStatus = function (req, exid) { + return cfy.getWorkflowExecutionStatus(req, exid) .then(function(res){ - + var result = { operationType: res.json.workflow_id }; - + // Map execution status if (res.json.status === "terminated") { result.status = "succeeded"; @@ -254,11 +260,11 @@ exports.getExecutionStatus = function (exid) { else { result.status = "processing"; } - + if (res.json.error) { result.error = res.json.error; } - logger.debug(null, "getExecutionStatus result: " + JSON.stringify(result)); + logger.info(req.dcaeReqId, "getExecutionStatus result: " + JSON.stringify(result)); return result; }) .catch(function(error) { @@ 
-267,37 +273,37 @@ exports.getExecutionStatus = function (exid) { }; // Go through the Cloudify API call sequence to do a deployment -exports.deployBlueprint = function(id, blueprint, inputs) { +exports.deployBlueprint = function(req, id, blueprint, inputs) { + + // Upload blueprint, create deployment, and initiate install workflow + return launchBlueprint(req, id, blueprint, inputs) - // Upload blueprint, create deployment, and initiate install workflow - return launchBlueprint(id, blueprint, inputs) - // Wait for the workflow to complete .then( - + // launchBlueprint promise fulfilled -- finish installation function(result){ - return finishInstallation(result.deploymentId, result.executionId); // Will throw normalized error if it fails + return finishInstallation(req, result.deploymentId, result.executionId); // Will throw normalized error if it fails }, - + // launchBlueprint promise rejected -- report error function(err) { - throw normalizeError(err); + throw normalizeError(err); }); }; // Go through the Cloudify API call sequence to do an undeployment of a previously deployed blueprint -exports.undeployDeployment = function(id) { - logger.debug(null, "deploymentId: " + id + " starting uninstall workflow"); - +exports.undeployDeployment = function(req, id) { + logger.info(req.dcaeReqId, "deploymentId: " + id + " starting uninstall workflow"); + // Run launch uninstall workflow - return launchUninstall(id) - + return launchUninstall(req, id) + // launchUninstall promise fulfilled -- finish uninstall .then (function(result){ - return finishUninstall(result.deploymentId, result.executionId); // Will throw normalized error if it fails + return finishUninstall(req, result.deploymentId, result.executionId); // Will throw normalized error if it fails }, - + // launchUninstall promise rejected -- report error function(err){ throw normalizeError(err); diff --git a/lib/info.js b/lib/info.js index f6b37a8..424f424 100644 --- a/lib/info.js +++ b/lib/info.js @@ -1,5 +1,5 @@ 
/* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -19,27 +19,27 @@ See the License for the specific language governing permissions and limitations "use strict"; const router = require('express').Router(); - -/* Pick up config exported by main */ -const config = process.mainModule.exports.config; +const logger = require('./logging').getLogger(); /* Accept an incoming event */ router.get('/', function(req, res) { - res.json( - { - "server" : { - "name": config.name, - "description": config.description, - "version": config.version, - "branch": config.branch, - "commit": config.commit, - "commit_datetime": config.commit_datetime - }, - "apiVersion": config.apiVersion, - "links": config.apiLinks - } - ); - require('./logging').getLogger().audit(req, 200); + /* Pick up config exported by main */ + const config = process.mainModule.exports.config; + const info = { + "server" : { + "name": config.name, + "description": config.description, + "version": config.version, + "branch": config.branch, + "commit": config.commit, + "commit_datetime": config.commit_datetime, + "server_instance_uuid": config.server_instance_uuid + }, + "apiVersion": config.apiVersion, + "links": config.apiLinks + }; + res.json(info); + logger.audit(req, 200, JSON.stringify(info)); }); -module.exports = router;
\ No newline at end of file +module.exports = router; diff --git a/lib/inventory.js b/lib/inventory.js index c2e13c9..5935067 100644 --- a/lib/inventory.js +++ b/lib/inventory.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -55,7 +55,7 @@ module.exports = function(options) { return { /* Add a DCAE service to the inventory. Done after a deployment.*/ - addService: function(deploymentId, serviceType, vnfId, vnfType, vnfLocation, outputs) { + addService: function(req, deploymentId, serviceType, vnfId, vnfType, vnfLocation, outputs) { /* Create the service description */ var serviceDescription = @@ -83,23 +83,23 @@ module.exports = function(options) { json: serviceDescription }; - return doRequest(reqOptions, null, INVENTORY); + return doRequest(req, reqOptions, null, INVENTORY); }, /* Remove a DCAE service from the inventory. Done after an undeployment. 
*/ - deleteService: function(serviceId) { - return doRequest({method: "DELETE", uri: url + INV_SERVICES + "/" + serviceId}, null, INVENTORY); + deleteService: function(req, serviceId) { + return doRequest(req, {method: "DELETE", uri: url + INV_SERVICES + "/" + serviceId}, null, INVENTORY); }, /* Find running/deploying instances of services (with a given type name, if specified) */ - getServicesByType: function(query) { + getServicesByType: function(req, query) { var options = { method: 'GET', uri: url + INV_SERVICES, - qs: query || {} + qs: query }; - return doRequest(options, null, INVENTORY) + return doRequest(req, options, null, INVENTORY) .then (function (result) { var services = []; var content = JSON.parse(result.body); @@ -113,8 +113,8 @@ module.exports = function(options) { }, /* Find a blueprint given the service type ID -- return blueprint and type ID */ - getBlueprintByType: function(serviceTypeId) { - return doRequest({ + getBlueprintByType: function(req, serviceTypeId) { + return doRequest(req, { method: "GET", uri: url + INV_SERV_TYPES + '/' + serviceTypeId }, null, INVENTORY) @@ -138,8 +138,8 @@ module.exports = function(options) { * deployment ID as service name. If it doesn't exist, the function * resolves its promise. If it *does* exist, then it throws an error. */ - verifyUniqueDeploymentId: function(deploymentId) { - return doRequest({ + verifyUniqueDeploymentId: function(req, deploymentId) { + return doRequest(req, { method: "GET", uri: url + INV_SERVICES + "/" + deploymentId }, null, INVENTORY) diff --git a/lib/logging.js b/lib/logging.js index a21f37e..8d94f87 100644 --- a/lib/logging.js +++ b/lib/logging.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -130,6 +130,7 @@ const DEBUG_MARKER = '^'; const formatAuditRecord = function(req, status, extra) { var rec = new Array(AUDIT_NFIELDS); const end = new Date(); + rec[AUDIT_INSTUUID] = (process.mainModule.exports.config || {}).server_instance_uuid || ""; rec[AUDIT_END] = end.toISOString(); rec[AUDIT_BEGIN] = req.startTime.toISOString(); rec[AUDIT_REQID] = req.dcaeReqId; @@ -161,6 +162,7 @@ const formatAuditRecord = function(req, status, extra) { const formatMetricsRecord = function(req, opInfo, extra) { var rec = new Array(METRICS_NFIELDS); const end = new Date(); + rec[METRICS_INSTUUID] = (process.mainModule.exports.config || {}).server_instance_uuid || ""; rec[METRICS_END] = end.toISOString(); rec[METRICS_BEGIN] = opInfo.startTime.toISOString(); @@ -257,6 +259,10 @@ exports.getLogger = function() { metricsLogger.info(formatMetricsRecord(req, opInfo, extra)); }, + info: function(reqId, msg) { + debugLogger.info(formatDebugRecord(reqId, msg)); + }, + debug: function(reqId, msg) { debugLogger.debug(formatDebugRecord(reqId, msg)); } diff --git a/lib/middleware.js b/lib/middleware.js index 183cf77..ee39863 100644 --- a/lib/middleware.js +++ b/lib/middleware.js @@ -1,16 +1,16 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved. -Licensed under the Apache License, Version 2.0 (the "License"); +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -Unless required by applicable law or agreed to in writing, +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -CONDITIONS OF ANY KIND, either express or implied. +CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and limitations under the License. */ @@ -32,12 +32,19 @@ exports.assignId = function(req, res, next) { /* Error handler -- send error with JSON body */ exports.handleErrors = function(err, req, res, next) { - var status = err.status || 500; - var msg = err.message || err.body || 'unknown error' - res.status(status).type('application/json').send({status: status, message: msg }); - log.audit(req, status, msg); + const response = { + status : err.status || 500, + message : err.message || err.body || 'unknown error' + }; + if (err.stack) { + response.stack = err.stack.split("\n"); + } + + res.status(response.status).type('application/json').send(response); + log.audit(req, response.status, JSON.stringify(response)); - if (status >= 500) { + if (response.status >= 500) { + err.message = response.message + (err.stack && " " + response.stack.join(', ')); log.error(err, req); } }; @@ -55,7 +62,7 @@ exports.checkType = function(type){ var err = new Error ('Content-Type must be \'' + type +'\''); err.status = 415; next (err); - } + } }; }; @@ -70,7 +77,7 @@ exports.checkProps = function(props) { } else { next(); - } + } }; }; diff --git a/lib/policy.js b/lib/policy.js index 620870c..1aefc8a 100644 --- a/lib/policy.js +++ b/lib/policy.js @@ -1,181 +1,364 @@ -/*
-Copyright(c) 2017 AT&T Intellectual Property. All rights reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
-CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and limitations under the License.
-*/
-
-/**
- * handling policy updates
- */
-
-"use strict";
-
-const POLICY_UPDATE_OPERATION = "dcae.interfaces.policy.policy_update";
-
-const config = process.mainModule.exports.config;
-const createError = require('./dispatcher-error').createDispatcherError;
-const logger = require('./logging').getLogger();
-
-var cloudify = require("./cloudify.js");
-
-// Set config for cloudify interface library
-cloudify.setAPIAddress(config.cloudify.url);
-cloudify.setCredentials(config.cloudify.user, config.cloudify.password);
-cloudify.setLogger(logger);
-
-/**
- * receive the policy-updated message from the policy-handler
- */
-function policyUpdate(req, res, next) {
- var latest_policies = JSON.stringify((req.body && req.body.latest_policies) || {});
- logger.debug(req.dcaeReqId, "policyUpdate " + req.originalUrl + " " + latest_policies);
- /**
- * reply to and free up the policy_handler
- */
- res.json({});
-
- latest_policies = JSON.parse(latest_policies);
- /**
- * filter out the policies to what is deployed in components and needs updating (new policyVersion)
- */
- var policy_deployments = {};
- var policy_ids = {};
-
- cloudify.getNodeInstances(req, function(node_instances) {
- node_instances.forEach(node_instance => {
- if (!node_instance.runtime_properties || !node_instance.runtime_properties.policies) {
- return;
- }
- var deployment = policy_deployments[node_instance.deployment_id] || {
- "deployment_id": node_instance.deployment_id, "policies": {}, "component_ids": []
- };
-
- logger.debug(req.dcaeReqId, "have policy on node_instance: " + JSON.stringify(node_instance));
- var have_policies = false;
- Object.keys(node_instance.runtime_properties.policies).forEach(policy_id => {
- var deployed_policy = node_instance.runtime_properties.policies[policy_id];
- var latest_policy = latest_policies[policy_id];
- if (!latest_policy || !latest_policy.policy_body
- || isNaN(latest_policy.policy_body.policyVersion)
- || latest_policy.policy_body.policyVersion
- === (deployed_policy.policy_body && deployed_policy.policy_body.policyVersion)) {
- return;
- }
- have_policies = true;
- deployment.policies[policy_id] = latest_policy;
- policy_ids[policy_id] = true;
- });
- if (have_policies) {
- deployment.component_ids.push(node_instance.id);
- policy_deployments[deployment.deployment_id] = deployment;
- }
- });
-
- logger.debug(req.dcaeReqId, "collected policy_deployments to update " + JSON.stringify(policy_deployments));
- })
- .then(function(result) {
- logger.debug(req.dcaeReqId, "finished loading policy_deployments" + JSON.stringify(result));
- if (result.status !== 200) {
- const error_msg = "failed to retrieve component policies from cloudify " + result.message;
- logger.error(createError(error_msg, result.status, "api", 502, 'cloudify-manager'), req);
- logger.audit(req, result.status, error_msg);
- return;
- }
-
- var deployment_ids = Object.keys(policy_deployments);
- var policy_id_count = Object.keys(policy_ids).length;
- if (!deployment_ids.length) {
- const msg = "no updated policies to apply to deployments";
- logger.debug(req.dcaeReqId, msg);
- logger.audit(req, result.status, msg);
- return;
- }
- const msg = "going to apply updated policies[" + policy_id_count + "] to deployments " + deployment_ids.length;
- logger.debug(req.dcaeReqId, msg + ": " + JSON.stringify(deployment_ids));
- logger.audit(req, result.status, msg);
- deployment_ids.forEach(deployment_id => {
- var deployment = policy_deployments[deployment_id];
- deployment.policies = Object.keys(deployment.policies).map(policy_id => {
- return deployment.policies[policy_id];
- });
-
- logger.debug(req.dcaeReqId, "ready to execute-operation policy-update on deployment " + JSON.stringify(deployment));
- cloudify.executeOperation(req, deployment.deployment_id, POLICY_UPDATE_OPERATION,
- {'updated_policies': deployment.policies}, deployment.component_ids);
- });
- });
-}
-
-/**
- * retrieve all component-policies from cloudify
- */
-function getComponentPoliciesFromCloudify(req, res, next) {
- logger.debug(req.dcaeReqId, "getComponentPoliciesFromCloudify " + req.originalUrl);
- var response = {"requestId": req.dcaeReqId};
- response.started = new Date();
- response.component_policies = [];
- response.component_ids = [];
- response.node_instances = [];
-
- cloudify.getNodeInstances(req, function(node_instances) {
- Array.prototype.push.apply(response.node_instances, node_instances);
- node_instances.forEach(node_instance => {
- if (!node_instance.runtime_properties || !node_instance.runtime_properties.policies) {
- return;
- }
-
- var policies_count = 0;
- Object.keys(node_instance.runtime_properties.policies).forEach(policy_id => {
- ++policies_count;
- var policy = node_instance.runtime_properties.policies[policy_id];
- policy.component_id = node_instance.id;
- policy.deployment_id = node_instance.deployment_id;
- response.component_policies.push(policy);
- });
- if (policies_count) {
- response.component_ids.push({
- "component_id" : node_instance.id,
- "policies_count" : policies_count
- });
- }
- });
-
- logger.debug(req.dcaeReqId, "collected " + response.component_ids.length
- + " component_ids: " + JSON.stringify(response.component_ids)
- + " component_policies: " + JSON.stringify(response.component_policies));
- })
- .then(function(result) {
- response.ended = new Date();
- response.status = result.status;
- response.message = result.message;
- logger.debug(req.dcaeReqId, result.message);
- if (result.status !== 200) {
- logger.error(createError(result.message, result.status, "api", 502, 'cloudify-manager'), req);
- }
- res.status(result.status).json(response);
- logger.audit(req, result.status, result.message);
- });
-}
-
-// ========================================================
-
-const app = require('express')();
-app.set('x-powered-by', false);
-app.set('etag', false);
-app.use(require('./middleware').checkType('application/json'));
-app.use(require('body-parser').json({strict: true}));
-
-app.post('/', policyUpdate);
-app.get('/components', getComponentPoliciesFromCloudify);
-
-module.exports = app;
+/* +Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. + +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and limitations under the License. +*/ + +/** + * handling policy updates + */ + +"use strict"; + +const POLICY_UPDATE_OPERATION = "dcae.interfaces.policy.policy_update"; + +const config = process.mainModule.exports.config; +const createError = require('./dispatcher-error').createDispatcherError; +const logger = require('./logging').getLogger(); + +const cloudify = require("./cloudify.js"); + +// Set config for cloudify interface library +cloudify.setAPIAddress(config.cloudify.url); +cloudify.setCredentials(config.cloudify.user, config.cloudify.password); +cloudify.setLogger(logger); + +/** + * receive the policy-updated message from the policy-handler + */ +function policyUpdate(req, res, next) { + + const policy_update = { + catch_up : req.body && req.body.catch_up, + latest_policies : JSON.stringify((req.body && req.body.latest_policies) || {}), + removed_policies : JSON.stringify((req.body && req.body.removed_policies) || {}), + errored_policies : JSON.stringify((req.body && req.body.errored_policies) || {}), + errored_scopes : JSON.stringify((req.body && req.body.errored_scopes) || []), + scope_prefixes : JSON.stringify((req.body && req.body.scope_prefixes) || []), + policy_deployments : {}, + updated_policy_ids : {}, + added_policy_ids : {}, + removed_policy_ids : {} + }; + + logger.info(req.dcaeReqId, "policyUpdate " + + req.method + ' ' + req.protocol + '://' + req.get('host') + 
req.originalUrl + + " catch_up: " + policy_update.catch_up + + " latest_policies: " + policy_update.latest_policies + + " removed_policies: " + policy_update.removed_policies + + " errored_policies: " + policy_update.errored_policies + + " errored_scopes: " + policy_update.errored_scopes + + " scope_prefixes: " + policy_update.scope_prefixes + ); + /** + * reply to and free up the policy_handler + */ + const response = {"requestID": req.dcaeReqId}; + response.started = new Date(); + response.server_instance_uuid = process.mainModule.exports.config.server_instance_uuid; + res.json(response); + + policy_update.latest_policies = JSON.parse(policy_update.latest_policies); + policy_update.removed_policies = JSON.parse(policy_update.removed_policies); + policy_update.errored_policies = JSON.parse(policy_update.errored_policies); + policy_update.errored_scopes = JSON.parse(policy_update.errored_scopes); + policy_update.scope_prefixes = JSON.parse(policy_update.scope_prefixes); + + const is_policy_in_scopes = function(policy_id) { + return policy_update.scope_prefixes.some(scope_prefix => { + return policy_id.startsWith(scope_prefix); + }); + }; + + const is_policy_in_errored_scopes = function(policy_id) { + return policy_update.errored_scopes.some(errored_scope => { + return policy_id.startsWith(errored_scope); + }); + }; + /** + * filter out the policies to what is deployed in components and needs updating (new policyVersion) + */ + const collect_policy_deployments = function(node_instances) { + node_instances.forEach(node_instance => { + if (!node_instance.runtime_properties + || (!node_instance.runtime_properties.policies + && !node_instance.runtime_properties.policy_filters)) { + return; + } + logger.info(req.dcaeReqId, "checking policies on node_instance: " + JSON.stringify(node_instance)); + + const deployment = policy_update.policy_deployments[node_instance.deployment_id] || { + "deployment_id": node_instance.deployment_id, + "updated_policies": {}, + 
"added_policies": {}, + "removed_policy_ids": {}, + "node_instance_ids": [], + "is_deployment_busy": cloudify.exeQueue.isDeploymentBusy(node_instance.deployment_id) + }; + + var have_policies = false; + const deployed_policies = node_instance.runtime_properties.policies || {}; + + Object.keys(deployed_policies).forEach(policy_id => { + const deployed_policy = deployed_policies[policy_id]; + const latest_policy = policy_update.latest_policies[policy_id]; + if (policy_update.removed_policies[policy_id] + || (policy_update.catch_up + && (deployed_policy.policy_body || deployment.is_deployment_busy) + && !latest_policy + && !policy_update.errored_policies[policy_id] + && !is_policy_in_errored_scopes(policy_id) + && is_policy_in_scopes(policy_id))) { + have_policies = true; + deployment.removed_policy_ids[policy_id] = true; + policy_update.removed_policy_ids[policy_id] = true; + logger.info(req.dcaeReqId, "going to remove policy " + policy_id + " from node_instance: " + JSON.stringify(node_instance)); + return; + } + + if (!latest_policy || !latest_policy.policy_body + || isNaN(latest_policy.policy_body.policyVersion)) {return;} + + if (!deployment.is_deployment_busy && latest_policy.policy_body.policyVersion + === (deployed_policy.policy_body && deployed_policy.policy_body.policyVersion)) {return;} + + have_policies = true; + deployment.updated_policies[policy_id] = latest_policy; + policy_update.updated_policy_ids[policy_id] = true; + logger.info(req.dcaeReqId, "going to update policy " + policy_id + " on node_instance: " + JSON.stringify(node_instance)); + }); + + const policy_filters = node_instance.runtime_properties.policy_filters || {}; + const policy_filter_ids = Object.keys(policy_filters); + if (policy_filter_ids.length) { + logger.info(req.dcaeReqId, "matching latest policies to policy_filters[" + policy_filter_ids.length + "] on node_instance: " + JSON.stringify(node_instance)); + try { + Object.keys(policy_update.latest_policies).forEach(policy_id => { + if 
(!deployment.is_deployment_busy && deployed_policies[policy_id]) {return;} + + const latest_policy = policy_update.latest_policies[policy_id]; + const policy_body = latest_policy && latest_policy.policy_body; + if (!policy_body || isNaN(policy_body.policyVersion)) {return;} + const policy_name = policy_body.policyName; + if (!policy_name) {return;} + const matching_conditions = policy_body.matchingConditions || {}; + + logger.debug(req.dcaeReqId, "matching policy " + JSON.stringify(latest_policy)); + policy_filter_ids.some(policy_filter_id => { + const policy_filter = policy_filters[policy_filter_id].policy_filter; + if (!policy_filter || !policy_filter.policyName) {return false;} + + logger.debug(req.dcaeReqId, "matching to policy_filter " + JSON.stringify(policy_filter)); + + if (!!policy_filter.onapName + && policy_filter.onapName !== matching_conditions.ONAPName) { + logger.debug(req.dcaeReqId, "not match policy_filter_id " + policy_filter_id + + " by ONAPName: " + + policy_filter.onapName + " !== " + matching_conditions.ONAPName); + return false; + } + if (!!policy_filter.configName + && policy_filter.configName !== matching_conditions.ConfigName) { + logger.debug(req.dcaeReqId, "not match policy_filter_id " + policy_filter_id + + " by configName: " + + policy_filter.configName + " !== " + matching_conditions.ConfigName); + return false; + } + + if (policy_filter.configAttributes + && !Object.keys(policy_filter.configAttributes).every(filter_key => { + return (matching_conditions.hasOwnProperty(filter_key) + && policy_filter.configAttributes[filter_key] + === matching_conditions[filter_key]); + })) { + logger.debug(req.dcaeReqId, "not match policy_filter_id " + policy_filter_id + + " by configAttributes: " + + JSON.stringify(policy_filter.configAttributes) + " !== " + JSON.stringify(matching_conditions)); + return false; + } + + if (policy_filter.policyName !== policy_id && policy_filter.policyName !== policy_name) { + const match_policy_name = new 
RegExp(policy_filter.policyName); + if (!match_policy_name.test(policy_name)) { + logger.debug(req.dcaeReqId, "not match policy_filter_id " + policy_filter_id + + " by policyName: " + + policy_filter.policyName + " versus " + policy_name); + return false; + } + } + + have_policies = true; + if (!deployment.added_policies[policy_filter_id]) { + deployment.added_policies[policy_filter_id] = { + "policy_filter_id" : policy_filter_id, + "policies" : {} + }; + } + deployment.added_policies[policy_filter_id].policies[policy_id] = latest_policy; + policy_update.added_policy_ids[policy_id] = true; + logger.info(req.dcaeReqId, "going to add policy " + JSON.stringify(latest_policy) + + " per policy_filter_id " + policy_filter_id + + " on node_instance: " + JSON.stringify(node_instance)); + return true; + }); + }); + } catch (e) { + const error_msg = "error on matching policy to filter " + (e.message || "") + + " " + (e.stack || "").replace(/\n/g, " ") + logger.error(createError(error_msg, 500, "api", 553, 'deployment-handler'), req); + } + } + + if (have_policies) { + deployment.node_instance_ids.push(node_instance.id); + policy_update.policy_deployments[deployment.deployment_id] = deployment; + } + }); + + logger.info(req.dcaeReqId, "collected policy_deployments to update " + JSON.stringify(policy_update.policy_deployments)); + }; + + const update_policies_on_deployments = function(result) { + logger.info(req.dcaeReqId, "finished loading policy_deployments" + JSON.stringify(result)); + if (result.status !== 200) { + const error_msg = "failed to retrieve component policies from cloudify " + result.message; + logger.error(createError(error_msg, result.status, "api", 502, 'cloudify-manager'), req); + logger.audit(req, result.status, error_msg); + return; + } + + const deployment_ids = Object.keys(policy_update.policy_deployments); + if (!deployment_ids.length) { + const audit_msg = "no updated policies to apply to deployments"; + logger.debug(req.dcaeReqId, audit_msg); + 
logger.audit(req, result.status, audit_msg); + return; + } + const audit_msg = "going to apply updated policies[" + Object.keys(policy_update.updated_policy_ids).length + + "] and added policies[" + Object.keys(policy_update.added_policy_ids).length + + "] and removed policies[" + Object.keys(policy_update.removed_policy_ids).length + + "] to deployments[" + deployment_ids.length + "]"; + logger.info(req.dcaeReqId, audit_msg + ": " + JSON.stringify(deployment_ids)); + logger.audit(req, result.status, audit_msg); + deployment_ids.forEach(deployment_id => { + const deployment = policy_update.policy_deployments[deployment_id]; + deployment.updated_policies = Object.keys(deployment.updated_policies).map(policy_id => { + return deployment.updated_policies[policy_id]; + }); + deployment.removed_policy_ids = Object.keys(deployment.removed_policy_ids); + + logger.info(req.dcaeReqId, "ready to execute-operation policy-update on deployment " + JSON.stringify(deployment)); + cloudify.executeOperation(req, deployment.deployment_id, POLICY_UPDATE_OPERATION, + { + 'updated_policies': deployment.updated_policies, + 'added_policies': deployment.added_policies, + 'removed_policies': deployment.removed_policy_ids + }, + deployment.node_instance_ids + ); + }); + }; + + cloudify.getNodeInstances(req, collect_policy_deployments).then(update_policies_on_deployments); +} + +/** + * retrieve all component-policies from cloudify + */ +function getComponentPoliciesFromCloudify(req, res, next) { + logger.info(req.dcaeReqId, "getComponentPoliciesFromCloudify " + req.originalUrl); + const response = {"requestID": req.dcaeReqId}; + response.started = new Date(); + response.server_instance_uuid = process.mainModule.exports.config.server_instance_uuid; + response.node_instance_ids = []; + response.component_policies = []; + response.component_policy_filters = []; + response.node_instances = []; + + cloudify.getNodeInstances(req, function(node_instances) { + 
Array.prototype.push.apply(response.node_instances, node_instances); + node_instances.forEach(node_instance => { + if (!node_instance.runtime_properties + || (!node_instance.runtime_properties.policies + && !node_instance.runtime_properties.policy_filters)) { + return; + } + + var policies_count = 0; + var policy_filters_count = 0; + if (node_instance.runtime_properties.policies) { + Object.keys(node_instance.runtime_properties.policies).forEach(policy_id => { + ++policies_count; + const policy = node_instance.runtime_properties.policies[policy_id]; + policy.component_id = node_instance.id; + policy.deployment_id = node_instance.deployment_id; + response.component_policies.push(policy); + }); + } + if (node_instance.runtime_properties.policy_filters) { + Object.keys(node_instance.runtime_properties.policy_filters).forEach(policy_filter => { + ++policy_filters_count; + policy_filter = node_instance.runtime_properties.policy_filters[policy_filter]; + policy_filter.component_id = node_instance.id; + policy_filter.deployment_id = node_instance.deployment_id; + response.component_policy_filters.push(policy_filter); + }); + } + if (policies_count + policy_filters_count) { + response.node_instance_ids.push({ + "node_instance_id" : node_instance.id, + "deployment_id" : node_instance.deployment_id, + "policies_count" : policies_count, + "policy_filters_count" : policy_filters_count + }); + } + }); + + logger.info(req.dcaeReqId, "collected " + response.node_instance_ids.length + + " node_instance_ids: " + JSON.stringify(response.node_instance_ids) + + " component_policies: " + JSON.stringify(response.component_policies) + + " component_policy_filters: " + JSON.stringify(response.component_policy_filters) + ); + }) + .then(function(result) { + response.ended = new Date(); + response.status = result.status; + response.message = result.message; + logger.info(req.dcaeReqId, result.message); + if (result.status !== 200) { + logger.error(createError(result.message, result.status, 
"api", 502, 'cloudify-manager'), req); + } + res.status(result.status).json(response); + logger.audit(req, result.status, result.message); + }); +} + +// ======================================================== + +const app = require('express')(); +app.set('x-powered-by', false); +app.set('etag', false); +app.use(require('./middleware').checkType('application/json')); +app.use(require('body-parser').json({strict: true, limit: '150mb'})); +app.use(function(req, res, next) { + logger.info(req.dcaeReqId, + "new req: " + req.method + " " + req.originalUrl + + " from: " + req.ip + " body: " + JSON.stringify(req.body) + ); + next(); +}); + +app.post('/', policyUpdate); +app.get('/components', getComponentPoliciesFromCloudify); + +module.exports = app; diff --git a/lib/promise_request.js b/lib/promise_request.js index 0572ac4..200e516 100644 --- a/lib/promise_request.js +++ b/lib/promise_request.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -31,11 +31,11 @@ const url = require('url'); const querystring = require('querystring'); const logger = require('./logging').getLogger(); -exports.doRequest = function(options, body, targetEntity, mainReq) { - +exports.doRequest = function(mainReq, options, body, targetEntity) { + /* Defense: for now, some callers don't provide mainReq */ mainReq = mainReq || {}; - + var opInfo = {"startTime":new Date(), "targetEntity": targetEntity}; return new Promise(function(resolve, reject) { @@ -56,10 +56,12 @@ exports.doRequest = function(options, body, targetEntity, mainReq) { options.hostname = parsed.hostname; options.port = parsed.port; options.path = parsed.path; + opInfo.targetService = options.method + " " + options.uri; if (options.qs) { - options.path += ('?' 
+ querystring.stringify(options.qs)); + const qry = ('?' + querystring.stringify(options.qs)); + options.path += qry; + opInfo.targetService += qry; } - opInfo.targetService = options.method + " " + options.uri; } try { @@ -68,7 +70,7 @@ exports.doRequest = function(options, body, targetEntity, mainReq) { catch (e) { opInfo.respCode = 500; opInfo.complete = false; - logger.metrics(mainReq, opInfo, e.message); + logger.metrics(mainReq, opInfo, (e.message || "") + " " + (e.stack || "").replace(/\n/g, " ")); reject(e); } @@ -111,16 +113,18 @@ exports.doRequest = function(options, body, targetEntity, mainReq) { } opInfo.respCode = resp.statusCode || 500; + const metrics_text = "res: " + result.body + + ((reqBody && " req: " + ((typeof(reqBody) !== 'string' && typeof(reqBody)) || reqBody)) || ""); if (resp.statusCode > 199 && resp.statusCode < 300) { // HTTP status code indicates success - resolve the promise opInfo.complete = true; - logger.metrics(mainReq, opInfo, result.body); + logger.metrics(mainReq, opInfo, metrics_text); resolve(result); } else { // Reject the promise opInfo.complete = false; - logger.metrics(mainReq, opInfo, result.body); + logger.metrics(mainReq, opInfo, metrics_text); reject(result); } diff --git a/mvn-phase-lib.sh b/mvn-phase-lib.sh index 6736048..9bb81cd 100755 --- a/mvn-phase-lib.sh +++ b/mvn-phase-lib.sh @@ -234,7 +234,7 @@ build_and_push_docker() VERSION="${MVN_PROJECT_VERSION//[^0-9.]/}" VERSION2=$(echo "$VERSION" | cut -f1-2 -d'.') - LFQI="${IMAGENAME}:${VERSION}-${TIMESTAMP}" + LFQI="${IMAGENAME}:${VERSION}-${TIMESTAMP}"Z BUILD_PATH="${WORKSPACE}" # build a docker image docker build --rm -f "${WORKSPACE}"/Dockerfile -t "${LFQI}" "${BUILD_PATH}" @@ -277,7 +277,7 @@ build_and_push_docker() fi OLDTAG="${LFQI}" - PUSHTAGS="${REPO}/${IMAGENAME}:${VERSION2}-${TIMESTAMP} ${REPO}/${IMAGENAME}:${VERSION2} ${REPO}/${IMAGENAME}:${VERSION2}-latest" + PUSHTAGS="${REPO}/${IMAGENAME}:${VERSION}-SNAPSHOT-${TIMESTAMP}Z ${REPO}/${IMAGENAME}:${VERSION} 
${REPO}/${IMAGENAME}:latest" for NEWTAG in ${PUSHTAGS} do echo "tagging ${OLDTAG} to ${NEWTAG}" diff --git a/package.json b/package.json index 2a4f7e5..033d064 100644 --- a/package.json +++ b/package.json @@ -1,10 +1,10 @@ { "name": "onap-dcae-deployment-handler", - "version": "4.2.2", + "version": "4.5.0", "description": "ONAP DCAE Deployment Handler", "main": "deployment-handler.js", "dependencies": { - "adm-zip": "^0.4.7", + "adm-zip": "0.4.7", "body-parser": "^1.15.0", "express": "^4.13.4", "log4js": "^0.6.33", @@ -1,7 +1,7 @@ <?xml version="1.0"?> <!-- ================================================================================ -Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. ================================================================================ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -23,13 +23,13 @@ ECOMP is a trademark and service mark of AT&T Intellectual Property. <parent> <groupId>org.onap.oparent</groupId> <artifactId>oparent</artifactId> - <version>1.0.0-SNAPSHOT</version> + <version>1.1.0</version> </parent> <!--- CHANGE THE FOLLOWING 3 OBJECTS for your own repo --> <groupId>org.onap.dcaegen2.platform</groupId> <artifactId>deployment-handler</artifactId> <name>dcaegen2-platform-deployment-handler</name> - <version>1.0.0-SNAPSHOT</version> + <version>2.2.0-SNAPSHOT</version> <url>http://maven.apache.org</url> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> @@ -111,7 +111,7 @@ ECOMP is a trademark and service mark of AT&T Intellectual Property. </execution> </executions> </plugin> - <!-- maven-install-plugin is called during "install" phase by default behavior. it tries to copy stuff under + <!-- maven-install-plugin is called during "install" phase by default behavior. it tries to copy stuff under target dir to ~/.m2. 
we do not need it --> <plugin> <groupId>org.apache.maven.plugins</groupId> diff --git a/set_version.sh b/set_version.sh index 1169c15..d112615 100755 --- a/set_version.sh +++ b/set_version.sh @@ -1,3 +1,20 @@ +# ================================================================================ +# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. #!/bin/bash echo "exports.branch=\"$(git branch | grep \* | cut -d ' ' -f2)\";" > version.js diff --git a/tests/mock_deployment_handler.js b/tests/mock_deployment_handler.js new file mode 100644 index 0000000..7407e55 --- /dev/null +++ b/tests/mock_deployment_handler.js @@ -0,0 +1,103 @@ +/* +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. + +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and limitations under the License. +*/ + +/** + * mock-deployment_handler - base server for all other tests + */ + +"use strict"; + +const nock = require('nock'); +const utils = require('./mock_utils'); + +const MAIN_PATH = './../'; + +const CONSUL_URL = 'http://consul:8500'; +const MOCK_CLOUDIFY_MANAGER = "mock_cloudify_manager"; +const CLOUDIFY_URL = "http://" + MOCK_CLOUDIFY_MANAGER + ":80"; + +const MOCK_INVENTORY = "mock_inventory"; +const INVENTORY_URL = "https://" + MOCK_INVENTORY + ":8080"; + +nock(CONSUL_URL).persist().get('/v1/kv/deployment_handler?raw') + .reply(200, {"logLevel": "DEBUG", "cloudify": {"protocol": "http"}}); + +nock(CONSUL_URL).persist().get('/v1/catalog/service/cloudify_manager') + .reply(200, [{ + "ID":"deadbeef-dead-beef-dead-beefdeadbeef", + "Node":"devorcl00", + "Address": MOCK_CLOUDIFY_MANAGER, + "Datacenter":"rework-central", + "TaggedAddresses":{"lan": MOCK_CLOUDIFY_MANAGER,"wan": MOCK_CLOUDIFY_MANAGER}, + "NodeMeta":{}, + "ServiceID":"cloudify_manager", + "ServiceName":"cloudify_manager", + "ServiceTags":["http://" + MOCK_CLOUDIFY_MANAGER + "/api/v2.1"], + "ServiceAddress": MOCK_CLOUDIFY_MANAGER, + "ServicePort":80, + "ServiceEnableTagOverride":false, + "CreateIndex":16, + "ModifyIndex":16 + }]); + +nock(CONSUL_URL).persist().get('/v1/catalog/service/inventory') + .reply(200, [{ + "ID": "", + "Node": "inventory_mock_node", + "Address": MOCK_INVENTORY, + "Datacenter": "rework-central", + "TaggedAddresses": null, + "NodeMeta": null, + "ServiceID": "inventory", + "ServiceName": "inventory", + "ServiceTags": [], + "ServiceAddress": "", + "ServicePort": 8080, + "ServiceEnableTagOverride": false, + "CreateIndex": 8068, + "ModifyIndex": 8068 + }]); + +const tests = []; + +const run_dh = function() { + describe('run deployment-handler', () => { + it('starting deployment-handler server', function() { + console.log("starting deployment-handler server"); + const dh_server = 
require(MAIN_PATH + 'deployment-handler'); + + return utils.sleep(5000).then(function() { + console.log("starting tests: count =", tests.length); + if (Array.isArray(tests)) { + tests.forEach(test => { + test(dh_server); + }); + } + }) + .catch(function(e) { + const error = "test of deployment-handler exiting due to test problem: " + e.message + + " " + (e.stack || "").replace(/\n/g, " "); + console.error(error); + throw e; + }); + }).timeout(10000); + }); +}; + +module.exports.INVENTORY_URL = INVENTORY_URL; +module.exports.CLOUDIFY_URL = CLOUDIFY_URL; +module.exports.add_tests = function(new_tests) {Array.prototype.push.apply(tests, new_tests);}; +module.exports.run_dh = run_dh; diff --git a/tests/mock_utils.js b/tests/mock_utils.js new file mode 100644 index 0000000..311e9dc --- /dev/null +++ b/tests/mock_utils.js @@ -0,0 +1,36 @@ +/* +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. + +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and limitations under the License. 
+*/ + +"use strict"; + +module.exports.sleep = function(time) { + console.log("sleep for " + time + " msecs..."); + return new Promise((resolve) => setTimeout(() => { + console.log("woke up after " + time + " msecs"); + resolve(); + }, time)); +}; + +module.exports.ActionTimer = class ActionTimer { + constructor() { + this.started = Date.now(); + } + get step() { + let num = Date.now() - this.started; + return ("000000" + num).slice(-Math.max(5, (""+num).length)); + } +}; + diff --git a/tests/test_dcae-deployments.js b/tests/test_dcae-deployments.js new file mode 100644 index 0000000..664615e --- /dev/null +++ b/tests/test_dcae-deployments.js @@ -0,0 +1,633 @@ +/* +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. + +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and limitations under the License. 
+*/ + +/** + * handling policy updates + */ + +"use strict"; + +const nock = require('nock') + , chai = require('chai') + , chaiHttp = require('chai-http') + , expect = chai.expect + , assert = chai.assert + , admzip = require('adm-zip'); + +chai.use(chaiHttp); + +const dh = require('./mock_deployment_handler'); +const utils = require('./mock_utils'); + +const INV_PATH_DCAE_SERVICES = "/dcae-services"; +const INV_PATH_DCAE_SERVICE_TYPES = "/dcae-service-types/"; +const INV_PARAM_TYPE_ID = "?typeId="; + +const I_DONT_KNOW = "i-dont-know"; +const DEPLOYMENT_ID_JFL = "dep-jfl-000"; +const DEPLOYMENT_ID_JFL_1 = "dep-jfl-001"; +const EXISTING_DEPLOYMENT_ID = "deployment-CL-2229"; +const INV_EXISTING_SERVICE_TYPE = "86615fc1-aed9-4aa2-9e4b-abdaccbe63de"; + +const Inventory = { + resp_empty: {"links":{"previousLink":null,"nextLink":null},"totalCount":0,"items":[]}, + resp_services: function(deployment_id, service_type, totalCount) { + service_type = service_type || "f93264ee-348c-44f6-af3d-15b157bba735"; + const res = { + "links": { + "previousLink": null, + "nextLink": { + "rel": "next", + "href": dh.INVENTORY_URL + INV_PATH_DCAE_SERVICES + + (service_type && "/" + INV_PARAM_TYPE_ID + service_type + "&offset=25") || "/?offset=25" + } + }, + "totalCount": totalCount || 190, + "items": [] + }; + Array.from(Array(totalCount || 1), (_, idx) => idx).forEach(index => { + const dpl_id = deployment_id + ((index && "_" + index) || ""); + res.items.push({ + "serviceId": dpl_id, + "selfLink": { + "rel": "self", + "href": dh.INVENTORY_URL + INV_PATH_DCAE_SERVICES + "/" + dpl_id + }, + "created": 1503668339483, + "modified": 1503668339483, + "typeLink": { + "rel": "type", + "href": dh.INVENTORY_URL + INV_PATH_DCAE_SERVICE_TYPES + service_type + }, + "vnfId": "dummyVnfId", + "vnfLink": null, + "vnfType": "dummyVnfType", + "vnfLocation": "dummyLocation", + "deploymentRef": dpl_id, + "components": [{ + "componentId": "/components/dummy", + "componentLink": null, + "created": 
1489768104449, + "modified": 1508260526203, + "componentType": "dummyComponent", + "componentSource": "DCAEController", + "status": null, + "location": null, + "shareable": 0 + }] + }); + }); + return res; + }, + resp_not_found_service: function(service_id) { + return { + "code": 1, + "type": "error", + "message": "DCAEService not found: " + service_id + }; + }, + resp_existing_blueprint: function(service_type) { + return { + "owner": "dcaeorch", + "typeName": "svc-type-000", + "typeVersion": 1, + "blueprintTemplate": "tosca_definitions_version: cloudify_dsl_1_3\nimports:\n - \"http://www.getcloudify.org/spec/cloudify/3.4/types.yaml\"\n - https://nexus.onap.org/service/local/repositories/raw/content/org.onap.dcaegen2.platform.plugins/R3/dockerplugin/3.2.0/dockerplugin_types.yaml\n - https://nexus.onap.org/service/local/repositories/raw/content/org.onap.dcaegen2.platform.plugins/R3/relationshipplugin/1.0.0/relationshipplugin_types.yaml\n - https://nexus.onap.org/service/local/repositories/raw/content/org.onap.dcaegen2.platform.plugins/R3/dcaepolicyplugin/2.3.0/dcaepolicyplugin_types.yaml\n\ninputs:\n dh_override:\n type: string\n default: \"dockerhost\"\n dh_location_id:\n type: string\n default: \"zone1\"\n aaiEnrichmentHost:\n type: string\n default: \"none\"\n aaiEnrichmentPort:\n type: string \n default: 8443\n enableAAIEnrichment:\n type: string\n default: false\n dmaap_host:\n type: string\n default: dmaap.onap-message-router \n dmaap_port:\n type: string\n default: 3904 \n enableRedisCaching:\n type: string\n default: false \n redisHosts:\n type: string \n tag_version:\n type: string\n default: \"nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.tca-cdap-container:1.0.0\"\n consul_host:\n type: string\n default: consul-server.onap-consul\n consul_port:\n type: string\n default: \"8500\"\n cbs_host:\n type: string\n default: \"config-binding-service.dcae\"\n cbs_port:\n type: string\n default: \"10000\"\n policy_id:\n type: string\n default: \"none\"\n 
external_port:\n type: string\n description: \"Port for CDAPgui to be exposed\"\n default: \"32010\"\n scn_name: \n default: dcaegen2-analytics_tca_clampinstance_1\n type: string\nnode_templates:\n docker_service_host:\n properties:\n docker_host_override:\n get_input: dh_override\n location_id:\n get_input: dh_location_id\n type: dcae.nodes.SelectedDockerHost\n tca_docker:\n relationships:\n - type: dcae.relationships.component_contained_in\n target: docker_service_host\n - target: tca_policy\n type: cloudify.relationships.depends_on \n type: dcae.nodes.DockerContainerForComponentsUsingDmaap\n properties:\n application_config:\n app_config:\n appDescription: DCAE Analytics Threshold Crossing Alert Application\n appName: dcae-tca\n tcaAlertsAbatementTableName: TCAAlertsAbatementTable\n tcaAlertsAbatementTableTTLSeconds: '1728000'\n tcaSubscriberOutputStreamName: TCASubscriberOutputStream\n tcaVESAlertsTableName: TCAVESAlertsTable\n tcaVESAlertsTableTTLSeconds: '1728000'\n tcaVESMessageStatusTableName: TCAVESMessageStatusTable\n tcaVESMessageStatusTableTTLSeconds: '86400'\n thresholdCalculatorFlowletInstances: '2'\n app_preferences:\n aaiEnrichmentHost: \n get_input: aaiEnrichmentHost\n aaiEnrichmentIgnoreSSLCertificateErrors: 'true'\n aaiEnrichmentPortNumber: '8443'\n aaiEnrichmentProtocol: https\n aaiEnrichmentUserName: DCAE\n aaiEnrichmentUserPassword: DCAE\n aaiVMEnrichmentAPIPath: /aai/v11/search/nodes-query\n aaiVNFEnrichmentAPIPath: /aai/v11/network/generic-vnfs/generic-vnf\n enableAAIEnrichment: \n get_input: enableAAIEnrichment\n enableRedisCaching: \n get_input: enableRedisCaching\n redisHosts: \n get_input: redisHosts\n enableAlertCEFFormat: 'false'\n publisherContentType: application/json\n publisherHostName: \n get_input: dmaap_host\n publisherHostPort: \n get_input: dmaap_port \n publisherMaxBatchSize: '1'\n publisherMaxRecoveryQueueSize: '100000'\n publisherPollingInterval: '20000'\n publisherProtocol: http\n publisherTopicName: 
unauthenticated.DCAE_CL_OUTPUT\n subscriberConsumerGroup: OpenDCAE-c12\n subscriberConsumerId: c12\n subscriberContentType: application/json\n subscriberHostName: \n get_input: dmaap_host\n subscriberHostPort:\n get_input: dmaap_port \n subscriberMessageLimit: '-1'\n subscriberPollingInterval: '30000'\n subscriberProtocol: http\n subscriberTimeoutMS: '-1'\n subscriberTopicName: unauthenticated.SEC_MEASUREMENT_OUTPUT\n tca_policy_default: '{\"domain\":\"measurementsForVfScaling\",\"metricsPerEventName\":[{\"eventName\":\"vFirewallBroadcastPackets\",\"controlLoopSchemaType\":\"VNF\",\"policyScope\":\"DCAE\",\"policyName\":\"DCAE.Config_tca-hi-lo\",\"policyVersion\":\"v0.0.1\",\"thresholds\":[{\"closedLoopControlName\":\"ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a\",\"version\":\"1.0.2\",\"fieldPath\":\"$.event.measurementsForVfScalingFields.vNicUsageArray[*].receivedTotalPacketsDelta\",\"thresholdValue\":300,\"direction\":\"LESS_OR_EQUAL\",\"severity\":\"MAJOR\",\"closedLoopEventStatus\":\"ONSET\"},{\"closedLoopControlName\":\"ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a\",\"version\":\"1.0.2\",\"fieldPath\":\"$.event.measurementsForVfScalingFields.vNicUsageArray[*].receivedTotalPacketsDelta\",\"thresholdValue\":700,\"direction\":\"GREATER_OR_EQUAL\",\"severity\":\"CRITICAL\",\"closedLoopEventStatus\":\"ONSET\"}]},{\"eventName\":\"vLoadBalancer\",\"controlLoopSchemaType\":\"VM\",\"policyScope\":\"DCAE\",\"policyName\":\"DCAE.Config_tca-hi-lo\",\"policyVersion\":\"v0.0.1\",\"thresholds\":[{\"closedLoopControlName\":\"ControlLoop-vDNS-6f37f56d-a87d-4b85-b6a9-cc953cf779b3\",\"version\":\"1.0.2\",\"fieldPath\":\"$.event.measurementsForVfScalingFields.vNicUsageArray[*].receivedTotalPacketsDelta\",\"thresholdValue\":300,\"direction\":\"GREATER_OR_EQUAL\",\"severity\":\"CRITICAL\",\"closedLoopEventStatus\":\"ONSET\"}]},{\"eventName\":\"Measurement_vGMUX\",\"controlLoopSchemaType\":\"VNF\",\"policyScope\":\"DCAE\",\"policyName\":\"DCAE.Config_tca-
hi-lo\",\"policyVersion\":\"v0.0.1\",\"thresholds\":[{\"closedLoopControlName\":\"ControlLoop-vCPE-48f0c2c3-a172-4192-9ae3-052274181b6e\",\"version\":\"1.0.2\",\"fieldPath\":\"$.event.measurementsForVfScalingFields.additionalMeasurements[*].arrayOfFields[0].value\",\"thresholdValue\":0,\"direction\":\"EQUAL\",\"severity\":\"MAJOR\",\"closedLoopEventStatus\":\"ABATED\"},{\"closedLoopControlName\":\"ControlLoop-vCPE-48f0c2c3-a172-4192-9ae3-052274181b6e\",\"version\":\"1.0.2\",\"fieldPath\":\"$.event.measurementsForVfScalingFields.additionalMeasurements[*].arrayOfFields[0].value\",\"thresholdValue\":0,\"direction\":\"GREATER\",\"severity\":\"CRITICAL\",\"closedLoopEventStatus\":\"ONSET\"}]}]}'\n service_component_type: dcaegen2-analytics_tca \n docker_config:\n healthcheck:\n endpoint: /\n interval: 15s\n timeout: 1s\n type: http\n image:\n get_input: tag_version \n service_component_name_override: \n get_input: scn_name \n interfaces:\n cloudify.interfaces.lifecycle:\n start:\n inputs:\n envs:\n DMAAPHOST: \n { get_input: dmaap_host }\n DMAAPPORT:\n { get_input: dmaap_port }\n DMAAPPUBTOPIC: \"unauthenticated.DCAE_CL_OUTPUT\"\n DMAAPSUBTOPIC: \"unauthenticated.SEC_MEASUREMENT_OUTPUT\"\n AAIHOST: \n { get_input: aaiEnrichmentHost }\n AAIPORT: \n { get_input: aaiEnrichmentPort }\n CONSUL_HOST: \n { get_input: consul_host }\n CONSUL_PORT: \n { get_input: consul_port }\n CBS_HOST: \n { get_input: cbs_host }\n CBS_PORT: \n { get_input: cbs_port }\n CONFIG_BINDING_SERVICE: \"config_binding_service\" \n SERVICE_11011_NAME: \n { get_input: scn_name }\n SERVICE_11015_IGNORE: \"true\" \n ports:\n - concat: [\"11011:\", { get_input: external_port }] \n stop:\n inputs:\n cleanup_image: true \n tca_policy:\n type: dcae.nodes.policy\n properties:\n policy_id:\n get_input: policy_id\n", + "serviceIds": null, + "vnfTypes": ["TESTVNF000"], + "serviceLocations": null, + "asdcServiceId": null, + "asdcResourceId": null, + "asdcServiceURL": null, + "typeId": service_type, + "selfLink": { 
+ "rel": "self", + "href": dh.INVENTORY_URL + INV_PATH_DCAE_SERVICE_TYPES + service_type + }, + "created": 1500910967567, + "deactivated": null + }; + }, + resp_put_service: function(deployment_id, service_type) { + return { + "serviceId": deployment_id, + "selfLink": { + "rel": "self", + "href": dh.INVENTORY_URL + INV_PATH_DCAE_SERVICES + "/" + deployment_id + }, + "created": 1516376798582, + "modified": 1516376798582, + "typeLink": { + "rel": "type", + "href": dh.INVENTORY_URL + INV_PATH_DCAE_SERVICE_TYPES + service_type + }, + "vnfId": "dummyVnfId", + "vnfLink": null, + "vnfType": "dummyVnfType", + "vnfLocation": "dummyLocation", + "deploymentRef": deployment_id, + "components": [{ + "componentId": "/components/dummy", + "componentLink": null, + "created": 1489768104449, + "modified": 1516376798582, + "componentType": "dummy_component", + "componentSource": "DCAEController", + "status": null, + "location": null, + "shareable": 0 + }] + }; + } +}; + +const Cloudify = { + resp_blueprint: function(deployment_id) { + return { + "main_file_name": "blueprint.yaml", + "description": null, + "created_at": "2018-01-19 15:46:47.037084", + "updated_at": "2018-01-19 15:46:47.037084", + "plan": {}, + "id": deployment_id + }; + }, + resp_deploy: function(deployment_id, blueprint_id, inputs) { + return { + "inputs": (inputs && JSON.parse(JSON.stringify(inputs)) || null), + "description": null, + "created_at": "2018-01-19 15:46:47.037084", + "updated_at": "2018-01-19 15:46:47.037084", + "id": deployment_id, + "blueprint_id": blueprint_id || deployment_id + }; + }, + resp_execution: function(deployment_id, blueprint_id, execution_id, terminated, workflow_id) { + return { + "status": (terminated && "terminated") || "pending", + "created_at": "2018-01-19 15:51:21.866227", + "workflow_id": workflow_id || "install", + "is_system_workflow": false, + "parameters": {}, + "blueprint_id": blueprint_id || deployment_id, + "deployment_id": deployment_id, + "error": "", + "id": execution_id 
+ }; + }, + resp_outputs: function(deployment_id) { + return {"outputs": {}, "deployment_id": deployment_id}; + } +}; + +function test_get_dcae_deployments(dh_server) { + const req_path = "/dcae-deployments"; + const test_txt = "GET " + req_path; + describe(test_txt, () => { + it('GET all the dcae-deployments from inventory', function() { + const action_timer = new utils.ActionTimer(); + console.log(action_timer.step, test_txt); + + const inv_resp = Inventory.resp_services(EXISTING_DEPLOYMENT_ID); + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICES) + .reply(200, function(uri) { + console.log(action_timer.step, "get", dh.INVENTORY_URL, uri); + return JSON.stringify(inv_resp); + }); + + return chai.request(dh_server.app).get(req_path) + .then(function(res) { + console.log(action_timer.step, "res for", test_txt, res.text); + expect(res).to.have.status(200); + expect(res).to.be.json; + + assert.containsAllKeys(res.body, {"requestId": "", "deployments": []}); + assert.isString(res.body.requestId); + assert.isArray(res.body.deployments); + assert.lengthOf(res.body.deployments, inv_resp.items.length); + assert.containsAllKeys(res.body.deployments[0], {"href":null}); + assert.match(res.body.deployments[0].href, + new RegExp("^http:[/][/]127.0.0.1:[0-9]+[/]dcae-deployments[/]" + EXISTING_DEPLOYMENT_ID)); + }) + .catch(function(err) { + console.error(action_timer.step, "err for", test_txt, err); + throw err; + }); + }); + }); +} + +function test_get_dcae_deployments_service_type_unknown(dh_server) { + const req_path = "/dcae-deployments?serviceTypeId=" + I_DONT_KNOW; + const test_txt = "GET " + req_path; + describe(test_txt, () => { + it('GET nothing for unknown service-type from inventory', function() { + const action_timer = new utils.ActionTimer(); + console.log(action_timer.step, test_txt); + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICES + INV_PARAM_TYPE_ID + I_DONT_KNOW) + .reply(200, function(uri) { + console.log(action_timer.step, "get", dh.INVENTORY_URL, uri); + 
return JSON.stringify(Inventory.resp_empty); + } + ); + + return chai.request(dh_server.app).get(req_path) + .then(function(res) { + console.log(action_timer.step, "res for", test_txt, res.text); + expect(res).to.have.status(200); + expect(res).to.be.json; + + assert.containsAllKeys(res.body, {"requestId": "", "deployments": []}); + assert.isString(res.body.requestId); + assert.isArray(res.body.deployments); + assert.lengthOf(res.body.deployments, 0); + }) + .catch(function(err) { + console.error(action_timer.step, "err for", test_txt, err); + throw err; + }); + }); + }); +} + +function create_main_message(service_type_id, include_inputs) { + var msg = {"serviceTypeId": service_type_id}; + if (include_inputs) { + msg.inputs= { + "dcae_service_location" : "loc00", + "dcae_target_type" : "type000", + "dcae_target_name" : "target000" + }; + } + return msg; +} + +function test_put_dcae_deployments_i_dont_know(dh_server) { + const req_path = "/dcae-deployments/" + I_DONT_KNOW; + const message = create_main_message(I_DONT_KNOW); + const test_txt = "PUT " + req_path + ": " + JSON.stringify(message); + describe(test_txt, () => { + it('Fail to deploy i-dont-know service', function(done) { + const action_timer = new utils.ActionTimer(); + console.log(action_timer.step, test_txt); + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICES + "/" + I_DONT_KNOW) + .reply(404, function(uri) { + console.log(action_timer.step, "get", dh.INVENTORY_URL, uri); + return JSON.stringify(Inventory.resp_not_found_service(I_DONT_KNOW)); + }); + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICE_TYPES + I_DONT_KNOW) + .reply(404, function(uri) { + console.log(action_timer.step, "get", dh.INVENTORY_URL, uri); + return "<html> <head><title>Error 404 Not Found</title></head><body></body> </html>"; + }); + + chai.request(dh_server.app).put(req_path) + .set('content-type', 'application/json') + .send(message) + .end(function(err, res) { + console.log(action_timer.step, "res for", test_txt, res.text); + 
expect(res).to.have.status(404); + expect(res.body).to.have.property('message'); + expect(res.body.message).to.be.equal("No service type with ID " + I_DONT_KNOW); + done(); + }); + }); + }); +} + +function test_put_dcae_deployments_missing_input_error(dh_server) { + const req_path = "/dcae-deployments/" + DEPLOYMENT_ID_JFL; + const message = create_main_message(INV_EXISTING_SERVICE_TYPE); + const test_txt = "PUT " + req_path + ": " + JSON.stringify(message); + describe(test_txt, () => { + it('Fail to deploy service - missing_input', function(done) { + const action_timer = new utils.ActionTimer(); + console.log(action_timer.step, test_txt); + + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICES + "/" + DEPLOYMENT_ID_JFL) + .reply(404, function(uri) { + console.log(action_timer.step, "get", dh.INVENTORY_URL, uri); + return JSON.stringify(Inventory.resp_not_found_service(DEPLOYMENT_ID_JFL)); + }); + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICE_TYPES + INV_EXISTING_SERVICE_TYPE) + .reply(200, function(uri) { + console.log(action_timer.step, "get", dh.INVENTORY_URL, uri); + return JSON.stringify(Inventory.resp_existing_blueprint(INV_EXISTING_SERVICE_TYPE)); + }); + nock(dh.INVENTORY_URL).put(INV_PATH_DCAE_SERVICES + "/" + DEPLOYMENT_ID_JFL) + .reply(200, function(uri, requestBody) { + console.log(action_timer.step, "put", dh.INVENTORY_URL, uri, JSON.stringify(requestBody)); + return JSON.stringify(Inventory.resp_put_service(DEPLOYMENT_ID_JFL, INV_EXISTING_SERVICE_TYPE)); + }); + nock(dh.INVENTORY_URL).delete(INV_PATH_DCAE_SERVICES + "/" + DEPLOYMENT_ID_JFL) + .reply(200, function(uri) { + console.log(action_timer.step, "delete", dh.INVENTORY_URL, uri); + return ""; + }); + + nock(dh.CLOUDIFY_URL).put("/api/v2.1/blueprints/" + DEPLOYMENT_ID_JFL) + .reply(200, function(uri, requestBody) { + console.log(action_timer.step, "put", dh.CLOUDIFY_URL, uri, JSON.stringify(requestBody)); + return JSON.stringify(Cloudify.resp_blueprint(DEPLOYMENT_ID_JFL)); + }); + + const 
depl_rejected = { + "message": "Required inputs blah...", + "error_code": "missing_required_deployment_input_error", + "server_traceback": "Traceback blah..." + }; + nock(dh.CLOUDIFY_URL).put("/api/v2.1/deployments/" + DEPLOYMENT_ID_JFL) + .reply(400, function(uri) { + console.log(action_timer.step, "put", dh.CLOUDIFY_URL, uri); + return JSON.stringify(depl_rejected); + }); + + chai.request(dh_server.app).put(req_path) + .set('content-type', 'application/json') + .send(message) + .end(function(err, res) { + console.log(action_timer.step, "res for", test_txt, res.text); + expect(res).to.have.status(400); + expect(res.body).to.have.property('message'); + expect(res.body.message).to.be.equal("Status 400 from CM API -- error code: " + depl_rejected.error_code + " -- message: " + depl_rejected.message); + done(); + }); + }); + }); +} + +function test_put_dcae_deployments_success(dh_server) { + const req_path = "/dcae-deployments/" + DEPLOYMENT_ID_JFL_1; + const message = create_main_message(INV_EXISTING_SERVICE_TYPE, true); + const test_txt = "PUT " + req_path + ": " + JSON.stringify(message); + const execution_id = "execution_" + DEPLOYMENT_ID_JFL_1; + describe(test_txt, () => { + it('Success deploy service', function() { + const action_timer = new utils.ActionTimer(); + console.log(action_timer.step, test_txt); + + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICES + "/" + DEPLOYMENT_ID_JFL_1) + .reply(404, function(uri) { + console.log(action_timer.step, "get", dh.INVENTORY_URL, uri); + return JSON.stringify(Inventory.resp_not_found_service(DEPLOYMENT_ID_JFL_1)); + }); + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICE_TYPES + INV_EXISTING_SERVICE_TYPE) + .reply(200, function(uri) { + console.log(action_timer.step, "get", dh.INVENTORY_URL, uri); + return JSON.stringify(Inventory.resp_existing_blueprint(INV_EXISTING_SERVICE_TYPE)); + }); + nock(dh.INVENTORY_URL).put(INV_PATH_DCAE_SERVICES + "/" + DEPLOYMENT_ID_JFL_1) + .reply(200, function(uri, requestBody) { + 
console.log(action_timer.step, "put", dh.INVENTORY_URL, uri, JSON.stringify(requestBody)); + return JSON.stringify(Inventory.resp_put_service(DEPLOYMENT_ID_JFL_1, INV_EXISTING_SERVICE_TYPE)); + }); + + nock(dh.CLOUDIFY_URL).put("/api/v2.1/blueprints/" + DEPLOYMENT_ID_JFL_1) + .reply(200, function(uri, requestBody) { + console.log(action_timer.step, "put", dh.CLOUDIFY_URL, uri, JSON.stringify(requestBody)); + return JSON.stringify(Cloudify.resp_blueprint(DEPLOYMENT_ID_JFL_1)); + }); + + nock(dh.CLOUDIFY_URL).put("/api/v2.1/deployments/" + DEPLOYMENT_ID_JFL_1) + .reply(201, function(uri, requestBody) { + console.log(action_timer.step, "put", dh.CLOUDIFY_URL, uri, JSON.stringify(requestBody)); + return JSON.stringify(Cloudify.resp_deploy(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, message.inputs)); + }); + + nock(dh.CLOUDIFY_URL).post("/api/v2.1/executions") + .reply(201, function(uri, requestBody) { + console.log(action_timer.step, "post", dh.CLOUDIFY_URL, uri, JSON.stringify(requestBody)); + return JSON.stringify(Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, execution_id)); + }); + + nock(dh.CLOUDIFY_URL).get("/api/v2.1/executions/" + execution_id) + .reply(200, function(uri) { + console.log(action_timer.step, "get", dh.CLOUDIFY_URL, uri); + return JSON.stringify(Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, execution_id, true)); + }); + + nock(dh.CLOUDIFY_URL).get("/api/v2.1/deployments/" + DEPLOYMENT_ID_JFL_1 + "/outputs") + .reply(200, function(uri) { + console.log(action_timer.step, "get", dh.CLOUDIFY_URL, uri); + return JSON.stringify(Cloudify.resp_outputs(DEPLOYMENT_ID_JFL_1)); + }); + + return chai.request(dh_server.app).put(req_path) + .set('content-type', 'application/json') + .send(message) + .then(function(res) { + console.log(action_timer.step, "res for", test_txt, res.text); + expect(res).to.have.status(202); + expect(res).to.be.json; + + return utils.sleep(10000); + }) + .then(function() { + 
console.log(action_timer.step, "the end of test"); + }) + .catch(function(err) { + console.error(action_timer.step, "err for", test_txt, err); + throw err; + }); + }).timeout(50000); + }); +} + +function test_get_dcae_deployments_operation(dh_server) { + const execution_id = "execution_" + DEPLOYMENT_ID_JFL_1; + const req_path = "/dcae-deployments/" + DEPLOYMENT_ID_JFL_1 + "/operation/" + execution_id; + const test_txt = "GET " + req_path; + describe(test_txt, () => { + it('Get operation execution succeeded', function() { + const action_timer = new utils.ActionTimer(); + console.log(action_timer.step, test_txt); + nock(dh.CLOUDIFY_URL).get("/api/v2.1/executions/" + execution_id) + .reply(200, function(uri) { + console.log(action_timer.step, "get", dh.CLOUDIFY_URL, uri); + return JSON.stringify(Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, execution_id, true)); + }); + + return chai.request(dh_server.app).get(req_path) + .then(function(res) { + console.log(action_timer.step, "res for", test_txt, res.text); + expect(res).to.have.status(200); + expect(res).to.be.json; + }) + .catch(function(err) { + console.error(action_timer.step, "err for", test_txt, err); + throw err; + }); + }); + }); +} + +function test_get_dcae_deployments_service_type_deployed(dh_server) { + const req_path = "/dcae-deployments?serviceTypeId=" + INV_EXISTING_SERVICE_TYPE; + const test_txt = "GET " + req_path; + describe(test_txt, () => { + it('GET services=deployments of the service-type from inventory', function() { + const action_timer = new utils.ActionTimer(); + console.log(action_timer.step, test_txt); + const deployed_count = 10; + nock(dh.INVENTORY_URL) + .get(INV_PATH_DCAE_SERVICES + INV_PARAM_TYPE_ID + INV_EXISTING_SERVICE_TYPE) + .reply(200, function(uri) { + console.log(action_timer.step, "get", dh.INVENTORY_URL, uri); + return JSON.stringify(Inventory.resp_services(DEPLOYMENT_ID_JFL_1, INV_EXISTING_SERVICE_TYPE, deployed_count)); + }); + + return 
chai.request(dh_server.app).get(req_path) + .then(function(res) { + console.log(action_timer.step, "res for", test_txt, res.text); + expect(res).to.have.status(200); + expect(res).to.be.json; + + assert.containsAllKeys(res.body, {"requestId": "", "deployments": []}); + assert.isString(res.body.requestId); + assert.isArray(res.body.deployments); + assert.lengthOf(res.body.deployments, deployed_count); + }) + .catch(function(err) { + console.error(action_timer.step, "err for", test_txt, err); + throw err; + }); + }); + }); +} + +function test_delete_dcae_deployments_success(dh_server) { + const req_path = "/dcae-deployments/" + DEPLOYMENT_ID_JFL_1; + const test_txt = "DELETE " + req_path; + const workflow_id = "uninstall"; + const execution_id = workflow_id + "_" + DEPLOYMENT_ID_JFL_1; + describe(test_txt, () => { + it('Success DELETE service', function() { + const action_timer = new utils.ActionTimer(); + console.log(action_timer.step, test_txt); + + nock(dh.CLOUDIFY_URL).post("/api/v2.1/executions") + .reply(201, function(uri, requestBody) { + console.log(action_timer.step, "post", dh.CLOUDIFY_URL, uri, JSON.stringify(requestBody)); + return JSON.stringify(Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, + execution_id, false, workflow_id)); + }); + + nock(dh.INVENTORY_URL).delete(INV_PATH_DCAE_SERVICES + "/" + DEPLOYMENT_ID_JFL_1) + .reply(200, function(uri) { + console.log(action_timer.step, "delete", dh.INVENTORY_URL, uri); + return ""; + }); + + nock(dh.CLOUDIFY_URL).get("/api/v2.1/executions/" + execution_id) + .reply(200, function(uri) { + console.log(action_timer.step, "get", dh.CLOUDIFY_URL, uri); + return JSON.stringify(Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, + execution_id, true, workflow_id)); + }); + + nock(dh.CLOUDIFY_URL).delete("/api/v2.1/deployments/" + DEPLOYMENT_ID_JFL_1) + .reply(201, function(uri) { + console.log(action_timer.step, "delete", dh.CLOUDIFY_URL, uri); + return 
JSON.stringify(Cloudify.resp_deploy(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1)); + }); + + nock(dh.CLOUDIFY_URL).delete("/api/v2.1/blueprints/" + DEPLOYMENT_ID_JFL_1) + .reply(200, function(uri) { + console.log(action_timer.step, "delete", dh.CLOUDIFY_URL, uri); + return JSON.stringify(Cloudify.resp_blueprint(DEPLOYMENT_ID_JFL_1)); + }); + + return chai.request(dh_server.app).delete(req_path) + .then(function(res) { + console.log(action_timer.step, "res for", test_txt, res.text); + expect(res).to.have.status(202); + expect(res).to.be.json; + + return utils.sleep(45000); + }) + .then(function() { + console.log(action_timer.step, "the end of test"); + }) + .catch(function(err) { + console.error(action_timer.step, "err for", test_txt, err); + throw err; + }); + }).timeout(60000); + }); +} + +function test_zipper(dh_server) { + const test_txt = 'zip the blueprint'; + describe(test_txt, () => { + it(test_txt, function() { + var blueprint = ""; + const failed_blueprints = []; + const success_blueprints = []; + const action_timer = new utils.ActionTimer(); + console.log(action_timer.step, test_txt); + + return utils.sleep(100).then(function() { + console.log("starting test_zipper"); + var first_exc; + for (var i=0; i< 100; i++) { + blueprint = blueprint + (i % 10); + try { + const zip = new admzip(); + zip.addFile('work/', new Buffer(0)); + zip.addFile('work/blueprint.yaml', new Buffer(blueprint, 'utf8')); + const zip_buffer = zip.toBuffer(); + success_blueprints.push(blueprint); + } catch (e) { + // TypeError + const error = "failed to zip: " + e.message + + " " + (e.stack || "").replace(/\n/g, " ") + + "blueprint(" + blueprint + ")"; + console.error(error); + failed_blueprints.push(blueprint); + if (!first_exc) { + first_exc = e; + first_exc.blueprint = blueprint; + } + } + } + console.log("success", success_blueprints.length / (failed_blueprints.length + success_blueprints.length)); + console.log("failed_blueprints", failed_blueprints); + 
console.log("success_blueprints", success_blueprints); + if (first_exc) { + throw first_exc; + } + }) + .catch(function(e) { + const error = "test of zipper exiting due to test problem: " + e.message + + " " + (e.stack || "").replace(/\n/g, " ") + "blueprint(" + e.blueprint + ")"; + console.error(error); + throw e; + }); + }); + }); +} + + +dh.add_tests([ + test_zipper, + test_get_dcae_deployments, + test_get_dcae_deployments_service_type_unknown, + test_put_dcae_deployments_i_dont_know, + test_put_dcae_deployments_missing_input_error, + test_get_dcae_deployments_operation, + test_get_dcae_deployments_service_type_deployed, + test_put_dcae_deployments_success, + test_delete_dcae_deployments_success +]); diff --git a/tests/test_info.js b/tests/test_info.js new file mode 100644 index 0000000..1156d59 --- /dev/null +++ b/tests/test_info.js @@ -0,0 +1,59 @@ +/* +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. + +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and limitations under the License. 
+*/
+
+/**
+ * testing the GET / (info) endpoint of the deployment-handler
+ */
+
+"use strict";
+
+const chai = require('chai')
+    , chaiHttp = require('chai-http')
+    , expect = chai.expect
+    , assert = chai.assert;
+
+chai.use(chaiHttp);
+
+const dh = require('./mock_deployment_handler');
+const utils = require('./mock_utils');
+
+function test_get_info(dh_server) {
+    const req_path = "/";
+    const test_txt = "GET " + req_path;
+    describe(test_txt, () => {
+        it('GET info', function() {
+            const action_timer = new utils.ActionTimer();
+            console.log(action_timer.step, test_txt);
+            return chai.request(dh_server.app).get(req_path)
+                .then(function(res) {
+                    console.log(action_timer.step, "res for", test_txt, res.text);
+                    expect(res).to.have.status(200);
+                    expect(res).to.be.json;
+
+                    const info = res.body;
+                    const config = process.mainModule.exports.config;
+                    assert.include(config, info.server);
+                    assert.deepEqual(config.apiLinks, info.links);
+                })
+                .catch(function(err) {
+                    console.error(action_timer.step, "err for", test_txt, err);
+                    throw err;
+                });
+        });
+    });
+}
+
+dh.add_tests([test_get_info]);
diff --git a/tests/test_policy.js b/tests/test_policy.js
index c0ad243..0d7550e 100644
--- a/tests/test_policy.js
+++ b/tests/test_policy.js
@@ -1,5 +1,5 @@
 /*
-Copyright(c) 2017 AT&T Intellectual Property. All rights reserved.
+Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -20,205 +20,464 @@ See the License for the specific language governing permissions and limitations "use strict"; -const nock = require('nock'); -const chai = require('chai') +const nock = require('nock') + , chai = require('chai') , chaiHttp = require('chai-http') - , expect = chai.expect; + , expect = chai.expect + , assert = chai.assert; chai.use(chaiHttp); -const REQ_ID = "111"; +const dh = require('./mock_deployment_handler'); +const utils = require('./mock_utils'); + const RUN_TS = new Date(); const RUN_TS_HOURS = RUN_TS.getHours(); -const CONSUL_URL = 'http://consul:8500'; -const TEST_CLOUDIFY_MANAGER = "test_cloudify_manager"; -const CLOUDIFY_URL = "http://" + TEST_CLOUDIFY_MANAGER + ":80"; - const POLICY_ID = 'policy_id'; const POLICY_VERSION = "policyVersion"; const POLICY_NAME = "policyName"; const POLICY_BODY = 'policy_body'; const POLICY_CONFIG = 'config'; -const MONKEYED_POLICY_ID = "DCAE_alex.Config_peach" -const MONKEYED_POLICY_ID_2 = "DCAE_alex.Config_peach_2" +const BLUEPRINT_ID = "demo_dcaepolicy"; +const DEPLOYMENT_ID = "demo_dcae_policy_depl"; +const OPERATION_POLICY_UPDATE = "dcae.interfaces.policy.policy_update"; +const EXECUTE_OPERATION = "execute_operation"; + +const MONKEYED_POLICY_ID = "DCAE_alex.Config_peach"; +const MONKEYED_POLICY_ID_2 = "DCAE_alex.Config_peach_2"; +const MONKEYED_POLICY_ID_3 = "DCAE_alex.Config_peach_3"; +const MONKEYED_POLICY_ID_4 = "DCAE_alex.Config_peach_4"; +const MONKEYED_POLICY_ID_5 = "DCAE_alex.Config_peach_5"; +const MONKEYED_POLICY_ID_6 = "DCAE_alex.Config_peach_6"; +const CLAMP_POLICY_ID = "CLAMP.Config_clamp_policy"; -function create_policy_body(policy_id, policy_version=1) { - const prev_ver = policy_version - 1; - const timestamp = new Date(RUN_TS.getTime()); - timestamp.setHours(RUN_TS_HOURS + prev_ver); +const CFY_API = "/api/v2.1"; +const CFY_API_NODE_INSTANCES = CFY_API + "/node-instances"; +const CFY_API_EXECUTIONS = CFY_API + "/executions"; +const CFY_API_EXECUTION = CFY_API_EXECUTIONS + "/"; 
+function create_policy_body(policy_id, policy_version=1, matching_conditions=null) { const this_ver = policy_version.toString(); - const config = { - "policy_updated_from_ver": prev_ver.toString(), - "policy_updated_to_ver": this_ver.toString(), - "policy_hello": "world!", - "policy_updated_ts": timestamp, - "updated_policy_id": policy_id + + const matchingConditions = { + "ONAPName": "DCAE", + "ConfigName": "alex_config_name" }; + if (matching_conditions) { + Object.assign(matchingConditions, matching_conditions); + } return { "policyConfigMessage": "Config Retrieved! ", "policyConfigStatus": "CONFIG_RETRIEVED", "type": "JSON", - POLICY_NAME: policy_id + "." + this_ver + ".xml", - POLICY_VERSION: this_ver, - POLICY_CONFIG: config, - "matchingConditions": { - "ECOMPName": "DCAE", - "ConfigName": "alex_config_name" - }, + [POLICY_NAME]: (policy_id && (policy_id + "." + this_ver + ".xml") || null), + [POLICY_VERSION]: this_ver, + [POLICY_CONFIG]: {"policy_hello": "world!"}, + "matchingConditions": matchingConditions, "responseAttributes": {}, "property": null }; } -function create_policy(policy_id, policy_version=1) { +function create_policy(policy_id, policy_version=1, matching_conditions=null) { return { - POLICY_ID : policy_id, - POLICY_BODY : MonkeyedPolicyBody.create_policy_body(policy_id, policy_version) + [POLICY_ID] : policy_id, + [POLICY_BODY] : create_policy_body(policy_id, policy_version, matching_conditions) }; } -nock(CONSUL_URL).persist().get('/v1/kv/deployment_handler?raw') - .reply(200, {"logLevel": "DEBUG", "cloudify": {"protocol": "http"}}); - -nock(CONSUL_URL).persist().get('/v1/catalog/service/cloudify_manager') - .reply(200, [{ - "ID":"deadbeef-dead-beef-dead-beefdeadbeef", - "Node":"devorcl00", - "Address": TEST_CLOUDIFY_MANAGER, - "Datacenter":"rework-central", - "TaggedAddresses":{"lan": TEST_CLOUDIFY_MANAGER,"wan": TEST_CLOUDIFY_MANAGER}, - "NodeMeta":{}, - "ServiceID":"cloudify_manager", - "ServiceName":"cloudify_manager", - 
"ServiceTags":["http://" + TEST_CLOUDIFY_MANAGER + "/api/v2.1"], - "ServiceAddress": TEST_CLOUDIFY_MANAGER, - "ServicePort":80, - "ServiceEnableTagOverride":false, - "CreateIndex":16, - "ModifyIndex":16 - }]); - -nock(CONSUL_URL).persist().get('/v1/catalog/service/inventory') - .reply(200, [{ - "ID": "", - "Node": "inventory_test", - "Address": "inventory", - "Datacenter": "rework-central", - "TaggedAddresses": null, - "NodeMeta": null, - "ServiceID": "inventory", - "ServiceName": "inventory", - "ServiceTags": [], - "ServiceAddress": "inventory", - "ServicePort": 8080, - "ServiceEnableTagOverride": false, - "CreateIndex": 8068, - "ModifyIndex": 8068 - }]); - -nock(CLOUDIFY_URL).persist().get(/[/]api[/]v2[.]1[/]node-instances/) - .reply(200, { - "items": [ - { - "deployment_id": "demo_dcae_policy_depl", - "id": "host_vm_163f7", - "runtime_properties": { - "application_config": { - "capacity_ts": "2017-09-07T16:54:31.696Z", - "capacity": "123", - "policy_hello": "world!", - "policy_updated_ts": "2017-09-05T18:09:54.109548Z", - "policy_updated_from_ver": "20", - "location": "neverland", - "updated_policy_id": MONKEYED_POLICY_ID_2, - "policy_updated_to_ver": "21", - "location_ts": "2017-09-07T16:54:31.696Z" - }, - "execute_operation": "policy_update", - "service_component_name": "2caa5ccf-bfc6-4a75-aca7-4af03745f478.unknown.unknown.unknown.dcae.onap.org", - "exe_task": "node_configure", - "policies": { - "DCAE_alex.Config_host_location_policy_id_value": { - "policy_required": true, - "policy_body": create_policy_body(MONKEYED_POLICY_ID, 55), - "policy_id": MONKEYED_POLICY_ID - }, - "DCAE_alex.Config_host_capacity_policy_id_value": { - "policy_required": true, - "policy_body": create_policy_body(MONKEYED_POLICY_ID_2, 21), - "policy_id": MONKEYED_POLICY_ID_2 - } +const message_catch_up = { + "errored_scopes": [], + "catch_up": true, + "scope_prefixes": ["DCAE_alex.Config_", "DCAE.Config_"], + "errored_policies": {}, + "latest_policies": {} +}; + +const 
cloudify_node_instances = [ + { + "deployment_id": DEPLOYMENT_ID, + "id": "host_vm_163f7", + "runtime_properties": { + "application_config": { + "policy_hello": "world!", + "location": "neverland", + "location_ts": "2017-09-07T16:54:31.696Z" + }, + [EXECUTE_OPERATION]: "policy_update", + "service_component_name": "2caa5ccf-bfc6-4a75-aca7-4af03745f478.unknown.unknown.unknown.dcae.onap.org", + "exe_task": "node_configure", + "policies": { + [MONKEYED_POLICY_ID]: { + "policy_required": true, + "policy_persistent": true, + "policy_body": create_policy_body(MONKEYED_POLICY_ID, 55), + "policy_id": MONKEYED_POLICY_ID + }, + [MONKEYED_POLICY_ID_2]: { + "policy_persistent": false, + "policy_body": create_policy_body(MONKEYED_POLICY_ID_2, 21, {"key1": "value1"}), + "policy_id": MONKEYED_POLICY_ID_2 + }, + [MONKEYED_POLICY_ID_3]: { + "policy_persistent": false, + "policy_body": create_policy_body(MONKEYED_POLICY_ID_3, 33, {"service": "alex_service"}), + "policy_id": MONKEYED_POLICY_ID_3 + }, + [MONKEYED_POLICY_ID_5]: { + "policy_persistent": false, + "policy_body": create_policy_body(MONKEYED_POLICY_ID_5, 1), + "policy_id": MONKEYED_POLICY_ID_5 + }, + [CLAMP_POLICY_ID]: { + "policy_persistent": false, + "policy_body": create_policy_body(CLAMP_POLICY_ID, 9), + "policy_id": CLAMP_POLICY_ID + } + }, + "policy_filters": { + "db_client_policies_c83de": { + "policy_filter_id": "db_client_policies_c83de", + "policy_filter": { + "policyName": MONKEYED_POLICY_ID_2 + ".*", + "unique": false, + "onapName": "DCAE", + "configName": "alex_config_name", + "configAttributes": {"key1": "value1"} + } + }, + "db_client_policies_microservice_09f09": { + "policy_filter_id": "db_client_policies_microservice_09f09", + "policy_filter": { + "policyName": MONKEYED_POLICY_ID + ".*", + "unique": false, + "onapName": "DCAE", + "configName": "alex_config_name", + "configAttributes": {"service": "alex_service"} + } + }, + "policy_filter_by_id_02d02": { + "policy_filter_id": "policy_filter_by_id_02d02", + 
"policy_filter": { + "policyName": MONKEYED_POLICY_ID_6 + } + }, + "new_policies_09f09": { + "policy_filter_id": "new_policies_09f09", + "policy_filter": { + "policyName": MONKEYED_POLICY_ID_4 + ".*", + "unique": false, + "onapName": "DCAE", + "configName": "alex_config_name", + "configAttributes": {"service": "alex_service"} + } + }, + "db_client_policies_not_found_cfed6": { + "policy_filter_id": "db_client_policies_not_found_cfed6", + "policy_filter": { + "configAttributes": {"not-to-be-found": "ever"}, + "unique": false, + "onapName": "DCAE", + "policyName": "DCAE_alex.Config_not_found_ever_.*" + } + }, + "filter_without_policy_name_22abcd": { + "policy_filter_id": "filter_without_policy_name", + "policy_filter": {"onapName": "DCAE"} + }, + "db_client_policies_no_match_afed8": { + "policy_filter_id": "db_client_policies_no_match_afed8", + "policy_filter": { + "policyName": "DCAE_alex.Config_not_found_ever_.*" } } } - ], - "metadata": { - "pagination": { - "total": 1, - "offset": 0, - "size": 10000 - } } - }); + }, + { + "deployment_id": DEPLOYMENT_ID, + "id": "no_policies_on_node_1212beef", + "runtime_properties": {"application_config": {}} + }, + { + "deployment_id": DEPLOYMENT_ID, + "id": "no_policy_filters_on_node_55ham", + "runtime_properties": { + "application_config": {}, + "policies": {} + } + } +]; -describe('test policy on deployment-handler', () => { - it('starting', function() { - console.log("go testing deployment-handler"); - - const conf = require('./../lib/config'); - const logging = require('./../lib/logging'); - const log = logging.getLogger(); - - console.log("started logger"); - log.debug(REQ_ID, "started logger"); - - console.log("conf.configure"); - - return conf.configure() - .then(function(config) { - logging.setLevel(config.logLevel); - - /* Set up exported configuration */ - config.apiLinks = {"test" : true}; - // exports.config = config; - process.mainModule.exports.config = config; - - console.log("got configuration:", 
JSON.stringify(config)); - - log.debug(REQ_ID, "Configuration: " + JSON.stringify(config)); - - const main_app = require('./../deployment-handler'); - console.log("setting main_app..."); - main_app.set_app(); - console.log("set main_app"); - - const req_path = "/policy/components"; - const test_txt = "GET " + req_path; - describe(test_txt, () => { - console.log(test_txt); - it('GET all the components with policy from cloudify', function() { - console.log("chai", test_txt); - return chai.request(main_app.app).get(req_path) - .then(function(res) { - console.log("res for", test_txt, JSON.stringify(res.body)); - log.debug(REQ_ID, "received " + JSON.stringify(res.body)); - expect(res).to.have.status(200); - expect(res).to.be.json; - }) - .catch(function(err) { - console.error("err for", test_txt, err); - throw err; - }); - }); +function nock_cfy_node_instances(action_timer) { + nock(dh.CLOUDIFY_URL).get(CFY_API_NODE_INSTANCES).query(true) + .reply(200, function(uri) { + console.log(action_timer.step, "get", dh.CLOUDIFY_URL, uri); + return JSON.stringify({ + "items": cloudify_node_instances, + "metadata": {"pagination": {"total": cloudify_node_instances.length, "offset": 0, "size": 10000}} }); - }) - .catch(function(e) { - const error = "test of deployment-handler exiting due to startup problem: " + e.message; - console.error(error); - throw e; }); +} + +function test_get_policy_components(dh_server) { + const req_path = "/policy/components"; + const test_txt = "GET " + req_path; + describe(test_txt, () => { + it('GET all the components with policy from cloudify', function() { + const action_timer = new utils.ActionTimer(); + console.log(action_timer.step, test_txt); + nock_cfy_node_instances(action_timer); + + return chai.request(dh_server.app).get(req_path) + .then(function(res) { + console.log(action_timer.step, "res for", test_txt, res.text); + expect(res).to.have.status(200); + expect(res).to.be.json; + }) + .catch(function(err) { + console.error(action_timer.step, 
"err for", test_txt, err); + throw err; + }); + }); + }); +} + +function test_post_policy_catch_up(dh_server) { + const req_path = "/policy"; + const message = JSON.parse(JSON.stringify(message_catch_up)); + message.errored_scopes = ["CLAMP.Config_"]; + message.latest_policies = { + [MONKEYED_POLICY_ID]: create_policy(MONKEYED_POLICY_ID, 55), + [MONKEYED_POLICY_ID_2]: create_policy(MONKEYED_POLICY_ID_2, 22, {"key1": "value1"}), + [MONKEYED_POLICY_ID_4]: create_policy(MONKEYED_POLICY_ID_4, 77, {"service": "alex_service"}), + [MONKEYED_POLICY_ID_5]: create_policy(MONKEYED_POLICY_ID_5, "nan_version"), + [MONKEYED_POLICY_ID_6]: create_policy(MONKEYED_POLICY_ID_6, 66), + "junk_policy": create_policy("junk_policy", "nan_version"), + "fail_filtered": create_policy("fail_filtered", 12, {"ONAPName": "not-match"}), + "fail_filtered_2": create_policy("fail_filtered_2", 32, {"ConfigName": "not-match2"}), + "": create_policy("", 1) + }; + const test_txt = "POST " + req_path + " - catchup " + JSON.stringify(message); + describe(test_txt, () => { + it('POST policy-update - catchup', function() { + const action_timer = new utils.ActionTimer(); + console.log(action_timer.step, test_txt); + const execution_id = "policy_catch_up"; + const resp_to_exe = {"status": "none"}; + nock_cfy_node_instances(action_timer); + + nock(dh.CLOUDIFY_URL).post(CFY_API_EXECUTIONS) + .reply(201, function(uri, requestBody) { + requestBody = JSON.stringify(requestBody); + console.log(action_timer.step, "on_post", dh.CLOUDIFY_URL, uri, requestBody); + Object.assign(resp_to_exe, JSON.parse(requestBody)); + resp_to_exe.status = "pending"; + resp_to_exe.created_at = RUN_TS; + resp_to_exe.workflow_id = EXECUTE_OPERATION; + resp_to_exe.is_system_workflow = false; + resp_to_exe.blueprint_id = BLUEPRINT_ID; + resp_to_exe.error = ""; + resp_to_exe.id = execution_id; + resp_to_exe.parameters.run_by_dependency_order = false; + resp_to_exe.parameters.operation = OPERATION_POLICY_UPDATE; + 
resp_to_exe.parameters.type_names = []; + + console.log(action_timer.step, "reply to post", dh.CLOUDIFY_URL, uri, JSON.stringify(resp_to_exe)); + + return JSON.stringify(resp_to_exe); + }); + + nock(dh.CLOUDIFY_URL).get(CFY_API_EXECUTION + execution_id) + .reply(200, function(uri) { + resp_to_exe.status = "pending"; + console.log(action_timer.step, "get", dh.CLOUDIFY_URL, uri, JSON.stringify(resp_to_exe)); + return JSON.stringify(resp_to_exe); + }); + nock(dh.CLOUDIFY_URL).get(CFY_API_EXECUTION + execution_id) + .times(2) + .reply(200, function(uri) { + resp_to_exe.status = "started"; + console.log(action_timer.step, "get", dh.CLOUDIFY_URL, uri, JSON.stringify(resp_to_exe)); + return JSON.stringify(resp_to_exe); + }); + nock(dh.CLOUDIFY_URL).get(CFY_API_EXECUTION + execution_id) + .reply(200, function(uri) { + resp_to_exe.status = "terminated"; + console.log(action_timer.step, "get", dh.CLOUDIFY_URL, uri, JSON.stringify(resp_to_exe)); + return JSON.stringify(resp_to_exe); + }); + + for (var extra_i = 1; extra_i <= 100000; extra_i++) { + const policy_id = "extra_" + extra_i; + message.latest_policies[policy_id] = create_policy(policy_id, extra_i); + } + + return chai.request(dh_server.app).post(req_path) + .set('content-type', 'application/json') + .set('X-ECOMP-RequestID', 'test_post_policy_catch_up') + .send(message) + .then(function(res) { + console.log(action_timer.step, "res for", test_txt, res.text); + expect(res).to.have.status(200); + expect(res).to.be.json; + + return utils.sleep(25000); + }) + .then(function() { + console.log(action_timer.step, "the end of test"); + }) + .catch(function(err) { + console.error(action_timer.step, "err for", test_txt, err); + throw err; + }); + }).timeout(60000); + }); +} + +function test_fail_cfy_policy_catch_up(dh_server) { + const req_path = "/policy"; + const message = JSON.parse(JSON.stringify(message_catch_up)); + message.latest_policies = { + [MONKEYED_POLICY_ID_6]: create_policy(MONKEYED_POLICY_ID_6, 66) + }; + const 
test_txt = "fail POST " + req_path + " - catchup without execution_id " + JSON.stringify(message); + describe(test_txt, () => { + it('fail POST policy-update - catchup without execution_id', function() { + const action_timer = new utils.ActionTimer(); + console.log(action_timer.step, test_txt); + const execution_id = "policy_catch_up"; + const resp_to_exe = {"status": "none"}; + nock_cfy_node_instances(action_timer); + + nock(dh.CLOUDIFY_URL).post(CFY_API_EXECUTIONS) + .reply(201, function(uri, requestBody) { + requestBody = JSON.stringify(requestBody); + console.log(action_timer.step, "on_post", dh.CLOUDIFY_URL, uri, requestBody); + Object.assign(resp_to_exe, JSON.parse(requestBody)); + resp_to_exe.status = "pending"; + + console.log(action_timer.step, "reply to post", dh.CLOUDIFY_URL, uri, JSON.stringify(resp_to_exe)); + + return JSON.stringify(resp_to_exe); + }); + + return chai.request(dh_server.app).post(req_path) + .set('content-type', 'application/json') + .set('X-ECOMP-RequestID', 'test_post_policy_catch_up') + .send(message) + .then(function(res) { + console.log(action_timer.step, "res for", test_txt, res.text); + expect(res).to.have.status(200); + expect(res).to.be.json; + + return utils.sleep(1000); + }) + .then(function() { + console.log(action_timer.step, "the end of test"); + }) + .catch(function(err) { + console.error(action_timer.step, "err for", test_txt, err); + throw err; + }); + }).timeout(30000); + }); +} + +function test_fail_400_cfy_policy_catch_up(dh_server) { + const req_path = "/policy"; + const message = JSON.parse(JSON.stringify(message_catch_up)); + message.latest_policies = { + [MONKEYED_POLICY_ID_6]: create_policy(MONKEYED_POLICY_ID_6, 66) + }; + const test_txt = "fail 400 POST " + req_path + " - existing_running_execution_error " + JSON.stringify(message); + describe(test_txt, () => { + it('fail 400 POST policy-update - existing_running_execution_error', function() { + const action_timer = new utils.ActionTimer(); + 
console.log(action_timer.step, test_txt); + const execution_id = "policy_catch_up"; + const resp_to_exe = {"error_code": "existing_running_execution_error"}; + nock_cfy_node_instances(action_timer); + + nock(dh.CLOUDIFY_URL).post(CFY_API_EXECUTIONS).times(5) + .reply(400, function(uri, requestBody) { + console.log(action_timer.step, "on_post", dh.CLOUDIFY_URL, uri, JSON.stringify(requestBody)); + console.log(action_timer.step, "reply to post", dh.CLOUDIFY_URL, uri, JSON.stringify(resp_to_exe)); + return JSON.stringify(resp_to_exe); + }); + + return chai.request(dh_server.app).post(req_path) + .set('content-type', 'application/json') + .set('X-ECOMP-RequestID', 'test_post_policy_catch_up') + .send(message) + .then(function(res) { + console.log(action_timer.step, "res for", test_txt, res.text); + expect(res).to.have.status(200); + expect(res).to.be.json; + + return utils.sleep(25000); + }) + .then(function() { + console.log(action_timer.step, "the end of test"); + }) + .catch(function(err) { + console.error(action_timer.step, "err for", test_txt, err); + throw err; + }); + }).timeout(30000); }); -});
\ No newline at end of file +} + +function test_fail_404_cfy_policy_catch_up(dh_server) { + const req_path = "/policy"; + const message = JSON.parse(JSON.stringify(message_catch_up)); + message.latest_policies = { + [MONKEYED_POLICY_ID_6]: create_policy(MONKEYED_POLICY_ID_6, 66) + }; + const test_txt = "fail 404 POST " + req_path + " - not_found_error " + JSON.stringify(message); + describe(test_txt, () => { + it('fail 404 POST policy-update - not_found_error', function() { + const action_timer = new utils.ActionTimer(); + console.log(action_timer.step, test_txt); + const execution_id = "policy_catch_up"; + const resp_to_exe = {"error_code": "not_found_error"}; + nock_cfy_node_instances(action_timer); + + nock(dh.CLOUDIFY_URL).post(CFY_API_EXECUTIONS).times(5) + .reply(404, function(uri, requestBody) { + console.log(action_timer.step, "on_post", dh.CLOUDIFY_URL, uri, JSON.stringify(requestBody)); + console.log(action_timer.step, "reply to post", dh.CLOUDIFY_URL, uri, JSON.stringify(resp_to_exe)); + return JSON.stringify(resp_to_exe); + }); + + return chai.request(dh_server.app).post(req_path) + .set('content-type', 'application/json') + .set('X-ECOMP-RequestID', 'test_post_policy_catch_up') + .send(message) + .then(function(res) { + console.log(action_timer.step, "res for", test_txt, res.text); + expect(res).to.have.status(200); + expect(res).to.be.json; + + return utils.sleep(1000); + }) + .then(function() { + console.log(action_timer.step, "the end of test"); + }) + .catch(function(err) { + console.error(action_timer.step, "err for", test_txt, err); + throw err; + }); + }).timeout(30000); + }); +} + +dh.add_tests([ + test_get_policy_components, + test_post_policy_catch_up, + test_fail_cfy_policy_catch_up, + test_fail_400_cfy_policy_catch_up, + test_fail_404_cfy_policy_catch_up +]); diff --git a/tests/test_zzz_run.js b/tests/test_zzz_run.js new file mode 100644 index 0000000..8ae405a --- /dev/null +++ b/tests/test_zzz_run.js @@ -0,0 +1,21 @@ +/* +Copyright(c) 2018 
AT&T Intellectual Property. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. + +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and limitations under the License. +*/ + +"use strict"; + +const dh = require('./mock_deployment_handler'); + +dh.run_dh(); @@ -1 +1,21 @@ +/*- + * ============LICENSE_START======================================================= + * PROJECT + * ================================================================================ + * Copyright (C) 2018 AT&T Intellectual Property. All rights + * reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ exports.commit="unspecified"; diff --git a/version.properties b/version.properties index 73415a7..5791c10 100644 --- a/version.properties +++ b/version.properties @@ -1,5 +1,5 @@ -major=1
-minor=1
+major=2
+minor=2
patch=0
base_version=${major}.${minor}.${patch}
release_version=${base_version}
|