author    Alex Shatov <alexs@att.com>    2018-05-04 12:26:17 -0400
committer Alex Shatov <alexs@att.com>    2018-05-04 12:26:17 -0400
commit    a46339420faefc49cb48adf2989a0884ff961278 (patch)
tree      3862914b2996f355982085784bdea3f77ba0fd85
parent    f8cab3eebdcee288332e16bda5bd6b2fa17e02ac (diff)
fixed 500 "value" argument is out of bounds
- convert the EOL to Linux "\n" in the blueprint
  before zipping and sending it to Cloudify, to avoid crashing
  when the blueprint uses Windows "\r\n" or Mac "\r" line endings
  (illustrative sketches of these changes appear after the commit message)
- on catching an exception, log the stack trace
- added logger.info and switched logger.debug calls to logger.info
  in a variety of places
- external version 2.1.2, internal version 4.4.2
- unit test code coverage
Statements : 84.26% ( 942/1118 )
Branches : 64.14% ( 322/502 )
Functions : 81.68% ( 156/191 )
Lines : 84.32% ( 930/1103 )
Change-Id: I9f5f28ddd5d143ca4903316c9199df7d27682143
Signed-off-by: Alex Shatov <alexs@att.com>
Issue-ID: DCAEGEN2-487
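The EOL normalization above is the core of the fix in exports.uploadBlueprint (lib/cloudify.js): the blueprint text is rewritten to Linux line endings before it is packed into the archive that is PUT to Cloudify. A minimal sketch of that step follows; the packageBlueprint wrapper name is illustrative, and admzip is assumed to be the adm-zip package required by the module.

// Sketch of the EOL normalization performed before the blueprint upload (see lib/cloudify.js).
const admzip = require('adm-zip');

function packageBlueprint(blueprint) {
    // Normalize Windows "\r\n" and Mac "\r" line endings to Linux "\n" so that a
    // CRLF- or CR-based blueprint no longer breaks the upload.
    const blueprint_yaml = blueprint.replace(/\r\n/g, "\n").replace(/\r/g, "\n");

    // Cloudify expects an archive of a directory, not the bare blueprint text.
    const zip = new admzip();
    zip.addFile('work/', new Buffer(0));
    zip.addFile('work/blueprint.yaml', new Buffer(blueprint_yaml, 'utf8'));
    return zip.toBuffer();   // used as the body of the PUT to <cfyAPI>/blueprints/<bpid>
}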
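The stack logging mentioned in the second bullet follows one pattern throughout the commit: the stack is flattened to a single line and appended to the message before it is logged. A self-contained sketch of that pattern; describeError is an illustrative helper, not a function from the codebase.

// Illustrative helper showing the stack-flattening pattern applied across the handlers.
function describeError(e) {
    return (e.message || "") + " " + (e.stack || "").replace(/\n/g, " ");
}

try {
    JSON.parse("{ not valid json");          // force an exception for the example
} catch (e) {
    console.error("Deployment-handler problem: " + describeError(e));
}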
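The new logger.info sits next to the existing logger.debug in lib/logging.js and writes through the same debug-record formatter, which is what allowed call sites to switch from debug to info. A simplified stand-in sketch, assuming console in place of the module's debugLogger and a trivial formatDebugRecord.

// Simplified stand-in for the logger returned by lib/logging.js getLogger().
const formatDebugRecord = (reqId, msg) => (reqId || "no-request-id") + " " + msg;

const logger = {
    info:  (reqId, msg) => console.info(formatDebugRecord(reqId, msg)),   // added in this commit
    debug: (reqId, msg) => console.debug(formatDebugRecord(reqId, msg))   // pre-existing
};

logger.info("req-123", "Configuration: {}");   // call sites switched from logger.debug to logger.info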
-rw-r--r--  deployment-handler.js            | 12
-rw-r--r--  lib/cloudify.js                  | 38
-rw-r--r--  lib/dcae-deployments.js          |  4
-rw-r--r--  lib/deploy.js                    | 30
-rw-r--r--  lib/logging.js                   |  4
-rw-r--r--  lib/middleware.js                | 29
-rw-r--r--  lib/policy.js                    | 28
-rw-r--r--  lib/promise_request.js           |  2
-rw-r--r--  package.json                     |  2
-rw-r--r--  pom.xml                          |  2
-rw-r--r--  tests/mock_deployment_handler.js |  3
-rw-r--r--  tests/test_dcae-deployments.js   |  2
-rw-r--r--  version.properties               |  2
13 files changed, 87 insertions, 71 deletions
diff --git a/deployment-handler.js b/deployment-handler.js
index e049b82..bc1a85a 100644
--- a/deployment-handler.js
+++ b/deployment-handler.js
@@ -77,7 +77,7 @@ const start = function(config) {
     };
 
     process.mainModule.exports.config = config;
-    log.debug(null, "Configuration: " + JSON.stringify(config));
+    log.info(null, "Configuration: " + JSON.stringify(config));
 
     set_app();
@@ -103,7 +103,7 @@ const start = function(config) {
     }
     catch (e) {
         throw (createError('Could not create http(s) server--exiting: '
-            + e.message, 500, 'system', 551));
+            + (e.message || "") + " " + (e.stack || "").replace(/\n/g, " "), 500, 'system', 551));
     }
 
     server.setTimeout(0);
@@ -147,10 +147,10 @@ const log = logging.getLogger();
 conf.configure()
 .then(start)
 .catch(function(e) {
-    log.error(e.logCode ? e : createError(
-        'Deployment-handler exiting due to start-up problem: ' + e.message, 500,
-        'system', 552));
-    console.error("Deployment-handler exiting due to startup problem: " + e.message);
+    const fatal_msg = 'Deployment-handler exiting due to start-up problem: ' + (e.message || "")
+                    + " " + (e.stack || "").replace(/\n/g, " ");
+    log.error(e.logCode ? e : createError(fatal_msg, 500, 'system', 552));
+    console.error(fatal_msg);
 });
 
 module.exports.app = app;
diff --git a/lib/cloudify.js b/lib/cloudify.js
index c0504af..51be6e1 100644
--- a/lib/cloudify.js
+++ b/lib/cloudify.js
@@ -91,12 +91,12 @@ const getExecutionStatus = function(req, execution_id) {
 const getWorkflowResult = function(mainReq, execution_id) {
     /* Defense: Some callers do not supply mainReq */
     mainReq = mainReq || {};
-    logger.debug(mainReq.dcaeReqId, "Getting workflow result for execution id: " + execution_id);
+    logger.info(mainReq.dcaeReqId, "Getting workflow result for execution id: " + execution_id);
 
     // Function for testing if workflow is finished
     // Expects the result of getExecStatus
     var checkStatus = function(res) {
-        logger.debug(mainReq.dcaeReqId, "Checking result: " + JSON.stringify(res) + " ==> " + (res.json && res.json.status && FINISHED.indexOf(res.json.status) < 0));
+        logger.info(mainReq.dcaeReqId, "Checking result: " + JSON.stringify(res) + " ==> " + (res.json && res.json.status && FINISHED.indexOf(res.json.status) < 0));
         return res.json && res.json.status && FINISHED.indexOf(res.json.status) < 0;
     };
@@ -109,7 +109,7 @@ const getWorkflowResult = function(mainReq, execution_id) {
         /* Handle fulfilled promise from repeatWhile */
         function(res) {
-            logger.debug(mainReq.dcaeReqId, 'workflow result: ' + JSON.stringify(res));
+            logger.info(mainReq.dcaeReqId, 'workflow result: ' + JSON.stringify(res));
 
             /* Successful completion */
             if (res.json && res.json.status && res.json.status === 'terminated') {
@@ -189,11 +189,11 @@ const startWorkflowExecution = function(mainReq, deployment_id, workflow_id, par
 const initiateWorkflowExecution = function(req, deployment_id, workflow_id, parameters) {
     return startWorkflowExecution(req, deployment_id, workflow_id, parameters)
     .then(function(result) {
-        logger.debug(req.dcaeReqId, "Result from POSTing workflow execution start: " + JSON.stringify(result));
+        logger.info(req.dcaeReqId, "Result from POSTing workflow execution start: " + JSON.stringify(result));
         if (result.json && result.json.id) {
             return {deploymentId: deployment_id, workflowType: workflow_id, executionId: result.json.id};
         }
-        logger.debug(req.dcaeReqId,"Did not get expected JSON body from POST to start workflow");
+        logger.info(req.dcaeReqId,"Did not get expected JSON body from POST to start workflow");
         var err = new Error("POST to start workflow got success response but no body");
         err.status = err.code = 502;
         throw err;
@@ -202,15 +202,17 @@ const initiateWorkflowExecution = function(req, deployment_id, workflow_id, para
 
 // Uploads a blueprint via the Cloudify API
 exports.uploadBlueprint = function(req, bpid, blueprint) {
+    logger.info(req.dcaeReqId, "uploadBlueprint " + bpid);
 
     // Cloudify API wants a gzipped tar of a directory, not the blueprint text
-    var zip = new admzip();
+    const zip = new admzip();
     zip.addFile('work/', new Buffer(0));
-    zip.addFile('work/blueprint.yaml', new Buffer(blueprint, 'utf8'));
-    var src = (zip.toBuffer());
+    const blueprint_yaml = blueprint.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
+    zip.addFile('work/blueprint.yaml', new Buffer(blueprint_yaml, 'utf8'));
+    const zip_buffer = zip.toBuffer();
 
     // Set up the HTTP PUT request
-    var reqOptions = {
+    const reqOptions = {
         method : "PUT",
         uri : cfyAPI + "/blueprints/" + bpid,
         headers : {
@@ -221,7 +223,7 @@ exports.uploadBlueprint = function(req, bpid, blueprint) {
     addAuthToOptions(reqOptions);
 
     // Initiate PUT request and return the promise for a result
-    return doRequest(req, reqOptions, src, CLOUDIFY);
+    return doRequest(req, reqOptions, zip_buffer, CLOUDIFY);
 };
 
 // Creates a deployment from a blueprint
@@ -264,7 +266,7 @@ exports.executeWorkflow = function(req, deployment_id, workflow_id, parameters)
 
     // Wait for the result
     .then (function(result) {
-        logger.debug(req.dcaeReqId, "Result from initiating workflow: " + JSON.stringify(result));
+        logger.info(req.dcaeReqId, "Result from initiating workflow: " + JSON.stringify(result));
         return getWorkflowResult(req, result.executionId);
     });
 };
@@ -355,10 +357,10 @@ exports.getNodeInstances = function (mainReq, on_next_node_instances, offset) {
     };
     addAuthToOptions(reqOptions);
 
-    logger.debug(mainReq.dcaeReqId, "getNodeInstances: " + JSON.stringify(reqOptions));
+    logger.info(mainReq.dcaeReqId, "getNodeInstances: " + JSON.stringify(reqOptions));
     return doRequest(mainReq, reqOptions, null, CLOUDIFY)
     .then(function(cloudify_response) {
-        logger.debug(mainReq.dcaeReqId, "getNodeInstances response: " + JSON.stringify(cloudify_response));
+        logger.info(mainReq.dcaeReqId, "getNodeInstances response: " + JSON.stringify(cloudify_response));
         var response = {};
         cloudify_response = cloudify_response && cloudify_response.json;
         if (!cloudify_response || !Array.isArray(cloudify_response.items)) {
@@ -371,7 +373,7 @@ exports.getNodeInstances = function (mainReq, on_next_node_instances, offset) {
             response.message = 'got no more node_instances';
             return response;
         }
-        logger.debug(mainReq.dcaeReqId, 'getNodeInstances got node_instances ' + cloudify_response.items.length);
+        logger.info(mainReq.dcaeReqId, 'getNodeInstances got node_instances ' + cloudify_response.items.length);
         if (typeof on_next_node_instances === 'function') {
             on_next_node_instances(cloudify_response.items);
         }
@@ -403,7 +405,7 @@ const runQueuedExecution = function(mainReq, deployment_id, workflow_id, paramet
                               + " with params(" + JSON.stringify(parameters || {}) + ")";
     startWorkflowExecution(mainReq, deployment_id, workflow_id, parameters)
     .then(function(result) {
-        logger.debug(mainReq.dcaeReqId, "result of start the execution for" + exe_deployment_str + ": " + JSON.stringify(result));
+        logger.info(mainReq.dcaeReqId, "result of start the execution for" + exe_deployment_str + ": " + JSON.stringify(result));
         execution_id = result.json && result.json.id;
         if (!execution_id) {
             throw createError("failed to start execution - no execution_id for" + exe_deployment_str,
@@ -413,10 +415,10 @@ const runQueuedExecution = function(mainReq, deployment_id, workflow_id, paramet
         return getWorkflowResult(mainReq, execution_id);
     })
     .then(function(result) {
-        logger.debug(mainReq.dcaeReqId, 'successfully finished execution: ' + execution_id + " for" + exe_deployment_str);
+        logger.info(mainReq.dcaeReqId, 'successfully finished execution: ' + execution_id + " for" + exe_deployment_str);
         var nextExecution = exeQueue.nextExecution(deployment_id);
         if (nextExecution) {
-            logger.debug(nextExecution.mainReq.dcaeReqId, "next execution for deployment_id " + deployment_id
+            logger.info(nextExecution.mainReq.dcaeReqId, "next execution for deployment_id " + deployment_id
                 + " to " + nextExecution.workflow_id
                 + " with params(" + JSON.stringify(nextExecution.parameters || {}) + ")");
             runQueuedExecution(nextExecution.mainReq, deployment_id, nextExecution.workflow_id, nextExecution.parameters);
@@ -462,7 +464,7 @@ exports.executeOperation = function (mainReq, deployment_id, operation, operatio
 
     if (exeQueue.isDeploymentBusy(deployment_id)) {
         exeQueue.queueUpExecution(mainReq, deployment_id, workflow_id, parameters);
-        logger.debug(mainReq.dcaeReqId, "deployment busy - queue up execution for deployment_id " + deployment_id
+        logger.info(mainReq.dcaeReqId, "deployment busy - queue up execution for deployment_id " + deployment_id
             + " to " + workflow_id + " with params(" + JSON.stringify(parameters || {}) + ")");
         return;
     }
diff --git a/lib/dcae-deployments.js b/lib/dcae-deployments.js
index 5d98a69..193f6b9 100644
--- a/lib/dcae-deployments.js
+++ b/lib/dcae-deployments.js
@@ -37,7 +37,7 @@ const inventory = inv({url: config.inventory.url});
 app.use(middleware.checkType('application/json'));  // Validate type
 app.use(bodyParser.json({strict: true}));           // Parse body as JSON
 app.use(function(req, res, next) {
-    log.debug(req.dcaeReqId,
+    log.info(req.dcaeReqId,
         "new req: " + req.method + " " + req.originalUrl +
         " from: " + req.ip + " body: " + JSON.stringify(req.body)
     );
@@ -165,6 +165,7 @@ app.put('/:deploymentId', function(req, res, next) {
             /* Already sent the response, so just log error */
             /* Don't remove from inventory, because there is a deployment on CM that might need to be removed */
             error.message = "Error deploying deploymentId " + req.params['deploymentId'] + ": " + error.message
+                + " " + (error.stack || "").replace(/\n/g, " ");
             log.error(error, req);
             log.audit(req, 500, error.message);
         }
@@ -212,6 +213,7 @@ app.delete('/:deploymentId', function(req, res, next) {
         else {
             /* Error happened after we sent the response--log it */
             error.message = "Error undeploying deploymentId " + req.params['deploymentId'] + ": " + error.message
+                + " " + (error.stack || "").replace(/\n/g, " ");
             log.error(error, req);
             log.audit(req, 500, error.message);
         }
diff --git a/lib/deploy.js b/lib/deploy.js
index dfd73aa..ee31fd3 100644
--- a/lib/deploy.js
+++ b/lib/deploy.js
@@ -138,27 +138,27 @@ var delay = function(dtime) {
 // Go through the Cloudify API call sequence to upload blueprint, create deployment, and launch install workflow
 // (but don't wait for the workflow to finish)
 const launchBlueprint = function(req, id, blueprint, inputs) {
-    logger.debug(req.dcaeReqId, "deploymentId: " + id + " starting blueprint upload");
+    logger.info(req.dcaeReqId, "deploymentId: " + id + " starting blueprint upload");
     // Upload blueprint
     return cfy.uploadBlueprint(req, id, blueprint)
 
     // Create deployment
     .then (function(result) {
-        logger.debug(req.dcaeReqId, "deploymentId: " + id + " blueprint uploaded");
+        logger.info(req.dcaeReqId, "deploymentId: " + id + " blueprint uploaded");
         // Create deployment
         return cfy.createDeployment(req, id, id, inputs);
     })
 
     // Launch the workflow, but don't wait for it to complete
     .then(function(result){
-        logger.debug(req.dcaeReqId, "deploymentId: " + id + " deployment created");
+        logger.info(req.dcaeReqId, "deploymentId: " + id + " deployment created");
         return delay(DELAY_INSTALL_WORKFLOW)
         .then(function(){
             return cfy.initiateWorkflowExecution(req, id, 'install');
         });
     })
     .catch(function(error) {
-        logger.debug(req.dcaeReqId, "Error: " + error + " for launch blueprint for deploymentId " + id);
+        logger.info(req.dcaeReqId, "Error: " + JSON.stringify(error) + " for launch blueprint for deploymentId " + id);
         throw normalizeError(error);
     });
 };
@@ -166,10 +166,10 @@ exports.launchBlueprint = launchBlueprint;
 
 // Finish installation launched with launchBlueprint
 const finishInstallation = function(req, deploymentId, executionId) {
-    logger.debug(req.dcaeReqId, "finishInstallation: " + deploymentId + " -- executionId: " + executionId);
+    logger.info(req.dcaeReqId, "finishInstallation: " + deploymentId + " -- executionId: " + executionId);
     return cfy.getWorkflowResult(req, executionId)
     .then (function(result){
-        logger.debug(req.dcaeReqId, "deploymentId: " + deploymentId + " install workflow successfully executed");
+        logger.info(req.dcaeReqId, "deploymentId: " + deploymentId + " install workflow successfully executed");
         // Retrieve the outputs from the deployment, as specified in the blueprint
         return delay(DELAY_RETRIEVE_OUTPUTS).then(function() { return cfy.getOutputs(req, deploymentId);
@@ -186,11 +186,11 @@ const finishInstallation = function(req, deploymentId, executionId) {
                 }
             }
         }
-        logger.debug(req.dcaeReqId, "output retrieval result for " + deploymentId + ": " + JSON.stringify(result));
+        logger.info(req.dcaeReqId, "output retrieval result for " + deploymentId + ": " + JSON.stringify(result));
         return annotateOutputs(req, deploymentId, rawOutputs);
     })
     .catch(function(err) {
-        logger.debug(req.dcaeReqId, "Error finishing install workflow: " + err + " -- " + JSON.stringify(err));
+        logger.info(req.dcaeReqId, "Error finishing install workflow: " + err + " -- " + JSON.stringify(err));
         throw normalizeError(err);
     });
 };
@@ -198,31 +198,31 @@ exports.finishInstallation = finishInstallation;
 
 // Initiate uninstall workflow against a deployment, but don't wait for workflow to finish
 const launchUninstall = function(req, deploymentId) {
-    logger.debug(req.dcaeReqId, "deploymentId: " + deploymentId + " starting uninstall workflow");
+    logger.info(req.dcaeReqId, "deploymentId: " + deploymentId + " starting uninstall workflow");
 
     // Run uninstall workflow
     return cfy.initiateWorkflowExecution(req, deploymentId, 'uninstall')
     .then(function(result) {
         return result;
     })
     .catch(function(err) {
-        logger.debug(req.dcaeReqId, "Error initiating uninstall workflow: " + err + " -- " + JSON.stringify(err));
+        logger.info(req.dcaeReqId, "Error initiating uninstall workflow: " + err + " -- " + JSON.stringify(err));
         throw normalizeError(err);
     });
 };
 exports.launchUninstall = launchUninstall;
 
 const finishUninstall = function(req, deploymentId, executionId) {
-    logger.debug(req.dcaeReqId, "finishUninstall: " + deploymentId + " -- executionId: " + executionId);
+    logger.info(req.dcaeReqId, "finishUninstall: " + deploymentId + " -- executionId: " + executionId);
     return cfy.getWorkflowResult(req, executionId)
     .then (function(result){
-        logger.debug(req.dcaeReqId, "deploymentId: " + deploymentId + " uninstall workflow successfully executed");
+        logger.info(req.dcaeReqId, "deploymentId: " + deploymentId + " uninstall workflow successfully executed");
         // Delete the deployment
         return delay(DELAY_DELETE_DEPLOYMENT).then(function() { return cfy.deleteDeployment(req, deploymentId); });
     })
     .then (function(result){
-        logger.debug(req.dcaeReqId, "deploymentId: " + deploymentId + " deployment deleted");
+        logger.info(req.dcaeReqId, "deploymentId: " + deploymentId + " deployment deleted");
         // Delete the blueprint
         return delay(DELAY_DELETE_BLUEPRINT).then(function() { return cfy.deleteBlueprint(req, deploymentId);
@@ -264,7 +264,7 @@ exports.getExecutionStatus = function (req, exid) {
         if (res.json.error) {
             result.error = res.json.error;
         }
-        logger.debug(req.dcaeReqId, "getExecutionStatus result: " + JSON.stringify(result));
+        logger.info(req.dcaeReqId, "getExecutionStatus result: " + JSON.stringify(result));
         return result;
     })
     .catch(function(error) {
@@ -294,7 +294,7 @@ exports.deployBlueprint = function(req, id, blueprint, inputs) {
 
 // Go through the Cloudify API call sequence to do an undeployment of a previously deployed blueprint
 exports.undeployDeployment = function(req, id) {
-    logger.debug(req.dcaeReqId, "deploymentId: " + id + " starting uninstall workflow");
+    logger.info(req.dcaeReqId, "deploymentId: " + id + " starting uninstall workflow");
 
     // Run launch uninstall workflow
     return launchUninstall(req, id)
diff --git a/lib/logging.js b/lib/logging.js
index 4636be3..8d94f87 100644
--- a/lib/logging.js
+++ b/lib/logging.js
@@ -259,6 +259,10 @@ exports.getLogger = function() {
             metricsLogger.info(formatMetricsRecord(req, opInfo, extra));
         },
 
+        info: function(reqId, msg) {
+            debugLogger.info(formatDebugRecord(reqId, msg));
+        },
+
         debug: function(reqId, msg) {
             debugLogger.debug(formatDebugRecord(reqId, msg));
         }
diff --git a/lib/middleware.js b/lib/middleware.js
index 183cf77..ee39863 100644
--- a/lib/middleware.js
+++ b/lib/middleware.js
@@ -1,16 +1,16 @@
 /*
-Copyright(c) 2017 AT&T Intellectual Property. All rights reserved.
+Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved.
 
-Licensed under the Apache License, Version 2.0 (the "License"); 
+Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at
 
         http://www.apache.org/licenses/LICENSE-2.0
 
-Unless required by applicable law or agreed to in writing, 
+Unless required by applicable law or agreed to in writing,
 software distributed under the License is distributed on an
 "AS IS" BASIS, WITHOUT WARRANTIES OR
-CONDITIONS OF ANY KIND, either express or implied. 
+CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions
 and limitations under the License.
 */
@@ -32,12 +32,19 @@ exports.assignId = function(req, res, next) {
 
 /* Error handler -- send error with JSON body */
 exports.handleErrors = function(err, req, res, next) {
-    var status = err.status || 500;
-    var msg = err.message || err.body || 'unknown error'
-    res.status(status).type('application/json').send({status: status, message: msg });
-    log.audit(req, status, msg);
+    const response = {
+        status : err.status || 500,
+        message : err.message || err.body || 'unknown error'
+    };
+    if (err.stack) {
+        response.stack = err.stack.split("\n");
+    }
+
+    res.status(response.status).type('application/json').send(response);
+    log.audit(req, response.status, JSON.stringify(response));
 
-    if (status >= 500) {
+    if (response.status >= 500) {
+        err.message = response.message + (err.stack && " " + response.stack.join(', '));
         log.error(err, req);
     }
 };
@@ -55,7 +62,7 @@ exports.checkType = function(type){
             var err = new Error ('Content-Type must be \'' + type +'\'');
             err.status = 415;
             next (err);
-        }
+        }
     };
 };
@@ -70,7 +77,7 @@ exports.checkProps = function(props) {
         }
         else {
             next();
-        }
+        }
     };
 };
diff --git a/lib/policy.js b/lib/policy.js
index 7c47dd3..4a3248d 100644
--- a/lib/policy.js
+++ b/lib/policy.js
@@ -51,7 +51,7 @@ function policyUpdate(req, res, next) {
         removed_policy_ids : {}
     };
 
-    logger.debug(req.dcaeReqId, "policyUpdate "
+    logger.info(req.dcaeReqId, "policyUpdate "
         + req.method + ' ' + req.protocol + '://' + req.get('host') + req.originalUrl
         + " catch_up: " + policy_update.catch_up
         + " latest_policies: " + policy_update.latest_policies
@@ -95,7 +95,7 @@ function policyUpdate(req, res, next) {
                 && !node_instance.runtime_properties.policy_filters)) {
                 return;
             }
-            logger.debug(req.dcaeReqId, "checking policies on node_instance: " + JSON.stringify(node_instance));
+            logger.info(req.dcaeReqId, "checking policies on node_instance: " + JSON.stringify(node_instance));
 
             const deployment = policy_update.policy_deployments[node_instance.deployment_id] || {
                 "deployment_id": node_instance.deployment_id,
@@ -122,7 +122,7 @@ function policyUpdate(req, res, next) {
                     have_policies = true;
                     deployment.removed_policy_ids[policy_id] = true;
                     policy_update.removed_policy_ids[policy_id] = true;
-                    logger.debug(req.dcaeReqId, "going to remove policy " + policy_id + " from node_instance: " + JSON.stringify(node_instance));
+                    logger.info(req.dcaeReqId, "going to remove policy " + policy_id + " from node_instance: " + JSON.stringify(node_instance));
                     return;
                 }
@@ -135,13 +135,13 @@ function policyUpdate(req, res, next) {
                 have_policies = true;
                 deployment.updated_policies[policy_id] = latest_policy;
                 policy_update.updated_policy_ids[policy_id] = true;
-                logger.debug(req.dcaeReqId, "going to update policy " + policy_id + " on node_instance: " + JSON.stringify(node_instance));
+                logger.info(req.dcaeReqId, "going to update policy " + policy_id + " on node_instance: " + JSON.stringify(node_instance));
             });
 
             const policy_filters = node_instance.runtime_properties.policy_filters || {};
             const policy_filter_ids = Object.keys(policy_filters);
             if (policy_filter_ids.length) {
-                logger.debug(req.dcaeReqId, "matching latest policies to policy_filters[" + policy_filter_ids.length + "] on node_instance: " + JSON.stringify(node_instance));
+                logger.info(req.dcaeReqId, "matching latest policies to policy_filters[" + policy_filter_ids.length + "] on node_instance: " + JSON.stringify(node_instance));
                 try {
                     Object.keys(policy_update.latest_policies).forEach(policy_id => {
                         if (!deployment.is_deployment_busy && deployed_policies[policy_id]) {return;}
@@ -206,7 +206,7 @@ function policyUpdate(req, res, next) {
                             }
                             deployment.added_policies[policy_filter_id].policies[policy_id] = latest_policy;
                             policy_update.added_policy_ids[policy_id] = true;
-                            logger.debug(req.dcaeReqId, "going to add policy " + JSON.stringify(latest_policy)
+                            logger.info(req.dcaeReqId, "going to add policy " + JSON.stringify(latest_policy)
                                 + " per policy_filter_id " + policy_filter_id
                                 + " on node_instance: " + JSON.stringify(node_instance));
                             return true;
@@ -225,11 +225,11 @@ function policyUpdate(req, res, next) {
                 }
             });
 
-        logger.debug(req.dcaeReqId, "collected policy_deployments to update " + JSON.stringify(policy_update.policy_deployments));
+        logger.info(req.dcaeReqId, "collected policy_deployments to update " + JSON.stringify(policy_update.policy_deployments));
     };
 
     const update_policies_on_deployments = function(result) {
-        logger.debug(req.dcaeReqId, "finished loading policy_deployments" + JSON.stringify(result));
+        logger.info(req.dcaeReqId, "finished loading policy_deployments" + JSON.stringify(result));
         if (result.status !== 200) {
             const error_msg = "failed to retrieve component policies from cloudify " + result.message;
             logger.error(createError(error_msg, result.status, "api", 502, 'cloudify-manager'), req);
@@ -248,7 +248,7 @@ function policyUpdate(req, res, next) {
             + "] and added policies[" + Object.keys(policy_update.added_policy_ids).length
             + "] and removed policies[" + Object.keys(policy_update.removed_policy_ids).length
             + "] to deployments[" + deployment_ids.length + "]";
-        logger.debug(req.dcaeReqId, audit_msg + ": " + JSON.stringify(deployment_ids));
+        logger.info(req.dcaeReqId, audit_msg + ": " + JSON.stringify(deployment_ids));
         logger.audit(req, result.status, audit_msg);
         deployment_ids.forEach(deployment_id => {
             const deployment = policy_update.policy_deployments[deployment_id];
@@ -257,7 +257,7 @@ function policyUpdate(req, res, next) {
             });
             deployment.removed_policy_ids = Object.keys(deployment.removed_policy_ids);
 
-            logger.debug(req.dcaeReqId, "ready to execute-operation policy-update on deployment " + JSON.stringify(deployment));
+            logger.info(req.dcaeReqId, "ready to execute-operation policy-update on deployment " + JSON.stringify(deployment));
             cloudify.executeOperation(req, deployment.deployment_id, POLICY_UPDATE_OPERATION,
                 {
                     'updated_policies': deployment.updated_policies,
@@ -276,7 +276,7 @@ function policyUpdate(req, res, next) {
  * retrieve all component-policies from cloudify
  */
 function getComponentPoliciesFromCloudify(req, res, next) {
-    logger.debug(req.dcaeReqId, "getComponentPoliciesFromCloudify " + req.originalUrl);
+    logger.info(req.dcaeReqId, "getComponentPoliciesFromCloudify " + req.originalUrl);
     const response = {"requestID": req.dcaeReqId};
     response.started = new Date();
     response.server_instance_uuid = process.mainModule.exports.config.server_instance_uuid;
@@ -324,7 +324,7 @@ function getComponentPoliciesFromCloudify(req, res, next) {
                 }
             });
 
-            logger.debug(req.dcaeReqId, "collected " + response.node_instance_ids.length
+            logger.info(req.dcaeReqId, "collected " + response.node_instance_ids.length
                 + " node_instance_ids: " + JSON.stringify(response.node_instance_ids)
                 + " component_policies: " + JSON.stringify(response.component_policies)
                 + " component_policy_filters: " + JSON.stringify(response.component_policy_filters)
@@ -334,7 +334,7 @@ function getComponentPoliciesFromCloudify(req, res, next) {
             response.ended = new Date();
             response.status = result.status;
             response.message = result.message;
-            logger.debug(req.dcaeReqId, result.message);
+            logger.info(req.dcaeReqId, result.message);
             if (result.status !== 200) {
                 logger.error(createError(result.message, result.status, "api", 502, 'cloudify-manager'), req);
             }
@@ -351,7 +351,7 @@ app.set('etag', false);
 app.use(require('./middleware').checkType('application/json'));
 app.use(require('body-parser').json({strict: true}));
 app.use(function(req, res, next) {
-    logger.debug(req.dcaeReqId,
+    logger.info(req.dcaeReqId,
         "new req: " + req.method + " " + req.originalUrl +
         " from: " + req.ip + " body: " + JSON.stringify(req.body)
     );
diff --git a/lib/promise_request.js b/lib/promise_request.js
index c34227d..200e516 100644
--- a/lib/promise_request.js
+++ b/lib/promise_request.js
@@ -70,7 +70,7 @@ exports.doRequest = function(mainReq, options, body, targetEntity) {
             catch (e) {
                 opInfo.respCode = 500;
                 opInfo.complete = false;
-                logger.metrics(mainReq, opInfo, e.message);
+                logger.metrics(mainReq, opInfo, (e.message || "") + " " + (e.stack || "").replace(/\n/g, " "));
                 reject(e);
             }
diff --git a/package.json b/package.json
index 9a11f49..60061c3 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "onap-dcae-deployment-handler",
-  "version": "4.4.1",
+  "version": "4.4.2",
   "description": "ONAP DCAE Deployment Handler",
   "main": "deployment-handler.js",
   "dependencies": {
diff --git a/pom.xml b/pom.xml
--- a/pom.xml
+++ b/pom.xml
@@ -29,7 +29,7 @@ ECOMP is a trademark and service mark of AT&T Intellectual Property.
     <groupId>org.onap.dcaegen2.platform</groupId>
     <artifactId>deployment-handler</artifactId>
     <name>dcaegen2-platform-deployment-handler</name>
-    <version>2.1.1-SNAPSHOT</version>
+    <version>2.1.2-SNAPSHOT</version>
     <url>http://maven.apache.org</url>
     <properties>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
diff --git a/tests/mock_deployment_handler.js b/tests/mock_deployment_handler.js
index ee42511..7407e55 100644
--- a/tests/mock_deployment_handler.js
+++ b/tests/mock_deployment_handler.js
@@ -88,7 +88,8 @@ const run_dh = function() {
             }
         })
         .catch(function(e) {
-            const error = "test of deployment-handler exiting due to test problem: " + e.message;
+            const error = "test of deployment-handler exiting due to test problem: " + e.message
+                + " " + (e.stack || "").replace(/\n/g, " ");
             console.error(error);
             throw e;
         });
diff --git a/tests/test_dcae-deployments.js b/tests/test_dcae-deployments.js
index ae95e06..ea81278 100644
--- a/tests/test_dcae-deployments.js
+++ b/tests/test_dcae-deployments.js
@@ -103,7 +103,7 @@ const Inventory = {
     "owner": "dcaeorch",
     "typeName": "svc-type-000",
     "typeVersion": 1,
-    "blueprintTemplate": "tosca_definitions_version: cloudify_dsl_1_2\nimports:\n - http://www.getcloudify.org/spec/cloudify/3.3/types.yaml\n - https://nexus01.research.att.com:8443/repository/solutioning01-mte2-raw/type_files/dti_inputs.yaml\nnode_templates:\n type-00:\n type: cloudify.nodes.Root",
+    "blueprintTemplate": "--- \r\ntosca_definitions_version: cloudify_dsl_1_3\r\nimports: \r\n - \"http://www.getcloudify.org/spec/cloudify/3.4/types.yaml\"\r\n - \"https://nexus.onap.org/service/local/repositories/raw/content/org.onap.dcaegen2.platform.plugins/R2/dockerplugin/3.2.0/dockerplugin_types.yaml\"\r\n - \"https://nexus.onap.org/service/local/repositories/raw/content/org.onap.dcaegen2.platform.plugins/R2/relationshipplugin/1.0.0/relationshipplugin_types.yaml\"\r\n - \"https://nexus.onap.org/service/local/repositories/raw/content/org.onap.dcaegen2.platform.plugins/R2/dcaepolicyplugin/2.3.0/dcaepolicyplugin_types.yaml\"\r\ninputs: \r\n aaiEnrichmentHost: \r\n default: none\r\n type: string\r\n 
aaiEnrichmentPort: \r\n default: 8443\r\n type: string\r\n cbs_host: \r\n default: config-binding-service.dcae\r\n type: string\r\n cbs_port: \r\n default: \"10000\"\r\n type: string\r\n consul_host: \r\n default: consul-server.onap-consul\r\n type: string\r\n consul_port: \r\n default: \"8500\"\r\n type: string\r\n dh_location_id: \r\n default: zone1\r\n type: string\r\n dh_override: \r\n default: component_dockerhost\r\n type: string\r\n dmaap_host: \r\n default: dmaap.onap-message-router\r\n type: string\r\n dmaap_port: \r\n default: 3904\r\n type: string\r\n enableAAIEnrichment: \r\n default: false\r\n type: string\r\n enableRedisCaching: \r\n default: false\r\n type: string\r\n external_port: \r\n default: \"32014\"\r\n description: \"Port for CDAPgui to be exposed\"\r\n type: string\r\n policy_id: \r\n default: none\r\n type: string\r\n redisHosts: \r\n type: string\r\n tag_version: \r\n default: \"nexus3.onap.org:10001/onap//onap/org.onap.dcaegen2.deployments.tca-cdap-container:1.0.0\"\r\n type: string\r\nnode_templates: \r\n docker_service_host: \r\n properties: \r\n docker_host_override: \r\n get_input: dh_override\r\n location_id: \r\n get_input: dh_location_id\r\n type: dcae.nodes.SelectedDockerHost\r\n tca_docker: \r\n interfaces: \r\n cloudify.interfaces.lifecycle: \r\n start: \r\n inputs: \r\n envs: \r\n AAIHOST: \r\n get_input: aaiEnrichmentHost\r\n AAIPORT: \r\n get_input: aaiEnrichmentPort\r\n CBS_HOST: \r\n get_input: cbs_host\r\n CBS_PORT: \r\n get_input: cbs_port\r\n CONFIG_BINDING_SERVICE: config_binding_service\r\n CONSUL_HOST: \r\n get_input: consul_host\r\n CONSUL_PORT: \r\n get_input: consul_port\r\n DMAAPHOST: \r\n get_input: dmaap_host\r\n DMAAPPORT: \r\n get_input: dmaap_port\r\n DMAAPPUBTOPIC: unauthenticated.DCAE_CL_OUTPUT\r\n DMAAPSUBTOPIC: unauthenticated.SEC_MEASUREMENT_OUTPUT\r\n# SERVICE_11011_IGNORE: \"true\"\r\n SERVICE_11015_IGNORE: \"true\"\r\n ports:\r\n - concat: [\"11011:\", { get_input: external_port }] \r\n stop: \r\n inputs: \r\n cleanup_image: true\r\n properties: \r\n application_config: \r\n app_config: \r\n appDescription: \"DCAE Analytics Threshold Crossing Alert Application\"\r\n appName: dcae-tca\r\n tcaAlertsAbatementTableName: TCAAlertsAbatementTable\r\n tcaAlertsAbatementTableTTLSeconds: \"1728000\"\r\n tcaSubscriberOutputStreamName: TCASubscriberOutputStream\r\n tcaVESAlertsTableName: TCAVESAlertsTable\r\n tcaVESAlertsTableTTLSeconds: \"1728000\"\r\n tcaVESMessageStatusTableName: TCAVESMessageStatusTable\r\n tcaVESMessageStatusTableTTLSeconds: \"86400\"\r\n thresholdCalculatorFlowletInstances: \"2\"\r\n app_preferences: \r\n aaiEnrichmentHost: \r\n get_input: aaiEnrichmentHost\r\n aaiEnrichmentIgnoreSSLCertificateErrors: \"true\"\r\n aaiEnrichmentPortNumber: \"8443\"\r\n aaiEnrichmentProtocol: https\r\n aaiEnrichmentUserName: DCAE\r\n aaiEnrichmentUserPassword: DCAE\r\n aaiVMEnrichmentAPIPath: /aai/v11/search/nodes-query\r\n aaiVNFEnrichmentAPIPath: /aai/v11/network/generic-vnfs/generic-vnf\r\n enableAAIEnrichment: \r\n get_input: enableAAIEnrichment\r\n enableAlertCEFFormat: \"false\"\r\n enableRedisCaching: \r\n get_input: enableRedisCaching\r\n publisherContentType: application/json\r\n publisherHostName: \r\n get_input: dmaap_host\r\n publisherHostPort: \r\n get_input: dmaap_port\r\n publisherMaxBatchSize: \"1\"\r\n publisherMaxRecoveryQueueSize: \"100000\"\r\n publisherPollingInterval: \"20000\"\r\n publisherProtocol: http\r\n publisherTopicName: unauthenticated.DCAE_CL_OUTPUT\r\n redisHosts: \r\n get_input: redisHosts\r\n 
subscriberConsumerGroup: OpenDCAE-c12\r\n subscriberConsumerId: c12\r\n subscriberContentType: application/json\r\n subscriberHostName: \r\n get_input: dmaap_host\r\n subscriberHostPort: \r\n get_input: dmaap_port\r\n subscriberMessageLimit: \"-1\"\r\n subscriberPollingInterval: \"30000\"\r\n subscriberProtocol: http\r\n subscriberTimeoutMS: \"-1\"\r\n subscriberTopicName: unauthenticated.SEC_MEASUREMENT_OUTPUT\r\n tca_policy: \"{\\\"domain\\\":\\\"measurementsForVfScaling\\\",\\\"metricsPerEventName\\\":[{\\\"eventName\\\":\\\"vFirewallBroadcastPackets\\\",\\\"controlLoopSchemaType\\\":\\\"VNF\\\",\\\"policyScope\\\":\\\"DCAE\\\",\\\"policyName\\\":\\\"DCAE.Config_tca-hi-lo\\\",\\\"policyVersion\\\":\\\"v0.0.1\\\",\\\"thresholds\\\":[{\\\"closedLoopControlName\\\":\\\"ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a\\\",\\\"version\\\":\\\"1.0.2\\\",\\\"fieldPath\\\":\\\"$.event.measurementsForVfScalingFields.vNicUsageArray[*].receivedTotalPacketsDelta\\\",\\\"thresholdValue\\\":300,\\\"direction\\\":\\\"LESS_OR_EQUAL\\\",\\\"severity\\\":\\\"MAJOR\\\",\\\"closedLoopEventStatus\\\":\\\"ONSET\\\"},{\\\"closedLoopControlName\\\":\\\"ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a\\\",\\\"version\\\":\\\"1.0.2\\\",\\\"fieldPath\\\":\\\"$.event.measurementsForVfScalingFields.vNicUsageArray[*].receivedTotalPacketsDelta\\\",\\\"thresholdValue\\\":700,\\\"direction\\\":\\\"GREATER_OR_EQUAL\\\",\\\"severity\\\":\\\"CRITICAL\\\",\\\"closedLoopEventStatus\\\":\\\"ONSET\\\"}]},{\\\"eventName\\\":\\\"vLoadBalancer\\\",\\\"controlLoopSchemaType\\\":\\\"VM\\\",\\\"policyScope\\\":\\\"DCAE\\\",\\\"policyName\\\":\\\"DCAE.Config_tca-hi-lo\\\",\\\"policyVersion\\\":\\\"v0.0.1\\\",\\\"thresholds\\\":[{\\\"closedLoopControlName\\\":\\\"ControlLoop-vDNS-6f37f56d-a87d-4b85-b6a9-cc953cf779b3\\\",\\\"version\\\":\\\"1.0.2\\\",\\\"fieldPath\\\":\\\"$.event.measurementsForVfScalingFields.vNicUsageArray[*].receivedTotalPacketsDelta\\\",\\\"thresholdValue\\\":300,\\\"direction\\\":\\\"GREATER_OR_EQUAL\\\",\\\"severity\\\":\\\"CRITICAL\\\",\\\"closedLoopEventStatus\\\":\\\"ONSET\\\"}]},{\\\"eventName\\\":\\\"Measurement_vGMUX\\\",\\\"controlLoopSchemaType\\\":\\\"VNF\\\",\\\"policyScope\\\":\\\"DCAE\\\",\\\"policyName\\\":\\\"DCAE.Config_tca-hi-lo\\\",\\\"policyVersion\\\":\\\"v0.0.1\\\",\\\"thresholds\\\":[{\\\"closedLoopControlName\\\":\\\"ControlLoop-vCPE-48f0c2c3-a172-4192-9ae3-052274181b6e\\\",\\\"version\\\":\\\"1.0.2\\\",\\\"fieldPath\\\":\\\"$.event.measurementsForVfScalingFields.additionalMeasurements[*].arrayOfFields[0].value\\\",\\\"thresholdValue\\\":0,\\\"direction\\\":\\\"EQUAL\\\",\\\"severity\\\":\\\"MAJOR\\\",\\\"closedLoopEventStatus\\\":\\\"ABATED\\\"},{\\\"closedLoopControlName\\\":\\\"ControlLoop-vCPE-48f0c2c3-a172-4192-9ae3-052274181b6e\\\",\\\"version\\\":\\\"1.0.2\\\",\\\"fieldPath\\\":\\\"$.event.measurementsForVfScalingFields.additionalMeasurements[*].arrayOfFields[0].value\\\",\\\"thresholdValue\\\":0,\\\"direction\\\":\\\"GREATER\\\",\\\"severity\\\":\\\"CRITICAL\\\",\\\"closedLoopEventStatus\\\":\\\"ONSET\\\"}]}]}\"\r\n docker_config: \r\n healthcheck: \r\n endpoint: /\r\n interval: 15s\r\n timeout: 1s\r\n type: http\r\n image: \r\n get_input: tag_version\r\n service_component_type: dcaegen2-analytics_tca\r\n relationships: \r\n - \r\n target: docker_service_host\r\n type: dcae.relationships.component_contained_in\r\n - \r\n target: tca_policy\r\n type: cloudify.relationships.depends_on\r\n type: dcae.nodes.DockerContainerForComponentsUsingDmaap\r\n tca_policy: \r\n 
properties: \r\n policy_id: \r\n get_input: policy_id\r\n type: dcae.nodes.policy\r\n",
     "serviceIds": null,
     "vnfTypes": ["TESTVNF000"],
     "serviceLocations": null,
diff --git a/version.properties b/version.properties
index 607bf2c..56a8b6d 100644
--- a/version.properties
+++ b/version.properties
@@ -1,6 +1,6 @@
 major=2
minor=1
-patch=1
+patch=2
base_version=${major}.${minor}.${patch}
release_version=${base_version}
snapshot_version=${base_version}-SNAPSHOT