author    Alex Shatov <alexs@att.com>  2018-05-04 12:26:17 -0400
committer Alex Shatov <alexs@att.com>  2018-05-04 12:26:17 -0400
commit    a46339420faefc49cb48adf2989a0884ff961278 (patch)
tree      3862914b2996f355982085784bdea3f77ba0fd85 /lib
parent    f8cab3eebdcee288332e16bda5bd6b2fa17e02ac (diff)
fixed 500 "value" argument is out of bounds
- convert the EOL to linux "\n" in the blueprint before zipping and sending
  to cloudify to avoid crashing when the blueprint is windows "\r\n"
  or mac "\r" based
- on catching the exception - log the stack
- added logger.info that replaced the logger.debug in a variety of places
- external version 2.1.2, internal version 4.4.2
- unit test code coverage
  Statements : 84.26% ( 942/1118 )
  Branches   : 64.14% ( 322/502 )
  Functions  : 81.68% ( 156/191 )
  Lines      : 84.32% ( 930/1103 )

Change-Id: I9f5f28ddd5d143ca4903316c9199df7d27682143
Signed-off-by: Alex Shatov <alexs@att.com>
Issue-ID: DCAEGEN2-487
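
The heart of the 500 fix is in the lib/cloudify.js hunk below: the blueprint text is normalized to Unix line endings before it is zipped for the Cloudify API, so windows "\r\n" or mac "\r" input no longer trips the Buffer error. Pulled out of context, the pattern looks roughly like this minimal sketch (the standalone function name and the require line are illustrative; the adm-zip calls mirror the ones in the diff):

    const admzip = require('adm-zip');

    // Normalize Windows ("\r\n") and old-Mac ("\r") line endings to "\n",
    // then package the blueprint the way the Cloudify API expects:
    // a zipped directory containing work/blueprint.yaml.
    function zipBlueprint(blueprint) {
        const blueprint_yaml = blueprint.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
        const zip = new admzip();
        zip.addFile('work/', new Buffer(0));
        zip.addFile('work/blueprint.yaml', new Buffer(blueprint_yaml, 'utf8'));
        return zip.toBuffer();   // used as the body of the PUT to /blueprints/<bpid>
    }
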
Diffstat (limited to 'lib')
-rw-r--r--  lib/cloudify.js          38
-rw-r--r--  lib/dcae-deployments.js   4
-rw-r--r--  lib/deploy.js            30
-rw-r--r--  lib/logging.js            4
-rw-r--r--  lib/middleware.js        29
-rw-r--r--  lib/policy.js            28
-rw-r--r--  lib/promise_request.js    2
7 files changed, 75 insertions, 60 deletions
diff --git a/lib/cloudify.js b/lib/cloudify.js
index c0504af..51be6e1 100644
--- a/lib/cloudify.js
+++ b/lib/cloudify.js
@@ -91,12 +91,12 @@ const getExecutionStatus = function(req, execution_id) {
const getWorkflowResult = function(mainReq, execution_id) {
/* Defense: Some callers do not supply mainReq */
mainReq = mainReq || {};
- logger.debug(mainReq.dcaeReqId, "Getting workflow result for execution id: " + execution_id);
+ logger.info(mainReq.dcaeReqId, "Getting workflow result for execution id: " + execution_id);
// Function for testing if workflow is finished
// Expects the result of getExecStatus
var checkStatus = function(res) {
- logger.debug(mainReq.dcaeReqId, "Checking result: " + JSON.stringify(res) + " ==> " + (res.json && res.json.status && FINISHED.indexOf(res.json.status) < 0));
+ logger.info(mainReq.dcaeReqId, "Checking result: " + JSON.stringify(res) + " ==> " + (res.json && res.json.status && FINISHED.indexOf(res.json.status) < 0));
return res.json && res.json.status && FINISHED.indexOf(res.json.status) < 0;
};
@@ -109,7 +109,7 @@ const getWorkflowResult = function(mainReq, execution_id) {
/* Handle fulfilled promise from repeatWhile */
function(res) {
- logger.debug(mainReq.dcaeReqId, 'workflow result: ' + JSON.stringify(res));
+ logger.info(mainReq.dcaeReqId, 'workflow result: ' + JSON.stringify(res));
/* Successful completion */
if (res.json && res.json.status && res.json.status === 'terminated') {
@@ -189,11 +189,11 @@ const startWorkflowExecution = function(mainReq, deployment_id, workflow_id, par
const initiateWorkflowExecution = function(req, deployment_id, workflow_id, parameters) {
return startWorkflowExecution(req, deployment_id, workflow_id, parameters)
.then(function(result) {
- logger.debug(req.dcaeReqId, "Result from POSTing workflow execution start: " + JSON.stringify(result));
+ logger.info(req.dcaeReqId, "Result from POSTing workflow execution start: " + JSON.stringify(result));
if (result.json && result.json.id) {
return {deploymentId: deployment_id, workflowType: workflow_id, executionId: result.json.id};
}
- logger.debug(req.dcaeReqId,"Did not get expected JSON body from POST to start workflow");
+ logger.info(req.dcaeReqId,"Did not get expected JSON body from POST to start workflow");
var err = new Error("POST to start workflow got success response but no body");
err.status = err.code = 502;
throw err;
@@ -202,15 +202,17 @@ const initiateWorkflowExecution = function(req, deployment_id, workflow_id, para
// Uploads a blueprint via the Cloudify API
exports.uploadBlueprint = function(req, bpid, blueprint) {
+ logger.info(req.dcaeReqId, "uploadBlueprint " + bpid);
// Cloudify API wants a gzipped tar of a directory, not the blueprint text
- var zip = new admzip();
+ const zip = new admzip();
zip.addFile('work/', new Buffer(0));
- zip.addFile('work/blueprint.yaml', new Buffer(blueprint, 'utf8'));
- var src = (zip.toBuffer());
+ const blueprint_yaml = blueprint.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
+ zip.addFile('work/blueprint.yaml', new Buffer(blueprint_yaml, 'utf8'));
+ const zip_buffer = zip.toBuffer();
// Set up the HTTP PUT request
- var reqOptions = {
+ const reqOptions = {
method : "PUT",
uri : cfyAPI + "/blueprints/" + bpid,
headers : {
@@ -221,7 +223,7 @@ exports.uploadBlueprint = function(req, bpid, blueprint) {
addAuthToOptions(reqOptions);
// Initiate PUT request and return the promise for a result
- return doRequest(req, reqOptions, src, CLOUDIFY);
+ return doRequest(req, reqOptions, zip_buffer, CLOUDIFY);
};
// Creates a deployment from a blueprint
@@ -264,7 +266,7 @@ exports.executeWorkflow = function(req, deployment_id, workflow_id, parameters)
// Wait for the result
.then (function(result) {
- logger.debug(req.dcaeReqId, "Result from initiating workflow: " + JSON.stringify(result));
+ logger.info(req.dcaeReqId, "Result from initiating workflow: " + JSON.stringify(result));
return getWorkflowResult(req, result.executionId);
});
};
@@ -355,10 +357,10 @@ exports.getNodeInstances = function (mainReq, on_next_node_instances, offset) {
};
addAuthToOptions(reqOptions);
- logger.debug(mainReq.dcaeReqId, "getNodeInstances: " + JSON.stringify(reqOptions));
+ logger.info(mainReq.dcaeReqId, "getNodeInstances: " + JSON.stringify(reqOptions));
return doRequest(mainReq, reqOptions, null, CLOUDIFY)
.then(function(cloudify_response) {
- logger.debug(mainReq.dcaeReqId, "getNodeInstances response: " + JSON.stringify(cloudify_response));
+ logger.info(mainReq.dcaeReqId, "getNodeInstances response: " + JSON.stringify(cloudify_response));
var response = {};
cloudify_response = cloudify_response && cloudify_response.json;
if (!cloudify_response || !Array.isArray(cloudify_response.items)) {
@@ -371,7 +373,7 @@ exports.getNodeInstances = function (mainReq, on_next_node_instances, offset) {
response.message = 'got no more node_instances';
return response;
}
- logger.debug(mainReq.dcaeReqId, 'getNodeInstances got node_instances ' + cloudify_response.items.length);
+ logger.info(mainReq.dcaeReqId, 'getNodeInstances got node_instances ' + cloudify_response.items.length);
if (typeof on_next_node_instances === 'function') {
on_next_node_instances(cloudify_response.items);
}
@@ -403,7 +405,7 @@ const runQueuedExecution = function(mainReq, deployment_id, workflow_id, paramet
+ " with params(" + JSON.stringify(parameters || {}) + ")";
startWorkflowExecution(mainReq, deployment_id, workflow_id, parameters)
.then(function(result) {
- logger.debug(mainReq.dcaeReqId, "result of start the execution for" + exe_deployment_str + ": " + JSON.stringify(result));
+ logger.info(mainReq.dcaeReqId, "result of start the execution for" + exe_deployment_str + ": " + JSON.stringify(result));
execution_id = result.json && result.json.id;
if (!execution_id) {
throw createError("failed to start execution - no execution_id for" + exe_deployment_str,
@@ -413,10 +415,10 @@ const runQueuedExecution = function(mainReq, deployment_id, workflow_id, paramet
return getWorkflowResult(mainReq, execution_id);
})
.then(function(result) {
- logger.debug(mainReq.dcaeReqId, 'successfully finished execution: ' + execution_id + " for" + exe_deployment_str);
+ logger.info(mainReq.dcaeReqId, 'successfully finished execution: ' + execution_id + " for" + exe_deployment_str);
var nextExecution = exeQueue.nextExecution(deployment_id);
if (nextExecution) {
- logger.debug(nextExecution.mainReq.dcaeReqId, "next execution for deployment_id " + deployment_id
+ logger.info(nextExecution.mainReq.dcaeReqId, "next execution for deployment_id " + deployment_id
+ " to " + nextExecution.workflow_id
+ " with params(" + JSON.stringify(nextExecution.parameters || {}) + ")");
runQueuedExecution(nextExecution.mainReq, deployment_id, nextExecution.workflow_id, nextExecution.parameters);
@@ -462,7 +464,7 @@ exports.executeOperation = function (mainReq, deployment_id, operation, operatio
if (exeQueue.isDeploymentBusy(deployment_id)) {
exeQueue.queueUpExecution(mainReq, deployment_id, workflow_id, parameters);
- logger.debug(mainReq.dcaeReqId, "deployment busy - queue up execution for deployment_id " + deployment_id
+ logger.info(mainReq.dcaeReqId, "deployment busy - queue up execution for deployment_id " + deployment_id
+ " to " + workflow_id + " with params(" + JSON.stringify(parameters || {}) + ")");
return;
}
diff --git a/lib/dcae-deployments.js b/lib/dcae-deployments.js
index 5d98a69..193f6b9 100644
--- a/lib/dcae-deployments.js
+++ b/lib/dcae-deployments.js
@@ -37,7 +37,7 @@ const inventory = inv({url: config.inventory.url});
app.use(middleware.checkType('application/json')); // Validate type
app.use(bodyParser.json({strict: true})); // Parse body as JSON
app.use(function(req, res, next) {
- log.debug(req.dcaeReqId,
+ log.info(req.dcaeReqId,
"new req: " + req.method + " " + req.originalUrl +
" from: " + req.ip + " body: " + JSON.stringify(req.body)
);
@@ -165,6 +165,7 @@ app.put('/:deploymentId', function(req, res, next) {
/* Already sent the response, so just log error */
/* Don't remove from inventory, because there is a deployment on CM that might need to be removed */
error.message = "Error deploying deploymentId " + req.params['deploymentId'] + ": " + error.message
+ + " " + (error.stack || "").replace(/\n/g, " ");
log.error(error, req);
log.audit(req, 500, error.message);
}
@@ -212,6 +213,7 @@ app.delete('/:deploymentId', function(req, res, next) {
else {
/* Error happened after we sent the response--log it */
error.message = "Error undeploying deploymentId " + req.params['deploymentId'] + ": " + error.message
+ + " " + (error.stack || "").replace(/\n/g, " ");
log.error(error, req);
log.audit(req, 500, error.message);
}
diff --git a/lib/deploy.js b/lib/deploy.js
index dfd73aa..ee31fd3 100644
--- a/lib/deploy.js
+++ b/lib/deploy.js
@@ -138,27 +138,27 @@ var delay = function(dtime) {
// Go through the Cloudify API call sequence to upload blueprint, create deployment, and launch install workflow
// (but don't wait for the workflow to finish)
const launchBlueprint = function(req, id, blueprint, inputs) {
- logger.debug(req.dcaeReqId, "deploymentId: " + id + " starting blueprint upload");
+ logger.info(req.dcaeReqId, "deploymentId: " + id + " starting blueprint upload");
// Upload blueprint
return cfy.uploadBlueprint(req, id, blueprint)
// Create deployment
.then (function(result) {
- logger.debug(req.dcaeReqId, "deploymentId: " + id + " blueprint uploaded");
+ logger.info(req.dcaeReqId, "deploymentId: " + id + " blueprint uploaded");
// Create deployment
return cfy.createDeployment(req, id, id, inputs);
})
// Launch the workflow, but don't wait for it to complete
.then(function(result){
- logger.debug(req.dcaeReqId, "deploymentId: " + id + " deployment created");
+ logger.info(req.dcaeReqId, "deploymentId: " + id + " deployment created");
return delay(DELAY_INSTALL_WORKFLOW)
.then(function(){
return cfy.initiateWorkflowExecution(req, id, 'install');
});
})
.catch(function(error) {
- logger.debug(req.dcaeReqId, "Error: " + error + " for launch blueprint for deploymentId " + id);
+ logger.info(req.dcaeReqId, "Error: " + JSON.stringify(error) + " for launch blueprint for deploymentId " + id);
throw normalizeError(error);
});
};
@@ -166,10 +166,10 @@ exports.launchBlueprint = launchBlueprint;
// Finish installation launched with launchBlueprint
const finishInstallation = function(req, deploymentId, executionId) {
- logger.debug(req.dcaeReqId, "finishInstallation: " + deploymentId + " -- executionId: " + executionId);
+ logger.info(req.dcaeReqId, "finishInstallation: " + deploymentId + " -- executionId: " + executionId);
return cfy.getWorkflowResult(req, executionId)
.then (function(result){
- logger.debug(req.dcaeReqId, "deploymentId: " + deploymentId + " install workflow successfully executed");
+ logger.info(req.dcaeReqId, "deploymentId: " + deploymentId + " install workflow successfully executed");
// Retrieve the outputs from the deployment, as specified in the blueprint
return delay(DELAY_RETRIEVE_OUTPUTS).then(function() {
return cfy.getOutputs(req, deploymentId);
@@ -186,11 +186,11 @@ const finishInstallation = function(req, deploymentId, executionId) {
}
}
}
- logger.debug(req.dcaeReqId, "output retrieval result for " + deploymentId + ": " + JSON.stringify(result));
+ logger.info(req.dcaeReqId, "output retrieval result for " + deploymentId + ": " + JSON.stringify(result));
return annotateOutputs(req, deploymentId, rawOutputs);
})
.catch(function(err) {
- logger.debug(req.dcaeReqId, "Error finishing install workflow: " + err + " -- " + JSON.stringify(err));
+ logger.info(req.dcaeReqId, "Error finishing install workflow: " + err + " -- " + JSON.stringify(err));
throw normalizeError(err);
});
};
@@ -198,31 +198,31 @@ exports.finishInstallation = finishInstallation;
// Initiate uninstall workflow against a deployment, but don't wait for workflow to finish
const launchUninstall = function(req, deploymentId) {
- logger.debug(req.dcaeReqId, "deploymentId: " + deploymentId + " starting uninstall workflow");
+ logger.info(req.dcaeReqId, "deploymentId: " + deploymentId + " starting uninstall workflow");
// Run uninstall workflow
return cfy.initiateWorkflowExecution(req, deploymentId, 'uninstall')
.then(function(result) {
return result;
})
.catch(function(err) {
- logger.debug(req.dcaeReqId, "Error initiating uninstall workflow: " + err + " -- " + JSON.stringify(err));
+ logger.info(req.dcaeReqId, "Error initiating uninstall workflow: " + err + " -- " + JSON.stringify(err));
throw normalizeError(err);
});
};
exports.launchUninstall = launchUninstall;
const finishUninstall = function(req, deploymentId, executionId) {
- logger.debug(req.dcaeReqId, "finishUninstall: " + deploymentId + " -- executionId: " + executionId);
+ logger.info(req.dcaeReqId, "finishUninstall: " + deploymentId + " -- executionId: " + executionId);
return cfy.getWorkflowResult(req, executionId)
.then (function(result){
- logger.debug(req.dcaeReqId, "deploymentId: " + deploymentId + " uninstall workflow successfully executed");
+ logger.info(req.dcaeReqId, "deploymentId: " + deploymentId + " uninstall workflow successfully executed");
// Delete the deployment
return delay(DELAY_DELETE_DEPLOYMENT).then(function() {
return cfy.deleteDeployment(req, deploymentId);
});
})
.then (function(result){
- logger.debug(req.dcaeReqId, "deploymentId: " + deploymentId + " deployment deleted");
+ logger.info(req.dcaeReqId, "deploymentId: " + deploymentId + " deployment deleted");
// Delete the blueprint
return delay(DELAY_DELETE_BLUEPRINT).then(function() {
return cfy.deleteBlueprint(req, deploymentId);
@@ -264,7 +264,7 @@ exports.getExecutionStatus = function (req, exid) {
if (res.json.error) {
result.error = res.json.error;
}
- logger.debug(req.dcaeReqId, "getExecutionStatus result: " + JSON.stringify(result));
+ logger.info(req.dcaeReqId, "getExecutionStatus result: " + JSON.stringify(result));
return result;
})
.catch(function(error) {
@@ -294,7 +294,7 @@ exports.deployBlueprint = function(req, id, blueprint, inputs) {
// Go through the Cloudify API call sequence to do an undeployment of a previously deployed blueprint
exports.undeployDeployment = function(req, id) {
- logger.debug(req.dcaeReqId, "deploymentId: " + id + " starting uninstall workflow");
+ logger.info(req.dcaeReqId, "deploymentId: " + id + " starting uninstall workflow");
// Run launch uninstall workflow
return launchUninstall(req, id)
diff --git a/lib/logging.js b/lib/logging.js
index 4636be3..8d94f87 100644
--- a/lib/logging.js
+++ b/lib/logging.js
@@ -259,6 +259,10 @@ exports.getLogger = function() {
metricsLogger.info(formatMetricsRecord(req, opInfo, extra));
},
+ info: function(reqId, msg) {
+ debugLogger.info(formatDebugRecord(reqId, msg));
+ },
+
debug: function(reqId, msg) {
debugLogger.debug(formatDebugRecord(reqId, msg));
}
diff --git a/lib/middleware.js b/lib/middleware.js
index 183cf77..ee39863 100644
--- a/lib/middleware.js
+++ b/lib/middleware.js
@@ -1,16 +1,16 @@
/*
-Copyright(c) 2017 AT&T Intellectual Property. All rights reserved.
+Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved.
-Licensed under the Apache License, Version 2.0 (the "License");
+Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing,
+Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
-CONDITIONS OF ANY KIND, either express or implied.
+CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
*/
@@ -32,12 +32,19 @@ exports.assignId = function(req, res, next) {
/* Error handler -- send error with JSON body */
exports.handleErrors = function(err, req, res, next) {
- var status = err.status || 500;
- var msg = err.message || err.body || 'unknown error'
- res.status(status).type('application/json').send({status: status, message: msg });
- log.audit(req, status, msg);
+ const response = {
+ status : err.status || 500,
+ message : err.message || err.body || 'unknown error'
+ };
+ if (err.stack) {
+ response.stack = err.stack.split("\n");
+ }
+
+ res.status(response.status).type('application/json').send(response);
+ log.audit(req, response.status, JSON.stringify(response));
- if (status >= 500) {
+ if (response.status >= 500) {
+ err.message = response.message + (err.stack && " " + response.stack.join(', '));
log.error(err, req);
}
};
@@ -55,7 +62,7 @@ exports.checkType = function(type){
var err = new Error ('Content-Type must be \'' + type +'\'');
err.status = 415;
next (err);
- }
+ }
};
};
@@ -70,7 +77,7 @@ exports.checkProps = function(props) {
}
else {
next();
- }
+ }
};
};
diff --git a/lib/policy.js b/lib/policy.js
index 7c47dd3..4a3248d 100644
--- a/lib/policy.js
+++ b/lib/policy.js
@@ -51,7 +51,7 @@ function policyUpdate(req, res, next) {
removed_policy_ids : {}
};
- logger.debug(req.dcaeReqId, "policyUpdate "
+ logger.info(req.dcaeReqId, "policyUpdate "
+ req.method + ' ' + req.protocol + '://' + req.get('host') + req.originalUrl
+ " catch_up: " + policy_update.catch_up
+ " latest_policies: " + policy_update.latest_policies
@@ -95,7 +95,7 @@ function policyUpdate(req, res, next) {
&& !node_instance.runtime_properties.policy_filters)) {
return;
}
- logger.debug(req.dcaeReqId, "checking policies on node_instance: " + JSON.stringify(node_instance));
+ logger.info(req.dcaeReqId, "checking policies on node_instance: " + JSON.stringify(node_instance));
const deployment = policy_update.policy_deployments[node_instance.deployment_id] || {
"deployment_id": node_instance.deployment_id,
@@ -122,7 +122,7 @@ function policyUpdate(req, res, next) {
have_policies = true;
deployment.removed_policy_ids[policy_id] = true;
policy_update.removed_policy_ids[policy_id] = true;
- logger.debug(req.dcaeReqId, "going to remove policy " + policy_id + " from node_instance: " + JSON.stringify(node_instance));
+ logger.info(req.dcaeReqId, "going to remove policy " + policy_id + " from node_instance: " + JSON.stringify(node_instance));
return;
}
@@ -135,13 +135,13 @@ function policyUpdate(req, res, next) {
have_policies = true;
deployment.updated_policies[policy_id] = latest_policy;
policy_update.updated_policy_ids[policy_id] = true;
- logger.debug(req.dcaeReqId, "going to update policy " + policy_id + " on node_instance: " + JSON.stringify(node_instance));
+ logger.info(req.dcaeReqId, "going to update policy " + policy_id + " on node_instance: " + JSON.stringify(node_instance));
});
const policy_filters = node_instance.runtime_properties.policy_filters || {};
const policy_filter_ids = Object.keys(policy_filters);
if (policy_filter_ids.length) {
- logger.debug(req.dcaeReqId, "matching latest policies to policy_filters[" + policy_filter_ids.length + "] on node_instance: " + JSON.stringify(node_instance));
+ logger.info(req.dcaeReqId, "matching latest policies to policy_filters[" + policy_filter_ids.length + "] on node_instance: " + JSON.stringify(node_instance));
try {
Object.keys(policy_update.latest_policies).forEach(policy_id => {
if (!deployment.is_deployment_busy && deployed_policies[policy_id]) {return;}
@@ -206,7 +206,7 @@ function policyUpdate(req, res, next) {
}
deployment.added_policies[policy_filter_id].policies[policy_id] = latest_policy;
policy_update.added_policy_ids[policy_id] = true;
- logger.debug(req.dcaeReqId, "going to add policy " + JSON.stringify(latest_policy)
+ logger.info(req.dcaeReqId, "going to add policy " + JSON.stringify(latest_policy)
+ " per policy_filter_id " + policy_filter_id
+ " on node_instance: " + JSON.stringify(node_instance));
return true;
@@ -225,11 +225,11 @@ function policyUpdate(req, res, next) {
}
});
- logger.debug(req.dcaeReqId, "collected policy_deployments to update " + JSON.stringify(policy_update.policy_deployments));
+ logger.info(req.dcaeReqId, "collected policy_deployments to update " + JSON.stringify(policy_update.policy_deployments));
};
const update_policies_on_deployments = function(result) {
- logger.debug(req.dcaeReqId, "finished loading policy_deployments" + JSON.stringify(result));
+ logger.info(req.dcaeReqId, "finished loading policy_deployments" + JSON.stringify(result));
if (result.status !== 200) {
const error_msg = "failed to retrieve component policies from cloudify " + result.message;
logger.error(createError(error_msg, result.status, "api", 502, 'cloudify-manager'), req);
@@ -248,7 +248,7 @@ function policyUpdate(req, res, next) {
+ "] and added policies[" + Object.keys(policy_update.added_policy_ids).length
+ "] and removed policies[" + Object.keys(policy_update.removed_policy_ids).length
+ "] to deployments[" + deployment_ids.length + "]";
- logger.debug(req.dcaeReqId, audit_msg + ": " + JSON.stringify(deployment_ids));
+ logger.info(req.dcaeReqId, audit_msg + ": " + JSON.stringify(deployment_ids));
logger.audit(req, result.status, audit_msg);
deployment_ids.forEach(deployment_id => {
const deployment = policy_update.policy_deployments[deployment_id];
@@ -257,7 +257,7 @@ function policyUpdate(req, res, next) {
});
deployment.removed_policy_ids = Object.keys(deployment.removed_policy_ids);
- logger.debug(req.dcaeReqId, "ready to execute-operation policy-update on deployment " + JSON.stringify(deployment));
+ logger.info(req.dcaeReqId, "ready to execute-operation policy-update on deployment " + JSON.stringify(deployment));
cloudify.executeOperation(req, deployment.deployment_id, POLICY_UPDATE_OPERATION,
{
'updated_policies': deployment.updated_policies,
@@ -276,7 +276,7 @@ function policyUpdate(req, res, next) {
* retrieve all component-policies from cloudify
*/
function getComponentPoliciesFromCloudify(req, res, next) {
- logger.debug(req.dcaeReqId, "getComponentPoliciesFromCloudify " + req.originalUrl);
+ logger.info(req.dcaeReqId, "getComponentPoliciesFromCloudify " + req.originalUrl);
const response = {"requestID": req.dcaeReqId};
response.started = new Date();
response.server_instance_uuid = process.mainModule.exports.config.server_instance_uuid;
@@ -324,7 +324,7 @@ function getComponentPoliciesFromCloudify(req, res, next) {
}
});
- logger.debug(req.dcaeReqId, "collected " + response.node_instance_ids.length
+ logger.info(req.dcaeReqId, "collected " + response.node_instance_ids.length
+ " node_instance_ids: " + JSON.stringify(response.node_instance_ids)
+ " component_policies: " + JSON.stringify(response.component_policies)
+ " component_policy_filters: " + JSON.stringify(response.component_policy_filters)
@@ -334,7 +334,7 @@ function getComponentPoliciesFromCloudify(req, res, next) {
response.ended = new Date();
response.status = result.status;
response.message = result.message;
- logger.debug(req.dcaeReqId, result.message);
+ logger.info(req.dcaeReqId, result.message);
if (result.status !== 200) {
logger.error(createError(result.message, result.status, "api", 502, 'cloudify-manager'), req);
}
@@ -351,7 +351,7 @@ app.set('etag', false);
app.use(require('./middleware').checkType('application/json'));
app.use(require('body-parser').json({strict: true}));
app.use(function(req, res, next) {
- logger.debug(req.dcaeReqId,
+ logger.info(req.dcaeReqId,
"new req: " + req.method + " " + req.originalUrl +
" from: " + req.ip + " body: " + JSON.stringify(req.body)
);
diff --git a/lib/promise_request.js b/lib/promise_request.js
index c34227d..200e516 100644
--- a/lib/promise_request.js
+++ b/lib/promise_request.js
@@ -70,7 +70,7 @@ exports.doRequest = function(mainReq, options, body, targetEntity) {
catch (e) {
opInfo.respCode = 500;
opInfo.complete = false;
- logger.metrics(mainReq, opInfo, e.message);
+ logger.metrics(mainReq, opInfo, (e.message || "") + " " + (e.stack || "").replace(/\n/g, " "));
reject(e);
}