Diffstat (limited to 'lib/cloudify.js')
-rw-r--r-- | lib/cloudify.js | 38
1 files changed, 20 insertions, 18 deletions
diff --git a/lib/cloudify.js b/lib/cloudify.js
index c0504af..51be6e1 100644
--- a/lib/cloudify.js
+++ b/lib/cloudify.js
@@ -91,12 +91,12 @@ const getExecutionStatus = function(req, execution_id) {
 const getWorkflowResult = function(mainReq, execution_id) {
     /* Defense: Some callers do not supply mainReq */
     mainReq = mainReq || {};
-    logger.debug(mainReq.dcaeReqId, "Getting workflow result for execution id: " + execution_id);
+    logger.info(mainReq.dcaeReqId, "Getting workflow result for execution id: " + execution_id);
 
     // Function for testing if workflow is finished
     // Expects the result of getExecStatus
     var checkStatus = function(res) {
-        logger.debug(mainReq.dcaeReqId, "Checking result: " + JSON.stringify(res) + " ==> " + (res.json && res.json.status && FINISHED.indexOf(res.json.status) < 0));
+        logger.info(mainReq.dcaeReqId, "Checking result: " + JSON.stringify(res) + " ==> " + (res.json && res.json.status && FINISHED.indexOf(res.json.status) < 0));
         return res.json && res.json.status && FINISHED.indexOf(res.json.status) < 0;
     };
 
@@ -109,7 +109,7 @@ const getWorkflowResult = function(mainReq, execution_id) {
 
     /* Handle fulfilled promise from repeatWhile */
     function(res) {
-        logger.debug(mainReq.dcaeReqId, 'workflow result: ' + JSON.stringify(res));
+        logger.info(mainReq.dcaeReqId, 'workflow result: ' + JSON.stringify(res));
 
         /* Successful completion */
         if (res.json && res.json.status && res.json.status === 'terminated') {
@@ -189,11 +189,11 @@ const startWorkflowExecution = function(mainReq, deployment_id, workflow_id, par
 const initiateWorkflowExecution = function(req, deployment_id, workflow_id, parameters) {
     return startWorkflowExecution(req, deployment_id, workflow_id, parameters)
     .then(function(result) {
-        logger.debug(req.dcaeReqId, "Result from POSTing workflow execution start: " + JSON.stringify(result));
+        logger.info(req.dcaeReqId, "Result from POSTing workflow execution start: " + JSON.stringify(result));
         if (result.json && result.json.id) {
             return {deploymentId: deployment_id, workflowType: workflow_id, executionId: result.json.id};
         }
-        logger.debug(req.dcaeReqId,"Did not get expected JSON body from POST to start workflow");
+        logger.info(req.dcaeReqId,"Did not get expected JSON body from POST to start workflow");
         var err = new Error("POST to start workflow got success response but no body");
         err.status = err.code = 502;
         throw err;
@@ -202,15 +202,17 @@ const initiateWorkflowExecution = function(req, deployment_id, workflow_id, para
 
 // Uploads a blueprint via the Cloudify API
 exports.uploadBlueprint = function(req, bpid, blueprint) {
+    logger.info(req.dcaeReqId, "uploadBlueprint " + bpid);
 
     // Cloudify API wants a gzipped tar of a directory, not the blueprint text
-    var zip = new admzip();
+    const zip = new admzip();
     zip.addFile('work/', new Buffer(0));
-    zip.addFile('work/blueprint.yaml', new Buffer(blueprint, 'utf8'));
-    var src = (zip.toBuffer());
+    const blueprint_yaml = blueprint.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
+    zip.addFile('work/blueprint.yaml', new Buffer(blueprint_yaml, 'utf8'));
+    const zip_buffer = zip.toBuffer();
 
     // Set up the HTTP PUT request
-    var reqOptions = {
+    const reqOptions = {
         method : "PUT",
         uri : cfyAPI + "/blueprints/" + bpid,
         headers : {
@@ -221,7 +223,7 @@ exports.uploadBlueprint = function(req, bpid, blueprint) {
     addAuthToOptions(reqOptions);
 
     // Initiate PUT request and return the promise for a result
-    return doRequest(req, reqOptions, src, CLOUDIFY);
+    return doRequest(req, reqOptions, zip_buffer, CLOUDIFY);
 };
 
 // Creates a deployment from a blueprint
@@ -264,7 +266,7 @@ exports.executeWorkflow = function(req, deployment_id, workflow_id, parameters)
 
     // Wait for the result
     .then (function(result) {
-        logger.debug(req.dcaeReqId, "Result from initiating workflow: " + JSON.stringify(result));
+        logger.info(req.dcaeReqId, "Result from initiating workflow: " + JSON.stringify(result));
         return getWorkflowResult(req, result.executionId);
     });
 };
@@ -355,10 +357,10 @@ exports.getNodeInstances = function (mainReq, on_next_node_instances, offset) {
     };
     addAuthToOptions(reqOptions);
 
-    logger.debug(mainReq.dcaeReqId, "getNodeInstances: " + JSON.stringify(reqOptions));
+    logger.info(mainReq.dcaeReqId, "getNodeInstances: " + JSON.stringify(reqOptions));
     return doRequest(mainReq, reqOptions, null, CLOUDIFY)
         .then(function(cloudify_response) {
-            logger.debug(mainReq.dcaeReqId, "getNodeInstances response: " + JSON.stringify(cloudify_response));
+            logger.info(mainReq.dcaeReqId, "getNodeInstances response: " + JSON.stringify(cloudify_response));
             var response = {};
             cloudify_response = cloudify_response && cloudify_response.json;
             if (!cloudify_response || !Array.isArray(cloudify_response.items)) {
@@ -371,7 +373,7 @@ exports.getNodeInstances = function (mainReq, on_next_node_instances, offset) {
                 response.message = 'got no more node_instances';
                 return response;
             }
-            logger.debug(mainReq.dcaeReqId, 'getNodeInstances got node_instances ' + cloudify_response.items.length);
+            logger.info(mainReq.dcaeReqId, 'getNodeInstances got node_instances ' + cloudify_response.items.length);
             if (typeof on_next_node_instances === 'function') {
                 on_next_node_instances(cloudify_response.items);
             }
@@ -403,7 +405,7 @@ const runQueuedExecution = function(mainReq, deployment_id, workflow_id, paramet
                              + " with params(" + JSON.stringify(parameters || {}) + ")";
     startWorkflowExecution(mainReq, deployment_id, workflow_id, parameters)
     .then(function(result) {
-        logger.debug(mainReq.dcaeReqId, "result of start the execution for" + exe_deployment_str + ": " + JSON.stringify(result));
+        logger.info(mainReq.dcaeReqId, "result of start the execution for" + exe_deployment_str + ": " + JSON.stringify(result));
         execution_id = result.json && result.json.id;
         if (!execution_id) {
             throw createError("failed to start execution - no execution_id for" + exe_deployment_str,
@@ -413,10 +415,10 @@ const runQueuedExecution = function(mainReq, deployment_id, workflow_id, paramet
         return getWorkflowResult(mainReq, execution_id);
     })
     .then(function(result) {
-        logger.debug(mainReq.dcaeReqId, 'successfully finished execution: ' + execution_id + " for" + exe_deployment_str);
+        logger.info(mainReq.dcaeReqId, 'successfully finished execution: ' + execution_id + " for" + exe_deployment_str);
         var nextExecution = exeQueue.nextExecution(deployment_id);
         if (nextExecution) {
-            logger.debug(nextExecution.mainReq.dcaeReqId, "next execution for deployment_id " + deployment_id
+            logger.info(nextExecution.mainReq.dcaeReqId, "next execution for deployment_id " + deployment_id
                        + " to " + nextExecution.workflow_id
                        + " with params(" + JSON.stringify(nextExecution.parameters || {}) + ")");
             runQueuedExecution(nextExecution.mainReq, deployment_id, nextExecution.workflow_id, nextExecution.parameters);
@@ -462,7 +464,7 @@ exports.executeOperation = function (mainReq, deployment_id, operation, operatio
     if (exeQueue.isDeploymentBusy(deployment_id)) {
         exeQueue.queueUpExecution(mainReq, deployment_id, workflow_id, parameters);
-        logger.debug(mainReq.dcaeReqId, "deployment busy - queue up execution for deployment_id " + deployment_id
+        logger.info(mainReq.dcaeReqId, "deployment busy - queue up execution for deployment_id " + deployment_id
                    + " to " + workflow_id
                    + " with params(" + JSON.stringify(parameters || {}) + ")");
         return;
     }
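
Beyond raising log calls from logger.debug to logger.info, the one behavioral change in this patch is in uploadBlueprint: the blueprint text is now normalized to Unix line endings before it is zipped and PUT to the Cloudify /blueprints endpoint. The sketch below is a minimal illustration of that normalize-then-zip step, not code from this module; the helper name buildBlueprintArchive is hypothetical, and it uses Buffer.alloc/Buffer.from in place of the deprecated new Buffer(...) calls that appear in the diff.

// Minimal sketch, assuming the adm-zip package (the module's admzip import).
const AdmZip = require('adm-zip');

// Hypothetical helper showing the packaging step performed by uploadBlueprint.
function buildBlueprintArchive(blueprint) {
    // Convert Windows (\r\n) and bare \r line endings to \n, mirroring the
    // blueprint_yaml normalization added in this patch.
    const blueprint_yaml = blueprint.replace(/\r\n/g, "\n").replace(/\r/g, "\n");

    // Package a 'work/' directory containing blueprint.yaml, since the Cloudify
    // blueprint upload API expects an archive of a directory rather than raw text.
    const zip = new AdmZip();
    zip.addFile('work/', Buffer.alloc(0));
    zip.addFile('work/blueprint.yaml', Buffer.from(blueprint_yaml, 'utf8'));

    // Return the archive as a Buffer, suitable as the body of the
    // PUT {cfyAPI}/blueprints/{bpid} request that uploadBlueprint issues.
    return zip.toBuffer();
}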