author    | Alex Shatov <alexs@att.com> | 2018-01-24 13:35:28 -0500
committer | Alex Shatov <alexs@att.com> | 2018-01-24 13:35:28 -0500
commit    | 4e30c82b172cf422ab5179e3c566ef01ca14cb3a (patch)
tree      | 0efa3043daeb8001516d671df5d8e1b81ca4b798
parent    | 70253f7088be04125d9fac8f9bddfaa63778608e (diff)
unit tests coverage 68% and more info in audit
* refactored the unit tests - simpler dh server initialization
* new unit tests for the dcae-deployments
* new unit tests for healthcheck - info
* new server_instance_uuid - unique per deployment-handler instance
for logging and info
* passing the req object through the call stack to show request data
  in logging, audit, and metrics
* new feature: variable collection of policies per component in DCAE
Change-Id: I8388d7e5e11e3a6c871cf3d507bd8a07b09add29
Issue-ID: DCAEGEN2-249
Signed-off-by: Alex Shatov <alexs@att.com>
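
The central change, visible throughout the diff below, is that the incoming `req` object is now threaded through `doRequest` and the Cloudify/inventory wrappers as the first argument, so that `req.dcaeReqId` and the new `config.server_instance_uuid` reach the debug, audit, and metrics records. A minimal sketch of that calling convention follows; the helper bodies are illustrative stand-ins, not the deployment-handler's actual implementation:

```js
// Sketch of the req-first calling convention introduced by this change.
// The helper bodies are simplified stand-ins, not the real lib/promise_request.js.
"use strict";

const crypto = require("crypto");

// One id per deployment-handler instance, analogous to config.server_instance_uuid.
const server_instance_uuid = crypto.randomBytes(16).toString("hex");

const logger = {
    debug: function (reqId, msg) {
        console.log("[debug] inst=" + server_instance_uuid + " req=" + (reqId || "-") + " " + msg);
    }
};

// Outbound calls now take the incoming request as the first argument so the
// per-request ID can be attached to logging, audit and metrics records.
function doRequest(mainReq, options, body, targetEntity) {
    mainReq = mainReq || {};   // defense: some callers (e.g. consul.js) still pass null
    logger.debug(mainReq.dcaeReqId, targetEntity + ": " + options.method + " " + options.uri);
    // ... the real module performs the HTTP request here ...
    return Promise.resolve({ status: 200, body: "{}", json: {} });
}

// A typical caller simply threads req through, as lib/cloudify.js now does.
function getExecutionStatus(req, execution_id) {
    return doRequest(req,
        { method: "GET", uri: "http://cloudify/api/v2.1/executions/" + execution_id },
        null, "cloudify-manager");
}

// Usage with a request object that carries dcaeReqId:
getExecutionStatus({ dcaeReqId: "req-123" }, "exe-456");
```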
-rw-r--r-- | Dockerfile_UT                    |   2
-rw-r--r-- | deployment-handler.js            |   3
-rw-r--r-- | lib/cloudify.js                  |  58
-rw-r--r-- | lib/config.js                    |   5
-rw-r--r-- | lib/consul.js                    |   6
-rw-r--r-- | lib/dcae-deployments.js          | 107
-rw-r--r-- | lib/deploy.js                    | 144
-rw-r--r-- | lib/info.js                      |  40
-rw-r--r-- | lib/inventory.js                 |  24
-rw-r--r-- | lib/logging.js                   |   4
-rw-r--r-- | lib/policy.js                    |   9
-rw-r--r-- | lib/promise_request.js           |  14
-rw-r--r-- | tests/mock_deployment_handler.js | 102
-rw-r--r-- | tests/mock_utils.js              |  25
-rw-r--r-- | tests/test_dcae-deployments.js   | 490
-rw-r--r-- | tests/test_info.js               |  57
-rw-r--r-- | tests/test_policy.js             | 135
-rw-r--r-- | tests/test_zzz_run.js            |  21
18 files changed, 941 insertions, 305 deletions
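
Most of the 941 inserted lines are new mocha tests that stub Consul, Cloudify Manager, and inventory with nock and drive the running server through chai-http (tests/mock_deployment_handler.js and tests/test_*.js in the diff below). A condensed sketch of that test pattern, with the stubbed URLs and reply bodies trimmed to placeholders rather than the full fixtures:

```js
// Condensed sketch of the nock + chai-http test pattern used by the new tests.
// Stubbed paths and reply bodies are trimmed placeholders; the full fixtures live
// in tests/mock_deployment_handler.js and the individual tests/test_*.js files.
"use strict";

const nock = require("nock");
const chai = require("chai");
const chaiHttp = require("chai-http");
const expect = chai.expect;

chai.use(chaiHttp);

// Stub the Consul KV lookup the deployment-handler performs at startup.
// (The real mock also stubs the Consul catalog entries for cloudify_manager and inventory.)
nock("http://consul:8500").persist()
    .get("/v1/kv/deployment_handler?raw")
    .reply(200, { logLevel: "DEBUG", cloudify: { protocol: "http" } });

describe("GET / (info)", function () {
    it("returns server info including server_instance_uuid", function () {
        // Start the server under test, give it a moment to pull its config,
        // then hit the info endpoint (path to the module is assumed here).
        const dh_server = require("./../deployment-handler");

        return new Promise(resolve => setTimeout(resolve, 5000))
            .then(() => chai.request(dh_server.app).get("/"))
            .then(res => {
                expect(res).to.have.status(200);
                expect(res).to.be.json;
                expect(res.body.server).to.have.property("server_instance_uuid");
            });
    }).timeout(10000);
});
```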
diff --git a/Dockerfile_UT b/Dockerfile_UT index 265e1bd..df4ddac 100644 --- a/Dockerfile_UT +++ b/Dockerfile_UT @@ -1,5 +1,5 @@ FROM node:6.10.3 -MAINTAINER maintainer + ENV INSROOT /opt/app ENV APPUSER dh ENV APPDIR ${INSROOT}/${APPUSER} diff --git a/deployment-handler.js b/deployment-handler.js index 15b2807..e049b82 100644 --- a/deployment-handler.js +++ b/deployment-handler.js @@ -75,7 +75,7 @@ const start = function(config) { "policy": POLICY_PATH, "swagger-ui": SWAGGER_UI_PATH }; - exports.config = config; + process.mainModule.exports.config = config; log.debug(null, "Configuration: " + JSON.stringify(config)); @@ -154,4 +154,3 @@ conf.configure() }); module.exports.app = app; -module.exports.set_app = set_app; diff --git a/lib/cloudify.js b/lib/cloudify.js index 23e779a..b03ecac 100644 --- a/lib/cloudify.js +++ b/lib/cloudify.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -75,19 +75,17 @@ var delay = function(dtime) { }; // Get current status of a workflow execution -const getExecutionStatus = function(execution_id, mainReq) { - /* Defense: Some callers do not supply mainReq */ - mainReq = mainReq || {}; +const getExecutionStatus = function(req, execution_id) { var reqOptions = { method : "GET", uri : cfyAPI + "/executions/" + execution_id }; addAuthToOptions(reqOptions); - return doRequest(reqOptions, null, CLOUDIFY, mainReq); + return doRequest(req, reqOptions, null, CLOUDIFY); }; // Poll for the result of a workflow execution until it's done -var getWorkflowResult = function(execution_id, mainReq) { +const getWorkflowResult = function(mainReq, execution_id) { /* Defense: Some callers do not supply mainReq */ mainReq = mainReq || {}; logger.debug(mainReq.dcaeReqId, "Getting workflow result for execution id: " + execution_id); @@ -100,7 +98,7 @@ var getWorkflowResult = function(execution_id, mainReq) { }; // Create execution status checker function - var getExecStatus = function() {return getExecutionStatus(execution_id, mainReq);}; + var getExecStatus = function() {return getExecutionStatus(mainReq, execution_id);}; return repeat.repeatWhile(getExecStatus, checkStatus, MAX_TRIES, RETRY_INTERVAL) .then( @@ -181,18 +179,18 @@ const startWorkflowExecution = function(mainReq, deployment_id, workflow_id, par if (parameters) {body.parameters = parameters;} // Make the POST request - return doRequest(reqOptions, JSON.stringify(body), CLOUDIFY, mainReq); + return doRequest(mainReq, reqOptions, JSON.stringify(body), CLOUDIFY); }; //Initiate a workflow execution against a deployment -const initiateWorkflowExecution = function(deployment_id, workflow_id, parameters) { - return startWorkflowExecution(null, deployment_id, workflow_id, parameters) +const initiateWorkflowExecution = function(req, deployment_id, workflow_id, parameters) { + return startWorkflowExecution(req, deployment_id, workflow_id, parameters) .then(function(result) { - logger.debug(null, "Result from POSTing workflow execution start: " + JSON.stringify(result)); + logger.debug(req.dcaeReqId, "Result from POSTing workflow execution start: " + JSON.stringify(result)); if (result.json && result.json.id) { return {deploymentId: deployment_id, workflowType: workflow_id, executionId: result.json.id}; } - logger.debug(null,"Did not get expected JSON body from POST to start workflow"); + 
logger.debug(req.dcaeReqId,"Did not get expected JSON body from POST to start workflow"); var err = new Error("POST to start workflow got success response but no body"); err.status = err.code = 502; throw err; @@ -200,7 +198,7 @@ const initiateWorkflowExecution = function(deployment_id, workflow_id, parameter }; // Uploads a blueprint via the Cloudify API -exports.uploadBlueprint = function(bpid, blueprint) { +exports.uploadBlueprint = function(req, bpid, blueprint) { // Cloudify API wants a gzipped tar of a directory, not the blueprint text var zip = new admzip(); @@ -220,11 +218,11 @@ exports.uploadBlueprint = function(bpid, blueprint) { addAuthToOptions(reqOptions); // Initiate PUT request and return the promise for a result - return doRequest(reqOptions, src, CLOUDIFY); + return doRequest(req, reqOptions, src, CLOUDIFY); }; // Creates a deployment from a blueprint -exports.createDeployment = function(dpid, bpid, inputs) { +exports.createDeployment = function(req, dpid, bpid, inputs) { // Set up the HTTP PUT request var reqOptions = { @@ -245,7 +243,7 @@ exports.createDeployment = function(dpid, bpid, inputs) { } // Make the PUT request to create the deployment - return doRequest(reqOptions, JSON.stringify(body), CLOUDIFY); + return doRequest(req, reqOptions, JSON.stringify(body), CLOUDIFY); }; // Initiate a workflow execution against a deployment @@ -258,19 +256,19 @@ exports.getWorkflowExecutionStatus = getExecutionStatus; exports.getWorkflowResult = getWorkflowResult; // Executes a workflow against a deployment and returns a promise for final result -exports.executeWorkflow = function(deployment_id, workflow_id, parameters) { - return initiateWorkflowExecution(deployment_id, workflow_id, parameters) +exports.executeWorkflow = function(req, deployment_id, workflow_id, parameters) { + return initiateWorkflowExecution(req, deployment_id, workflow_id, parameters) // Wait for the result .then (function(result) { - logger.debug(null, "Result from initiating workflow: " + JSON.stringify(result)); - return getWorkflowResult(result.executionId); + logger.debug(req.dcaeReqId, "Result from initiating workflow: " + JSON.stringify(result)); + return getWorkflowResult(req, result.executionId); }); }; // Retrieves outputs for a deployment -exports.getOutputs = function(dpid) { +exports.getOutputs = function(req, dpid) { var reqOptions = { method : "GET", uri : cfyAPI + "/deployments/" + dpid + "/outputs", @@ -280,11 +278,11 @@ exports.getOutputs = function(dpid) { }; addAuthToOptions(reqOptions); - return doRequest(reqOptions, null, CLOUDIFY); + return doRequest(req, reqOptions, null, CLOUDIFY); }; // Get the output descriptions for a deployment -exports.getOutputDescriptions = function(dpid) { +exports.getOutputDescriptions = function(req, dpid) { var reqOptions = { method : "GET", uri : cfyAPI + "/deployments/" + dpid + "?include=outputs", @@ -294,29 +292,29 @@ exports.getOutputDescriptions = function(dpid) { }; addAuthToOptions(reqOptions); - return doRequest(reqOptions, null, CLOUDIFY); + return doRequest(req, reqOptions, null, CLOUDIFY); }; // Deletes a deployment -exports.deleteDeployment = function(dpid) { +exports.deleteDeployment = function(req, dpid) { var reqOptions = { method : "DELETE", uri : cfyAPI + "/deployments/" + dpid }; addAuthToOptions(reqOptions); - return doRequest(reqOptions, null, CLOUDIFY); + return doRequest(req, reqOptions, null, CLOUDIFY); }; // Deletes a blueprint -exports.deleteBlueprint = function(bpid) { +exports.deleteBlueprint = function(req, bpid) { var 
reqOptions = { method : "DELETE", uri : cfyAPI + "/blueprints/" + bpid }; addAuthToOptions(reqOptions); - return doRequest(reqOptions, null, CLOUDIFY); + return doRequest(req, reqOptions, null, CLOUDIFY); }; // Allow client to set the Cloudify API root address @@ -349,7 +347,7 @@ exports.getNodeInstances = function (mainReq, on_next_node_instances, offset) { addAuthToOptions(reqOptions); logger.debug(mainReq.dcaeReqId, "getNodeInstances: " + JSON.stringify(reqOptions)); - return doRequest(reqOptions, null, CLOUDIFY, mainReq) + return doRequest(mainReq, reqOptions, null, CLOUDIFY) .then(function(cloudify_response) { logger.debug(mainReq.dcaeReqId, "getNodeInstances response: " + JSON.stringify(cloudify_response)); var response = {}; @@ -403,7 +401,7 @@ const runQueuedExecution = function(mainReq, deployment_id, workflow_id, paramet 553, "api", 553, CLOUDIFY); } exeQueue.setExecutionId(deployment_id, execution_id); - return getWorkflowResult(execution_id, mainReq); + return getWorkflowResult(mainReq, execution_id); }) .then(function(result) { logger.debug(mainReq.dcaeReqId, 'successfully finished execution: ' + execution_id + " for" + exe_deployment_str); diff --git a/lib/config.js b/lib/config.js index e44e9b5..b71199c 100644 --- a/lib/config.js +++ b/lib/config.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -148,7 +148,8 @@ const getTLSCredentials = function() { } exports.configure = function() { - var config = {}; + const config = {}; + config.server_instance_uuid = utils.generateId(); /* Get configuration from configuration store */ return getFileContents(PACKAGE_JSON_FILE) diff --git a/lib/consul.js b/lib/consul.js index 3a3257b..226291f 100644 --- a/lib/consul.js +++ b/lib/consul.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -29,7 +29,7 @@ module.exports = { * If there is no such key, resolve to null. */ getKey: function(key) { - return doRequest({method: 'GET', uri: CONSUL_URL + KEY + key + '?raw'}, null, CONSUL) + return doRequest(null, {method: 'GET', uri: CONSUL_URL + KEY + key + '?raw'}, null, CONSUL) .then(function(res) { return res.json || res.body; }) @@ -51,7 +51,7 @@ module.exports = { * If the service is not found, returns a zero-length array. */ getService: function(serviceId) { - return doRequest({method: 'GET', uri: CONSUL_URL + SERVICE + serviceId}, null, CONSUL) + return doRequest(null, {method: 'GET', uri: CONSUL_URL + SERVICE + serviceId}, null, CONSUL) .then(function(res){ return res.json.map(function(r) { /* Address for external service is in r.Address with r.ServiceAddress empty */ diff --git a/lib/dcae-deployments.js b/lib/dcae-deployments.js index 38dc3c4..9c1d918 100644 --- a/lib/dcae-deployments.js +++ b/lib/dcae-deployments.js @@ -1,16 +1,16 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. -Licensed under the Apache License, Version 2.0 (the "License"); +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -Unless required by applicable law or agreed to in writing, +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -CONDITIONS OF ANY KIND, either express or implied. +CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ @@ -36,11 +36,18 @@ const inventory = inv({url: config.inventory.url}); /* Set up middleware stack for initial processing of request */ app.use(middleware.checkType('application/json')); // Validate type app.use(bodyParser.json({strict: true})); // Parse body as JSON +app.use(function(req, res, next) { + log.debug(req.dcaeReqId, + "new req: " + req.method + " " + req.originalUrl + + " from: " + req.ip + " body: " + JSON.stringify(req.body) + ); + next(); +}); /* Return a promise for a blueprint for the given service type ID */ -const getBlueprint = function(serviceTypeId) { - return inventory.getBlueprintByType(serviceTypeId) +const getBlueprint = function(req, serviceTypeId) { + return inventory.getBlueprintByType(req, serviceTypeId) .then(function (blueprintInfo) { if (!blueprintInfo.blueprint) { var e = new Error("No service type with ID " + serviceTypeId); @@ -48,7 +55,7 @@ const getBlueprint = function(serviceTypeId) { throw e; } return blueprintInfo; - }) + }) }; /* Generate self and status links object for responses */ @@ -57,7 +64,7 @@ const createLinks = function(req, deploymentId, executionId) { return { self: baseURL, status: baseURL + '/operation/' + executionId - }; + }; }; /* Generate a success response body for PUT and DELETE operations */ @@ -71,13 +78,11 @@ const createResponse = function(req, result) { /* Look up running (or in process of deploying) instances of the given service type */ app.get('/', function (req, res, next) { var services = [] - - - var searchTerm = {}; + var searchTerm; req.query['serviceTypeId'] && (searchTerm = {typeId: req.query['serviceTypeId']}); - - inventory.getServicesByType(searchTerm) + + inventory.getServicesByType(req, searchTerm) .then(function (result) { var deployments = result.map(function(service){ return { @@ -92,70 +97,68 @@ app.get('/', function (req, res, next) { /* Accept an incoming deployment request */ app.put('/:deploymentId', function(req, res, next) { - - log.debug(req.dcaeReqId, "body: " + JSON.stringify(req.body)); - + /* Make sure there's a serviceTypeId in the body */ if (!req.body['serviceTypeId']) { var e = new Error ('Missing required parameter serviceTypeId'); e.status = 400; throw e; } - + /* Make sure the deploymentId doesn't already exist */ - inventory.verifyUniqueDeploymentId(req.params['deploymentId']) + inventory.verifyUniqueDeploymentId(req, req.params['deploymentId']) /* Get the blueprint for this service type */ .then(function(res) { - return getBlueprint(req.body['serviceTypeId']); + return getBlueprint(req, req.body['serviceTypeId']); }) - - /* Add this new service instance to inventory - * Easier to remove from inventory if deployment fails than vice versa + + /* Add this new service instance to inventory + * Easier to remove from inventory if deployment fails than vice versa * Also lets client check for deployed/deploying instances if client wants to limit number of instances */ .then(function (blueprintInfo) { req.dcaeBlueprint = blueprintInfo.blueprint; - return 
inventory.addService(req.params['deploymentId'], blueprintInfo.typeId, "dummyVnfId", "dummyVnfType", "dummyLocation"); + return inventory.addService(req, req.params['deploymentId'], blueprintInfo.typeId, "dummyVnfId", "dummyVnfType", "dummyLocation"); }) - + /* Upload blueprint, create deployment and start install workflow (but don't wait for completion */ .then (function() { req.dcaeAddedToInventory = true; - return deploy.launchBlueprint(req.params['deploymentId'], req.dcaeBlueprint, req.body['inputs']); + return deploy.launchBlueprint(req, req.params['deploymentId'], req.dcaeBlueprint, req.body['inputs']); }) - + /* Send the HTTP response indicating workflow has started */ .then(function(result) { res.status(202).json(createResponse(req, result)); log.audit(req, 202, "Execution ID: " + result.executionId); return result; }) - + /* Finish deployment--wait for the install workflow to complete, retrieve and annotate outputs */ .then(function(result) { - return deploy.finishInstallation(result.deploymentId, result.executionId); + return deploy.finishInstallation(req, result.deploymentId, result.executionId); }) - + /* Log completion in audit log */ .then (function(result) { log.audit(req, 200, "Deployed id: " + req.params['deploymentId']); }) - + /* All errors show up here */ - .catch(function(error) { - + .catch(function(error) { + /* If we haven't already sent a response, let the error handler send response and log the error */ if (!res.headersSent) { - + /* If we made an inventory entry, remove it */ if (req.dcaeAddedToInventory) { - inventory.deleteService(req.params['deploymentId']) + inventory.deleteService(req, req.params['deploymentId']) .catch(function(error) { log.error(error, req); }); } - + next(error); } else { @@ -164,46 +167,46 @@ app.put('/:deploymentId', function(req, res, next) { error.message = "Error deploying deploymentId " + req.params['deploymentId'] + ": " + error.message log.error(error, req); log.audit(req, 500, error.message); - } + } }); }); /* Delete a running service instance */ app.delete('/:deploymentId', function(req, res, next) { - + /* Launch the uninstall workflow */ - deploy.launchUninstall(req.params['deploymentId']) - + deploy.launchUninstall(req, req.params['deploymentId']) + /* Delete the service from inventory */ .then(function(result) { - return inventory.deleteService(req.params['deploymentId']) + return inventory.deleteService(req, req.params['deploymentId']) .then (function() { return result; }); }) - + /* Send the HTTP response indicating workflow has started */ .then(function(result) { res.status(202).send(createResponse(req, result)); log.audit(req, 202, "ExecutionId: " + result.executionId); return result; }) - + /* Finish the delete processing--wait for the uninstall to complete, delete deployment, delete blueprint */ .then(function(result) { - return deploy.finishUninstall(result.deploymentId, result.executionId); + return deploy.finishUninstall(req, result.deploymentId, result.executionId); }) - + /* Log completion in audit log */ .then(function(result) { - log.audit(req, 200, "Undeployed id: " + req.params['deploymentId']); + log.audit(req, 200, "Undeployed id: " + req.params['deploymentId']); }) - + /* All errors show up here */ .catch(function(error) { /* If we haven't already sent a response, give it to the error handler to send response */ - if (!res.headersSent) { + if (!res.headersSent) { next(error); } else { @@ -217,8 +220,8 @@ app.delete('/:deploymentId', function(req, res, next) { /* Get the status of a workflow execution */ 
app.get('/:deploymentId/operation/:executionId', function(req, res, next){ - deploy.getExecutionStatus(req.params['executionId']) - + deploy.getExecutionStatus(req, req.params['executionId']) + /* Send success response */ .then(function(result) { result.requestId = req.dcaeReqId; @@ -226,9 +229,9 @@ app.get('/:deploymentId/operation/:executionId', function(req, res, next){ res.status(200).json(result); log.audit(req, 200, "Workflow type: " + result.operationType + " -- execution status: " + result.status); }) - + .catch(next); /* Let the error handler send the response and log the error */ - + }); -module.exports = app;
\ No newline at end of file +module.exports = app; diff --git a/lib/deploy.js b/lib/deploy.js index 7f83620..e651773 100644 --- a/lib/deploy.js +++ b/lib/deploy.js @@ -1,16 +1,16 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. -Licensed under the Apache License, Version 2.0 (the "License"); +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -Unless required by applicable law or agreed to in writing, +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -CONDITIONS OF ANY KIND, either express or implied. +CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ @@ -58,7 +58,7 @@ var parseContent = function(input) { // create a normalized representation of errors, whether they're a node.js Error or a Cloudify API error var normalizeError = function (err) { var e; - + if (err instanceof Error) { /* node.js system error */ e = createError("Error communicating with CM: " + err.message, 504, "system", 202, 'cloudify-manager'); @@ -71,7 +71,7 @@ var normalizeError = function (err) { var status = err.status || 502; var cfyCode = "UNKNOWN"; var cfyMessage; - + if (err.body) { var p = parseContent(err.body); if (p.json) { @@ -84,28 +84,28 @@ var normalizeError = function (err) { } message = "Status " + status + " from CM API -- error code: " + cfyCode + " -- message: " + cfyMessage; } - + /* Pass through 400-level status, recast 500-level */ var returnStatus = (err.status > 499) ? 
502 : err.status; e = createError(message, returnStatus, "api", 502, 'cloudify-manager'); } - + return e; }; // Augment the raw outputs from a deployment with the descriptions from the blueprint -var annotateOutputs = function (id, rawOutputs) { +var annotateOutputs = function (req, id, rawOutputs) { return new Promise(function(resolve, reject) { - + var outItems = Object.keys(rawOutputs); - + if (outItems.length < 1) { // No output items, so obviously no descriptions, just return empty object resolve({}); } else { // Call Cloudify to get the descriptions - cfy.getOutputDescriptions(id) + cfy.getOutputDescriptions(req, id) .then(function(res) { // Assemble an outputs object with values from raw output and descriptions just obtained var p = parseContent(res.body); @@ -115,16 +115,16 @@ var annotateOutputs = function (id, rawOutputs) { outs[i] = {value: rawOutputs[i]}; if (p.content.outputs[i] && p.content.outputs[i].description) { outs[i].description = p.content.outputs[i].description; - } + } }); resolve(outs); } else { reject({code: "API_INVALID_RESPONSE", message: "Invalid response for output descriptions query"}); - } + } }); } - + }); }; @@ -137,41 +137,43 @@ var delay = function(dtime) { // Go through the Cloudify API call sequence to upload blueprint, create deployment, and launch install workflow // (but don't wait for the workflow to finish) -const launchBlueprint = function(id, blueprint, inputs) { - logger.debug(null, "deploymentId: " + id + " starting blueprint upload"); +const launchBlueprint = function(req, id, blueprint, inputs) { + logger.debug(req.dcaeReqId, "deploymentId: " + id + " starting blueprint upload"); // Upload blueprint - return cfy.uploadBlueprint(id, blueprint) - + return cfy.uploadBlueprint(req, id, blueprint) + // Create deployment .then (function(result) { - logger.debug(null, "deploymentId: " + id + " blueprint uploaded"); + logger.debug(req.dcaeReqId, "deploymentId: " + id + " blueprint uploaded"); // Create deployment - return cfy.createDeployment(id, id, inputs); + return cfy.createDeployment(req, id, id, inputs); }) - + // Launch the workflow, but don't wait for it to complete .then(function(result){ - logger.debug(null, "deploymentId: " + id + " deployment created"); + logger.debug(req.dcaeReqId, "deploymentId: " + id + " deployment created"); return delay(DELAY_INSTALL_WORKFLOW) - .then(function(){ - return cfy.initiateWorkflowExecution(id, 'install'); + .then(function(){ + return cfy.initiateWorkflowExecution(req, id, 'install'); }); }) .catch(function(error) { - logger.debug(null, "Error: " + error + " for launch blueprint for deploymentId " + id); + logger.debug(req.dcaeReqId, "Error: " + error + " for launch blueprint for deploymentId " + id); throw normalizeError(error); }); }; exports.launchBlueprint = launchBlueprint; // Finish installation launched with launchBlueprint -const finishInstallation = function(deploymentId, executionId) { - logger.debug(null, "finishInstallation: " + deploymentId + " -- executionId: " + executionId); - return cfy.getWorkflowResult(executionId) +const finishInstallation = function(req, deploymentId, executionId) { + logger.debug(req.dcaeReqId, "finishInstallation: " + deploymentId + " -- executionId: " + executionId); + return cfy.getWorkflowResult(req, executionId) .then (function(result){ - logger.debug(null, "deploymentId: " + deploymentId + " install workflow successfully executed"); + logger.debug(req.dcaeReqId, "deploymentId: " + deploymentId + " install workflow successfully executed"); // Retrieve the 
outputs from the deployment, as specified in the blueprint - return delay(DELAY_RETRIEVE_OUTPUTS).then(function() { return cfy.getOutputs(deploymentId); }); + return delay(DELAY_RETRIEVE_OUTPUTS).then(function() { + return cfy.getOutputs(req, deploymentId); + }); }) .then(function(result) { // We have the raw outputs from the deployment but not annotated with the descriptions @@ -182,45 +184,49 @@ const finishInstallation = function(deploymentId, executionId) { if (p.content.outputs) { rawOutputs = p.content.outputs; } - } + } } - logger.debug(null, "output retrieval result for " + deploymentId + ": " + JSON.stringify(result)); - return annotateOutputs(deploymentId, rawOutputs); + logger.debug(req.dcaeReqId, "output retrieval result for " + deploymentId + ": " + JSON.stringify(result)); + return annotateOutputs(req, deploymentId, rawOutputs); }) .catch(function(err) { - logger.debug(null, "Error finishing install workflow: " + err + " -- " + JSON.stringify(err)); + logger.debug(req.dcaeReqId, "Error finishing install workflow: " + err + " -- " + JSON.stringify(err)); throw normalizeError(err); }); }; exports.finishInstallation = finishInstallation; // Initiate uninstall workflow against a deployment, but don't wait for workflow to finish -const launchUninstall = function(deploymentId) { - logger.debug(null, "deploymentId: " + deploymentId + " starting uninstall workflow"); +const launchUninstall = function(req, deploymentId) { + logger.debug(req.dcaeReqId, "deploymentId: " + deploymentId + " starting uninstall workflow"); // Run uninstall workflow - return cfy.initiateWorkflowExecution(deploymentId, 'uninstall') + return cfy.initiateWorkflowExecution(req, deploymentId, 'uninstall') .then(function(result) { return result; }) .catch(function(err) { - logger.debug(null, "Error initiating uninstall workflow: " + err + " -- " + JSON.stringify(err)); + logger.debug(req.dcaeReqId, "Error initiating uninstall workflow: " + err + " -- " + JSON.stringify(err)); throw normalizeError(err); - }); + }); }; exports.launchUninstall = launchUninstall; -const finishUninstall = function(deploymentId, executionId) { - logger.debug(null, "finishUninstall: " + deploymentId + " -- executionId: " + executionId); - return cfy.getWorkflowResult(executionId) +const finishUninstall = function(req, deploymentId, executionId) { + logger.debug(req.dcaeReqId, "finishUninstall: " + deploymentId + " -- executionId: " + executionId); + return cfy.getWorkflowResult(req, executionId) .then (function(result){ - logger.debug(null, "deploymentId: " + deploymentId + " uninstall workflow successfully executed"); + logger.debug(req.dcaeReqId, "deploymentId: " + deploymentId + " uninstall workflow successfully executed"); // Delete the deployment - return delay(DELAY_DELETE_DEPLOYMENT).then(function() {return cfy.deleteDeployment(deploymentId);}); + return delay(DELAY_DELETE_DEPLOYMENT).then(function() { + return cfy.deleteDeployment(req, deploymentId); + }); }) .then (function(result){ - logger.debug(null, "deploymentId: " + deploymentId + " deployment deleted"); + logger.debug(req.dcaeReqId, "deploymentId: " + deploymentId + " deployment deleted"); // Delete the blueprint - return delay(DELAY_DELETE_BLUEPRINT).then(function() {return cfy.deleteBlueprint(deploymentId);}); + return delay(DELAY_DELETE_BLUEPRINT).then(function() { + return cfy.deleteBlueprint(req, deploymentId); + }); }) .then (function(result){ return result; @@ -228,19 +234,19 @@ const finishUninstall = function(deploymentId, executionId) { .catch (function(err){ 
throw normalizeError(err); }); - + }; exports.finishUninstall = finishUninstall; // Get the status of a workflow execution -exports.getExecutionStatus = function (exid) { - return cfy.getWorkflowExecutionStatus(exid) +exports.getExecutionStatus = function (req, exid) { + return cfy.getWorkflowExecutionStatus(req, exid) .then(function(res){ - + var result = { operationType: res.json.workflow_id }; - + // Map execution status if (res.json.status === "terminated") { result.status = "succeeded"; @@ -254,11 +260,11 @@ exports.getExecutionStatus = function (exid) { else { result.status = "processing"; } - + if (res.json.error) { result.error = res.json.error; } - logger.debug(null, "getExecutionStatus result: " + JSON.stringify(result)); + logger.debug(req.dcaeReqId, "getExecutionStatus result: " + JSON.stringify(result)); return result; }) .catch(function(error) { @@ -267,37 +273,37 @@ exports.getExecutionStatus = function (exid) { }; // Go through the Cloudify API call sequence to do a deployment -exports.deployBlueprint = function(id, blueprint, inputs) { +exports.deployBlueprint = function(req, id, blueprint, inputs) { + + // Upload blueprint, create deployment, and initiate install workflow + return launchBlueprint(req, id, blueprint, inputs) - // Upload blueprint, create deployment, and initiate install workflow - return launchBlueprint(id, blueprint, inputs) - // Wait for the workflow to complete .then( - + // launchBlueprint promise fulfilled -- finish installation function(result){ - return finishInstallation(result.deploymentId, result.executionId); // Will throw normalized error if it fails + return finishInstallation(req, result.deploymentId, result.executionId); // Will throw normalized error if it fails }, - + // launchBlueprint promise rejected -- report error function(err) { - throw normalizeError(err); + throw normalizeError(err); }); }; // Go through the Cloudify API call sequence to do an undeployment of a previously deployed blueprint -exports.undeployDeployment = function(id) { - logger.debug(null, "deploymentId: " + id + " starting uninstall workflow"); - +exports.undeployDeployment = function(req, id) { + logger.debug(req.dcaeReqId, "deploymentId: " + id + " starting uninstall workflow"); + // Run launch uninstall workflow - return launchUninstall(id) - + return launchUninstall(req, id) + // launchUninstall promise fulfilled -- finish uninstall .then (function(result){ - return finishUninstall(result.deploymentId, result.executionId); // Will throw normalized error if it fails + return finishUninstall(req, result.deploymentId, result.executionId); // Will throw normalized error if it fails }, - + // launchUninstall promise rejected -- report error function(err){ throw normalizeError(err); diff --git a/lib/info.js b/lib/info.js index f6b37a8..1c15349 100644 --- a/lib/info.js +++ b/lib/info.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -19,27 +19,27 @@ See the License for the specific language governing permissions and limitations "use strict"; const router = require('express').Router(); - -/* Pick up config exported by main */ -const config = process.mainModule.exports.config; +const logger = require('./logging').getLogger(); /* Accept an incoming event */ router.get('/', function(req, res) { - res.json( - { - "server" : { - "name": config.name, - "description": config.description, - "version": config.version, - "branch": config.branch, - "commit": config.commit, - "commit_datetime": config.commit_datetime - }, - "apiVersion": config.apiVersion, - "links": config.apiLinks - } - ); - require('./logging').getLogger().audit(req, 200); + /* Pick up config exported by main */ + const config = process.mainModule.exports.config; + const info = { + "server" : { + "name": config.name, + "description": config.description, + "version": config.version, + "branch": config.branch, + "commit": config.commit, + "commit_datetime": config.commit_datetime, + "server_instance_uuid": config.server_instance_uuid + }, + "apiVersion": config.apiVersion, + "links": config.apiLinks + }; + res.json(info); + logger.audit(req, 200, JSON.stringify(info)); }); -module.exports = router;
\ No newline at end of file +module.exports = router; diff --git a/lib/inventory.js b/lib/inventory.js index c2e13c9..ecc790a 100644 --- a/lib/inventory.js +++ b/lib/inventory.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -55,7 +55,7 @@ module.exports = function(options) { return { /* Add a DCAE service to the inventory. Done after a deployment.*/ - addService: function(deploymentId, serviceType, vnfId, vnfType, vnfLocation, outputs) { + addService: function(req, deploymentId, serviceType, vnfId, vnfType, vnfLocation, outputs) { /* Create the service description */ var serviceDescription = @@ -83,23 +83,23 @@ module.exports = function(options) { json: serviceDescription }; - return doRequest(reqOptions, null, INVENTORY); + return doRequest(req, reqOptions, null, INVENTORY); }, /* Remove a DCAE service from the inventory. Done after an undeployment. */ - deleteService: function(serviceId) { - return doRequest({method: "DELETE", uri: url + INV_SERVICES + "/" + serviceId}, null, INVENTORY); + deleteService: function(req, serviceId) { + return doRequest(req, {method: "DELETE", uri: url + INV_SERVICES + "/" + serviceId}, null, INVENTORY); }, /* Find running/deploying instances of services (with a given type name, if specified) */ - getServicesByType: function(query) { + getServicesByType: function(req, query) { var options = { method: 'GET', uri: url + INV_SERVICES, - qs: query || {} + qs: query }; - return doRequest(options, null, INVENTORY) + return doRequest(req, options, null, INVENTORY) .then (function (result) { var services = []; var content = JSON.parse(result.body); @@ -113,8 +113,8 @@ module.exports = function(options) { }, /* Find a blueprint given the service type ID -- return blueprint and type ID */ - getBlueprintByType: function(serviceTypeId) { - return doRequest({ + getBlueprintByType: function(req, serviceTypeId) { + return doRequest(req, { method: "GET", uri: url + INV_SERV_TYPES + '/' + serviceTypeId }, null, INVENTORY) @@ -138,8 +138,8 @@ module.exports = function(options) { * deployment ID as service name. If it doesn't exist, the function * resolves its promise. If it *does* exist, then it throws an error. */ - verifyUniqueDeploymentId: function(deploymentId) { - return doRequest({ + verifyUniqueDeploymentId: function(req, deploymentId) { + return doRequest(req, { method: "GET", uri: url + INV_SERVICES + "/" + deploymentId }, null, INVENTORY) diff --git a/lib/logging.js b/lib/logging.js index a21f37e..cfd987d 100644 --- a/lib/logging.js +++ b/lib/logging.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -130,6 +130,7 @@ const DEBUG_MARKER = '^'; const formatAuditRecord = function(req, status, extra) { var rec = new Array(AUDIT_NFIELDS); const end = new Date(); + rec[AUDIT_INSTUUID] = (process.mainModule.exports.config || {}).server_instance_uuid || ""; rec[AUDIT_END] = end.toISOString(); rec[AUDIT_BEGIN] = req.startTime.toISOString(); rec[AUDIT_REQID] = req.dcaeReqId; @@ -161,6 +162,7 @@ const formatAuditRecord = function(req, status, extra) { const formatMetricsRecord = function(req, opInfo, extra) { var rec = new Array(METRICS_NFIELDS); const end = new Date(); + rec[METRICS_INSTUUID] = (process.mainModule.exports.config || {}).server_instance_uuid || ""; rec[METRICS_END] = end.toISOString(); rec[METRICS_BEGIN] = opInfo.startTime.toISOString(); diff --git a/lib/policy.js b/lib/policy.js index 482650a..89e5b6a 100644 --- a/lib/policy.js +++ b/lib/policy.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -324,6 +324,13 @@ app.set('x-powered-by', false); app.set('etag', false); app.use(require('./middleware').checkType('application/json')); app.use(require('body-parser').json({strict: true})); +app.use(function(req, res, next) { + logger.debug(req.dcaeReqId, + "new req: " + req.method + " " + req.originalUrl + + " from: " + req.ip + " body: " + JSON.stringify(req.body) + ); + next(); +}); app.post('/', policyUpdate); app.get('/components', getComponentPoliciesFromCloudify); diff --git a/lib/promise_request.js b/lib/promise_request.js index 0572ac4..975f12d 100644 --- a/lib/promise_request.js +++ b/lib/promise_request.js @@ -1,5 +1,5 @@ /* -Copyright(c) 2017 AT&T Intellectual Property. All rights reserved. +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -31,11 +31,11 @@ const url = require('url'); const querystring = require('querystring'); const logger = require('./logging').getLogger(); -exports.doRequest = function(options, body, targetEntity, mainReq) { - +exports.doRequest = function(mainReq, options, body, targetEntity) { + /* Defense: for now, some callers don't provide mainReq */ mainReq = mainReq || {}; - + var opInfo = {"startTime":new Date(), "targetEntity": targetEntity}; return new Promise(function(resolve, reject) { @@ -56,10 +56,12 @@ exports.doRequest = function(options, body, targetEntity, mainReq) { options.hostname = parsed.hostname; options.port = parsed.port; options.path = parsed.path; + opInfo.targetService = options.method + " " + options.uri; if (options.qs) { - options.path += ('?' + querystring.stringify(options.qs)); + const qry = ('?' + querystring.stringify(options.qs)); + options.path += qry; + opInfo.targetService += qry; } - opInfo.targetService = options.method + " " + options.uri; } try { diff --git a/tests/mock_deployment_handler.js b/tests/mock_deployment_handler.js new file mode 100644 index 0000000..ed0468f --- /dev/null +++ b/tests/mock_deployment_handler.js @@ -0,0 +1,102 @@ +/* +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+ +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and limitations under the License. +*/ + +/** + * mock-deployment_handler - base server for all other tests + */ + +"use strict"; + +const nock = require('nock'); +const utils = require('./mock_utils'); + +const MAIN_PATH = './../'; + +const CONSUL_URL = 'http://consul:8500'; +const MOCK_CLOUDIFY_MANAGER = "mock_cloudify_manager"; +const CLOUDIFY_URL = "http://" + MOCK_CLOUDIFY_MANAGER + ":80"; + +const MOCK_INVENTORY = "mock_inventory"; +const INVENTORY_URL = "https://" + MOCK_INVENTORY + ":8080"; + +nock(CONSUL_URL).persist().get('/v1/kv/deployment_handler?raw') + .reply(200, {"logLevel": "DEBUG", "cloudify": {"protocol": "http"}}); + +nock(CONSUL_URL).persist().get('/v1/catalog/service/cloudify_manager') + .reply(200, [{ + "ID":"deadbeef-dead-beef-dead-beefdeadbeef", + "Node":"devorcl00", + "Address": MOCK_CLOUDIFY_MANAGER, + "Datacenter":"rework-central", + "TaggedAddresses":{"lan": MOCK_CLOUDIFY_MANAGER,"wan": MOCK_CLOUDIFY_MANAGER}, + "NodeMeta":{}, + "ServiceID":"cloudify_manager", + "ServiceName":"cloudify_manager", + "ServiceTags":["http://" + MOCK_CLOUDIFY_MANAGER + "/api/v2.1"], + "ServiceAddress": MOCK_CLOUDIFY_MANAGER, + "ServicePort":80, + "ServiceEnableTagOverride":false, + "CreateIndex":16, + "ModifyIndex":16 + }]); + +nock(CONSUL_URL).persist().get('/v1/catalog/service/inventory') + .reply(200, [{ + "ID": "", + "Node": "inventory_mock_node", + "Address": MOCK_INVENTORY, + "Datacenter": "rework-central", + "TaggedAddresses": null, + "NodeMeta": null, + "ServiceID": "inventory", + "ServiceName": "inventory", + "ServiceTags": [], + "ServiceAddress": "", + "ServicePort": 8080, + "ServiceEnableTagOverride": false, + "CreateIndex": 8068, + "ModifyIndex": 8068 + }]); + +const tests = []; + +const run_dh = function() { + describe('run deployment-handler', () => { + it('starting deployment-handler server', function() { + console.log("starting deployment-handler server"); + const dh_server = require(MAIN_PATH + 'deployment-handler'); + + return utils.sleep(5000).then(function() { + console.log("starting tests: count =", tests.length); + if (Array.isArray(tests)) { + tests.forEach(test => { + test(dh_server); + }); + } + }) + .catch(function(e) { + const error = "test of deployment-handler exiting due to test problem: " + e.message; + console.error(error); + throw e; + }); + }).timeout(10000); + }); +}; + +module.exports.INVENTORY_URL = INVENTORY_URL; +module.exports.CLOUDIFY_URL = CLOUDIFY_URL; +module.exports.add_tests = function(new_tests) {Array.prototype.push.apply(tests, new_tests);}; +module.exports.run_dh = run_dh;
\ No newline at end of file diff --git a/tests/mock_utils.js b/tests/mock_utils.js new file mode 100644 index 0000000..2d7d2e5 --- /dev/null +++ b/tests/mock_utils.js @@ -0,0 +1,25 @@ +/* +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. + +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and limitations under the License. +*/ + +"use strict"; + +module.exports.sleep = function(time) { + console.log("sleep for " + time + " msecs..."); + return new Promise((resolve) => setTimeout(() => { + console.log("woke up after " + time + " msecs"); + resolve(); + }, time)); +}; diff --git a/tests/test_dcae-deployments.js b/tests/test_dcae-deployments.js new file mode 100644 index 0000000..2aca4c7 --- /dev/null +++ b/tests/test_dcae-deployments.js @@ -0,0 +1,490 @@ +/* +Copyright(c) 2018 AT&T Intellectual Property. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. + +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and limitations under the License. 
+*/ + +/** + * handling policy updates + */ + +"use strict"; + +const nock = require('nock') + , chai = require('chai') + , chaiHttp = require('chai-http') + , expect = chai.expect + , assert = chai.assert; + +chai.use(chaiHttp); + +const dh = require('./mock_deployment_handler'); +const utils = require('./mock_utils'); + +const INV_PATH_DCAE_SERVICES = "/dcae-services"; +const INV_PATH_DCAE_SERVICE_TYPES = "/dcae-service-types/"; +const INV_PARAM_TYPE_ID = "?typeId="; + +const I_DONT_KNOW = "i-dont-know"; +const DEPLOYMENT_ID_JFL = "dep-jfl-000"; +const DEPLOYMENT_ID_JFL_1 = "dep-jfl-001"; +const EXISTING_DEPLOYMENT_ID = "deployment-CL-2229"; +const INV_EXISTING_SERVICE_TYPE = "86615fc1-aed9-4aa2-9e4b-abdaccbe63de"; + +const Inventory = { + resp_empty: {"links":{"previousLink":null,"nextLink":null},"totalCount":0,"items":[]}, + resp_services: function(deployment_id, service_type, totalCount) { + service_type = service_type || "f93264ee-348c-44f6-af3d-15b157bba735"; + const res = { + "links": { + "previousLink": null, + "nextLink": { + "rel": "next", + "href": dh.INVENTORY_URL + INV_PATH_DCAE_SERVICES + + (service_type && "/" + INV_PARAM_TYPE_ID + service_type + "&offset=25") || "/?offset=25" + } + }, + "totalCount": totalCount || 190, + "items": [] + }; + Array.from(Array(totalCount || 1), (_, idx) => idx).forEach(index => { + const dpl_id = deployment_id + ((index && "_" + index) || ""); + res.items.push({ + "serviceId": dpl_id, + "selfLink": { + "rel": "self", + "href": dh.INVENTORY_URL + INV_PATH_DCAE_SERVICES + "/" + dpl_id + }, + "created": 1503668339483, + "modified": 1503668339483, + "typeLink": { + "rel": "type", + "href": dh.INVENTORY_URL + INV_PATH_DCAE_SERVICE_TYPES + service_type + }, + "vnfId": "dummyVnfId", + "vnfLink": null, + "vnfType": "dummyVnfType", + "vnfLocation": "dummyLocation", + "deploymentRef": dpl_id, + "components": [{ + "componentId": "/components/dummy", + "componentLink": null, + "created": 1489768104449, + "modified": 1508260526203, + "componentType": "dummyComponent", + "componentSource": "DCAEController", + "status": null, + "location": null, + "shareable": 0 + }] + }); + }); + return res; + }, + resp_not_found_service: function(service_id) { + return { + "code": 1, + "type": "error", + "message": "DCAEService not found: " + service_id + }; + }, + resp_existing_blueprint: function(service_type) { + return { + "owner": "dcaeorch", + "typeName": "svc-type-000", + "typeVersion": 1, + "blueprintTemplate": "tosca_definitions_version: cloudify_dsl_1_2\nimports:\n - http://www.getcloudify.org/spec/cloudify/3.3/types.yaml\n - https://nexus01.research.att.com:8443/repository/solutioning01-mte2-raw/type_files/dti_inputs.yaml\nnode_templates:\n type-00:\n type: cloudify.nodes.Root", + "serviceIds": null, + "vnfTypes": ["TESTVNF000"], + "serviceLocations": null, + "asdcServiceId": null, + "asdcResourceId": null, + "asdcServiceURL": null, + "typeId": service_type, + "selfLink": { + "rel": "self", + "href": dh.INVENTORY_URL + INV_PATH_DCAE_SERVICE_TYPES + service_type + }, + "created": 1500910967567, + "deactivated": null + }; + }, + resp_put_service: function(deployment_id, service_type) { + return { + "serviceId": deployment_id, + "selfLink": { + "rel": "self", + "href": dh.INVENTORY_URL + INV_PATH_DCAE_SERVICES + "/" + deployment_id + }, + "created": 1516376798582, + "modified": 1516376798582, + "typeLink": { + "rel": "type", + "href": dh.INVENTORY_URL + INV_PATH_DCAE_SERVICE_TYPES + service_type + }, + "vnfId": "dummyVnfId", + "vnfLink": null, + "vnfType": 
"dummyVnfType", + "vnfLocation": "dummyLocation", + "deploymentRef": deployment_id, + "components": [{ + "componentId": "/components/dummy", + "componentLink": null, + "created": 1489768104449, + "modified": 1516376798582, + "componentType": "dummy_component", + "componentSource": "DCAEController", + "status": null, + "location": null, + "shareable": 0 + }] + }; + } +}; + +const Cloudify = { + resp_blueprint: function(deployment_id) { + return { + "main_file_name": "blueprint.yaml", + "description": null, + "created_at": "2018-01-19 15:46:47.037084", + "updated_at": "2018-01-19 15:46:47.037084", + "plan": {}, + "id": deployment_id + }; + }, + resp_deploy: function(deployment_id, blueprint_id, inputs) { + return { + "inputs": (inputs && JSON.parse(JSON.stringify(inputs)) || null), + "description": null, + "created_at": "2018-01-19 15:46:47.037084", + "updated_at": "2018-01-19 15:46:47.037084", + "id": deployment_id, + "blueprint_id": blueprint_id || deployment_id + }; + }, + resp_execution: function(deployment_id, blueprint_id, execution_id, terminated, workflow_id) { + return { + "status": (terminated && "terminated") || "pending", + "created_at": "2018-01-19 15:51:21.866227", + "workflow_id": workflow_id || "install", + "is_system_workflow": false, + "parameters": {}, + "blueprint_id": blueprint_id || deployment_id, + "deployment_id": deployment_id, + "error": "", + "id": execution_id + }; + }, + resp_outputs: function(deployment_id) { + return {"outputs": {}, "deployment_id": deployment_id}; + } +}; + +function test_get_dcae_deployments(dh_server) { + const req_path = "/dcae-deployments"; + const test_txt = "GET " + req_path; + describe(test_txt, () => { + console.log(test_txt); + it('GET all the dcae-deployments from inventory', function() { + const inv_resp = Inventory.resp_services(EXISTING_DEPLOYMENT_ID); + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICES).reply(200, inv_resp); + + return chai.request(dh_server.app).get(req_path) + .then(function(res) { + console.log("res for", test_txt, res.text); + expect(res).to.have.status(200); + expect(res).to.be.json; + + assert.containsAllKeys(res.body, {"requestId": "", "deployments": []}); + assert.isString(res.body.requestId); + assert.isArray(res.body.deployments); + assert.lengthOf(res.body.deployments, inv_resp.items.length); + assert.containsAllKeys(res.body.deployments[0], {"href":null}); + assert.match(res.body.deployments[0].href, + new RegExp("^http:[/][/]127.0.0.1:[0-9]+[/]dcae-deployments[/]" + EXISTING_DEPLOYMENT_ID)); + }) + .catch(function(err) { + console.error("err for", test_txt, err); + throw err; + }); + }); + }); +} + +function test_get_dcae_deployments_service_type_unknown(dh_server) { + const req_path = "/dcae-deployments?serviceTypeId=" + I_DONT_KNOW; + const test_txt = "GET " + req_path; + describe(test_txt, () => { + console.log(test_txt); + it('GET nothing for unknown service-type from inventory', function() { + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICES + INV_PARAM_TYPE_ID + I_DONT_KNOW) + .reply(200, Inventory.resp_empty); + + return chai.request(dh_server.app).get(req_path) + .then(function(res) { + console.log("res for", test_txt, res.text); + expect(res).to.have.status(200); + expect(res).to.be.json; + + assert.containsAllKeys(res.body, {"requestId": "", "deployments": []}); + assert.isString(res.body.requestId); + assert.isArray(res.body.deployments); + assert.lengthOf(res.body.deployments, 0); + }) + .catch(function(err) { + console.error("err for", test_txt, err); + throw err; + }); + }); + }); +} + 
+function create_main_message(service_type_id, include_inputs) { + var msg = {"serviceTypeId": service_type_id}; + if (include_inputs) { + msg.inputs= { + "dcae_service_location" : "loc00", + "dcae_target_type" : "type000", + "dcae_target_name" : "target000" + }; + } + return msg; +} + +function test_put_dcae_deployments_i_dont_know(dh_server) { + const req_path = "/dcae-deployments/" + I_DONT_KNOW; + const message = create_main_message(I_DONT_KNOW); + const test_txt = "PUT " + req_path + ": " + JSON.stringify(message); + describe(test_txt, () => { + console.log(test_txt); + it('Fail to deploy i-dont-know service', function(done) { + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICES + "/" + I_DONT_KNOW) + .reply(404, Inventory.resp_not_found_service(I_DONT_KNOW)); + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICE_TYPES + I_DONT_KNOW) + .reply(404, "<html> <head><title>Error 404 Not Found</title></head><body></body> </html>"); + + chai.request(dh_server.app).put(req_path) + .set('content-type', 'application/json') + .send(message) + .end(function(err, res) { + console.log("res for", test_txt, res.text); + expect(res).to.have.status(404); + expect(res.body).to.have.property('message'); + expect(res.body.message).to.be.equal("No service type with ID " + I_DONT_KNOW); + done(); + }); + }); + }); +} + +function test_put_dcae_deployments_missing_input_error(dh_server) { + const req_path = "/dcae-deployments/" + DEPLOYMENT_ID_JFL; + const message = create_main_message(INV_EXISTING_SERVICE_TYPE); + const test_txt = "PUT " + req_path + ": " + JSON.stringify(message); + describe(test_txt, () => { + console.log(test_txt); + it('Fail to deploy service - missing_input', function(done) { + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICES + "/" + DEPLOYMENT_ID_JFL) + .reply(404, Inventory.resp_not_found_service(DEPLOYMENT_ID_JFL)); + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICE_TYPES + INV_EXISTING_SERVICE_TYPE) + .reply(200, Inventory.resp_existing_blueprint(INV_EXISTING_SERVICE_TYPE)); + nock(dh.INVENTORY_URL).put(INV_PATH_DCAE_SERVICES + "/" + DEPLOYMENT_ID_JFL) + .reply(200, Inventory.resp_put_service(DEPLOYMENT_ID_JFL, INV_EXISTING_SERVICE_TYPE)); + nock(dh.INVENTORY_URL).delete(INV_PATH_DCAE_SERVICES + "/" + DEPLOYMENT_ID_JFL) + .reply(200); + + nock(dh.CLOUDIFY_URL).put("/api/v2.1/blueprints/" + DEPLOYMENT_ID_JFL) + .reply(200, Cloudify.resp_blueprint(DEPLOYMENT_ID_JFL)); + + const depl_rejected = { + "message": "Required inputs blah...", + "error_code": "missing_required_deployment_input_error", + "server_traceback": "Traceback blah..." 
+ }; + nock(dh.CLOUDIFY_URL).put("/api/v2.1/deployments/" + DEPLOYMENT_ID_JFL) + .reply(400, depl_rejected); + + chai.request(dh_server.app).put(req_path) + .set('content-type', 'application/json') + .send(message) + .end(function(err, res) { + console.log("res for", test_txt, res.text); + expect(res).to.have.status(400); + expect(res.body).to.have.property('message'); + expect(res.body.message).to.be.equal("Status 400 from CM API -- error code: " + depl_rejected.error_code + " -- message: " + depl_rejected.message); + done(); + }); + }); + }); +} + +function test_put_dcae_deployments_success(dh_server) { + const req_path = "/dcae-deployments/" + DEPLOYMENT_ID_JFL_1; + const message = create_main_message(INV_EXISTING_SERVICE_TYPE, true); + const test_txt = "PUT " + req_path + ": " + JSON.stringify(message); + const execution_id = "execution_" + DEPLOYMENT_ID_JFL_1; + describe(test_txt, () => { + console.log(test_txt); + it('Success deploy service', function() { + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICES + "/" + DEPLOYMENT_ID_JFL_1) + .reply(404, Inventory.resp_not_found_service(DEPLOYMENT_ID_JFL_1)); + nock(dh.INVENTORY_URL).get(INV_PATH_DCAE_SERVICE_TYPES + INV_EXISTING_SERVICE_TYPE) + .reply(200, Inventory.resp_existing_blueprint(INV_EXISTING_SERVICE_TYPE)); + nock(dh.INVENTORY_URL).put(INV_PATH_DCAE_SERVICES + "/" + DEPLOYMENT_ID_JFL_1) + .reply(200, Inventory.resp_put_service(DEPLOYMENT_ID_JFL_1, INV_EXISTING_SERVICE_TYPE)); + + nock(dh.CLOUDIFY_URL).put("/api/v2.1/blueprints/" + DEPLOYMENT_ID_JFL_1) + .reply(200, Cloudify.resp_blueprint(DEPLOYMENT_ID_JFL_1)); + + nock(dh.CLOUDIFY_URL).put("/api/v2.1/deployments/" + DEPLOYMENT_ID_JFL_1) + .reply(201, Cloudify.resp_deploy(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, message.inputs)); + + nock(dh.CLOUDIFY_URL).post("/api/v2.1/executions").reply(201, + Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, execution_id)); + + nock(dh.CLOUDIFY_URL).get("/api/v2.1/executions/" + execution_id).reply(200, + Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, execution_id, true)); + + nock(dh.CLOUDIFY_URL).get("/api/v2.1/deployments/" + DEPLOYMENT_ID_JFL_1 + "/outputs") + .reply(200, Cloudify.resp_outputs(DEPLOYMENT_ID_JFL_1)); + + return chai.request(dh_server.app).put(req_path) + .set('content-type', 'application/json') + .send(message) + .then(function(res) { + console.log("res for", test_txt, res.text); + expect(res).to.have.status(202); + expect(res).to.be.json; + + return utils.sleep(10000); + }) + .then(function() { + console.log("the end of test"); + }) + .catch(function(err) { + console.error("err for", test_txt, err); + throw err; + }); + }).timeout(50000); + }); +} + +function test_get_dcae_deployments_operation(dh_server) { + const execution_id = "execution_" + DEPLOYMENT_ID_JFL_1; + const req_path = "/dcae-deployments/" + DEPLOYMENT_ID_JFL_1 + "/operation/" + execution_id; + const test_txt = "GET " + req_path; + describe(test_txt, () => { + console.log(test_txt); + it('Get operation execution succeeded', function() { + nock(dh.CLOUDIFY_URL).get("/api/v2.1/executions/" + execution_id).reply(200, + Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, execution_id, true)); + + return chai.request(dh_server.app).get(req_path) + .then(function(res) { + console.log("res for", test_txt, res.text); + expect(res).to.have.status(200); + expect(res).to.be.json; + }) + .catch(function(err) { + console.error("err for", test_txt, err); + throw err; + }); + }); + }); +} + +function 
+function test_get_dcae_deployments_service_type_deployed(dh_server) {
+    const req_path = "/dcae-deployments?serviceTypeId=" + INV_EXISTING_SERVICE_TYPE;
+    const test_txt = "GET " + req_path;
+    describe(test_txt, () => {
+        console.log(test_txt);
+        it('GET services=deployments of the service-type from inventory', function() {
+            const deployed_count = 10;
+            nock(dh.INVENTORY_URL)
+                .get(INV_PATH_DCAE_SERVICES + INV_PARAM_TYPE_ID + INV_EXISTING_SERVICE_TYPE)
+                .reply(200, Inventory.resp_services(DEPLOYMENT_ID_JFL_1, INV_EXISTING_SERVICE_TYPE, deployed_count));
+
+            return chai.request(dh_server.app).get(req_path)
+                .then(function(res) {
+                    console.log("res for", test_txt, res.text);
+                    expect(res).to.have.status(200);
+                    expect(res).to.be.json;
+
+                    assert.containsAllKeys(res.body, {"requestId": "", "deployments": []});
+                    assert.isString(res.body.requestId);
+                    assert.isArray(res.body.deployments);
+                    assert.lengthOf(res.body.deployments, deployed_count);
+                })
+                .catch(function(err) {
+                    console.error("err for", test_txt, err);
+                    throw err;
+                });
+        });
+    });
+}
+
+function test_delete_dcae_deployments_success(dh_server) {
+    const req_path = "/dcae-deployments/" + DEPLOYMENT_ID_JFL_1;
+    const test_txt = "DELETE " + req_path;
+    const workflow_id = "uninstall";
+    const execution_id = workflow_id + "_" + DEPLOYMENT_ID_JFL_1;
+    describe(test_txt, () => {
+        console.log(test_txt);
+        it('Success DELETE service', function() {
+            nock(dh.CLOUDIFY_URL).post("/api/v2.1/executions").reply(201,
+                Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1,
+                    execution_id, false, workflow_id));
+
+            nock(dh.INVENTORY_URL).delete(INV_PATH_DCAE_SERVICES + "/" + DEPLOYMENT_ID_JFL_1)
+                .reply(200);
+
+            nock(dh.CLOUDIFY_URL).get("/api/v2.1/executions/" + execution_id).reply(200,
+                Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1,
+                    execution_id, true, workflow_id));
+
+            nock(dh.CLOUDIFY_URL).delete("/api/v2.1/deployments/" + DEPLOYMENT_ID_JFL_1)
+                .reply(201, Cloudify.resp_deploy(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1));
+
+            nock(dh.CLOUDIFY_URL).delete("/api/v2.1/blueprints/" + DEPLOYMENT_ID_JFL_1)
+                .reply(200, Cloudify.resp_blueprint(DEPLOYMENT_ID_JFL_1));
+
+            return chai.request(dh_server.app).delete(req_path)
+                .then(function(res) {
+                    console.log("res for", test_txt, res.text);
+                    expect(res).to.have.status(202);
+                    expect(res).to.be.json;
+
+                    return utils.sleep(45000);
+                })
+                .then(function() {
+                    console.log("the end of test");
+                })
+                .catch(function(err) {
+                    console.error("err for", test_txt, err);
+                    throw err;
+                });
+        }).timeout(60000);
+    });
+}
+
+dh.add_tests([
+    test_get_dcae_deployments,
+    test_get_dcae_deployments_service_type_unknown,
+    test_put_dcae_deployments_i_dont_know,
+    test_put_dcae_deployments_missing_input_error,
+    test_get_dcae_deployments_operation,
+    test_get_dcae_deployments_service_type_deployed,
+    test_put_dcae_deployments_success,
+    test_delete_dcae_deployments_success
+]);
diff --git a/tests/test_info.js b/tests/test_info.js
new file mode 100644
index 0000000..b2f8a91
--- /dev/null
+++ b/tests/test_info.js
@@ -0,0 +1,57 @@
+/*
+Copyright(c) 2018 AT&T Intellectual Property. All rights reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+
+You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and limitations under the License.
+*/
+
+/**
+ * unit tests for the info (healthcheck) API of the deployment-handler
+ */
+
+"use strict";
+
+const chai = require('chai')
+    , chaiHttp = require('chai-http')
+    , expect = chai.expect
+    , assert = chai.assert;
+
+chai.use(chaiHttp);
+
+const dh = require('./mock_deployment_handler');
+
+function test_get_info(dh_server) {
+    const req_path = "/";
+    const test_txt = "GET " + req_path;
+    describe(test_txt, () => {
+        console.log(test_txt);
+        it('GET info', function() {
+            return chai.request(dh_server.app).get(req_path)
+                .then(function(res) {
+                    console.log("res for", test_txt, res.text);
+                    expect(res).to.have.status(200);
+                    expect(res).to.be.json;
+
+                    const info = res.body;
+                    const config = process.mainModule.exports.config;
+                    assert.include(config, info.server);
+                    assert.deepEqual(config.apiLinks, info.links);
+                })
+                .catch(function(err) {
+                    console.error("err for", test_txt, err);
+                    throw err;
+                });
+        });
+    });
+}
+
+dh.add_tests([test_get_info]);
diff --git a/tests/test_policy.js b/tests/test_policy.js
index 8161032..597c6df 100644
--- a/tests/test_policy.js
+++ b/tests/test_policy.js
@@ -1,5 +1,5 @@
 /*
-Copyright(c) 2017 AT&T Intellectual Property. All rights reserved.
+Copyright(c) 2018 AT&T Intellectual Property. All rights reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -20,21 +20,19 @@ See the License for the specific language governing permissions and limitations
 
 "use strict";
 
-const nock = require('nock');
-const chai = require('chai')
+const nock = require('nock')
+    , chai = require('chai')
     , chaiHttp = require('chai-http')
-    , expect = chai.expect;
+    , expect = chai.expect
+    , assert = chai.assert;
 
 chai.use(chaiHttp);
 
-const REQ_ID = "111";
+const dh = require('./mock_deployment_handler');
+
 const RUN_TS = new Date();
 const RUN_TS_HOURS = RUN_TS.getHours();
 
-const CONSUL_URL = 'http://consul:8500';
-const TEST_CLOUDIFY_MANAGER = "test_cloudify_manager";
-const CLOUDIFY_URL = "http://" + TEST_CLOUDIFY_MANAGER + ":80";
-
 const POLICY_ID = 'policy_id';
 const POLICY_VERSION = "policyVersion";
 const POLICY_NAME = "policyName";
@@ -80,46 +78,7 @@ function create_policy(policy_id, policy_version=1) {
     };
 }
 
-nock(CONSUL_URL).persist().get('/v1/kv/deployment_handler?raw')
-    .reply(200, {"logLevel": "DEBUG", "cloudify": {"protocol": "http"}});
-
-nock(CONSUL_URL).persist().get('/v1/catalog/service/cloudify_manager')
-    .reply(200, [{
-        "ID":"deadbeef-dead-beef-dead-beefdeadbeef",
-        "Node":"devorcl00",
-        "Address": TEST_CLOUDIFY_MANAGER,
-        "Datacenter":"rework-central",
-        "TaggedAddresses":{"lan": TEST_CLOUDIFY_MANAGER,"wan": TEST_CLOUDIFY_MANAGER},
-        "NodeMeta":{},
-        "ServiceID":"cloudify_manager",
-        "ServiceName":"cloudify_manager",
-        "ServiceTags":["http://" + TEST_CLOUDIFY_MANAGER + "/api/v2.1"],
-        "ServiceAddress": TEST_CLOUDIFY_MANAGER,
-        "ServicePort":80,
-        "ServiceEnableTagOverride":false,
-        "CreateIndex":16,
-        "ModifyIndex":16
-    }]);
-
-nock(CONSUL_URL).persist().get('/v1/catalog/service/inventory')
-    .reply(200, [{
-        "ID": "",
-        "Node": "inventory_test",
-        "Address": "inventory",
-        "Datacenter": "rework-central",
"TaggedAddresses": null, - "NodeMeta": null, - "ServiceID": "inventory", - "ServiceName": "inventory", - "ServiceTags": [], - "ServiceAddress": "inventory", - "ServicePort": 8080, - "ServiceEnableTagOverride": false, - "CreateIndex": 8068, - "ModifyIndex": 8068 - }]); - -nock(CLOUDIFY_URL).persist().get(/[/]api[/]v2[.]1[/]node-instances/) +nock(dh.CLOUDIFY_URL).persist().get(/[/]api[/]v2[.]1[/]node-instances/) .reply(200, { "items": [ { @@ -162,63 +121,27 @@ nock(CLOUDIFY_URL).persist().get(/[/]api[/]v2[.]1[/]node-instances/) "size": 10000 } } - }); - -describe('test policy on deployment-handler', () => { - it('starting', function() { - console.log("go testing deployment-handler"); - - const conf = require('./../lib/config'); - const logging = require('./../lib/logging'); - const log = logging.getLogger(); - - console.log("started logger"); - log.debug(REQ_ID, "started logger"); - - console.log("conf.configure"); - - return conf.configure() - .then(function(config) { - logging.setLevel(config.logLevel); - - /* Set up exported configuration */ - config.apiLinks = {"test" : true}; - // exports.config = config; - process.mainModule.exports.config = config; - - console.log("got configuration:", JSON.stringify(config)); - - log.debug(REQ_ID, "Configuration: " + JSON.stringify(config)); - - const main_app = require('./../deployment-handler'); - console.log("setting main_app..."); - main_app.set_app(); - console.log("set main_app"); - - const req_path = "/policy/components"; - const test_txt = "GET " + req_path; - describe(test_txt, () => { - console.log(test_txt); - it('GET all the components with policy from cloudify', function() { - console.log("chai", test_txt); - return chai.request(main_app.app).get(req_path) - .then(function(res) { - console.log("res for", test_txt, JSON.stringify(res.body)); - log.debug(REQ_ID, "received " + JSON.stringify(res.body)); - expect(res).to.have.status(200); - expect(res).to.be.json; - }) - .catch(function(err) { - console.error("err for", test_txt, err); - throw err; - }); + } +); + +function test_get_policy_components(dh_server) { + const req_path = "/policy/components"; + const test_txt = "GET " + req_path; + describe(test_txt, () => { + console.log(test_txt); + it('GET all the components with policy from cloudify', function() { + return chai.request(dh_server.app).get(req_path) + .then(function(res) { + console.log("res for", test_txt, res.text); + expect(res).to.have.status(200); + expect(res).to.be.json; + }) + .catch(function(err) { + console.error("err for", test_txt, err); + throw err; }); - }); - }) - .catch(function(e) { - const error = "test of deployment-handler exiting due to startup problem: " + e.message; - console.error(error); - throw e; }); }); -});
\ No newline at end of file
+}
+
+dh.add_tests([test_get_policy_components]);
diff --git a/tests/test_zzz_run.js b/tests/test_zzz_run.js
new file mode 100644
index 0000000..8ae405a
--- /dev/null
+++ b/tests/test_zzz_run.js
@@ -0,0 +1,21 @@
+/*
+Copyright(c) 2018 AT&T Intellectual Property. All rights reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+
+You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and limitations under the License.
+*/
+
+"use strict";
+
+const dh = require('./mock_deployment_handler');
+
+dh.run_dh();
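Note on the shared test harness: tests/mock_deployment_handler.js and tests/mock_utils.js appear in the diffstat but their hunks are not shown above. Their contract can be inferred from how the test files use them (dh.add_tests(...), dh.run_dh(), dh.INVENTORY_URL, dh.CLOUDIFY_URL, dh_server.app, utils.sleep(...)). The sketch below is only an illustration of that inferred contract, not the committed code; the consul stubs and constants (CONSUL_URL, TEST_CLOUDIFY_MANAGER, the inventory address) are assumptions carried over from the setup that was removed from tests/test_policy.js.

// illustrative sketch only -- inferred from usage, not the committed tests/mock_deployment_handler.js
"use strict";

const nock = require('nock');

// assumed values, mirroring the consul/cloudify setup test_policy.js used to do inline
const CONSUL_URL = 'http://consul:8500';
const TEST_CLOUDIFY_MANAGER = "test_cloudify_manager";

module.exports.CLOUDIFY_URL = "http://" + TEST_CLOUDIFY_MANAGER + ":80";
module.exports.INVENTORY_URL = "http://inventory:8080";

const tests = [];

// each test_*.js file registers its test functions; every function receives the started dh_server
module.exports.add_tests = function(new_tests) {
    Array.prototype.push.apply(tests, new_tests);
};

// test_zzz_run.js calls run_dh() last, after every other test file has registered itself
module.exports.run_dh = function() {
    describe('deployment-handler server', () => {
        it('starting deployment-handler', function() {
            // stub the consul config endpoint so lib/config can configure the server
            nock(CONSUL_URL).persist().get('/v1/kv/deployment_handler?raw')
                .reply(200, {"logLevel": "DEBUG", "cloudify": {"protocol": "http"}});
            // ...consul service-catalog stubs for cloudify_manager and inventory would go here...

            const conf = require('./../lib/config');
            return conf.configure().then(function(config) {
                config.apiLinks = {"test": true};
                process.mainModule.exports.config = config;
                const dh_server = require('./../deployment-handler');
                // hand the running server to every registered test function
                tests.forEach(test => test(dh_server));
            });
        });
    });
};

// tests/mock_utils.js presumably provides the promise-based sleep used by the deployment tests, e.g.:
// module.exports.sleep = function(time) { return new Promise(resolve => setTimeout(resolve, time)); };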