-rw-r--r-- | Dockerfile_UT                    |  3
-rw-r--r-- | LICENSE.txt                      |  2
-rw-r--r-- | deployment-handler.js            |  4
-rw-r--r-- | lib/cloudify.js                  |  5
-rw-r--r-- | package.json                     |  4
-rw-r--r-- | pom.xml                          |  4
-rw-r--r-- | tests/mock_deployment_handler.js |  8
-rw-r--r-- | tests/test_dcae-deployments.js   | 51
-rw-r--r-- | tests/test_policy.js             |  7
-rw-r--r-- | version.properties               |  2
10 files changed, 52 insertions, 38 deletions
diff --git a/Dockerfile_UT b/Dockerfile_UT
index e6a6509..27b84ba 100644
--- a/Dockerfile_UT
+++ b/Dockerfile_UT
@@ -6,7 +6,8 @@ ENV APPDIR ${INSROOT}/${APPUSER}

 WORKDIR ${APPDIR}

-RUN mkdir -p ${APPDIR}/lib \
+RUN uname -a \
+ && mkdir -p ${APPDIR}/lib \
  && mkdir -p ${APPDIR}/tests \
  && mkdir -p ${APPDIR}/etc \
  && mkdir -p ${APPDIR}/log \
diff --git a/LICENSE.txt b/LICENSE.txt
index 14cb17c..c142ce1 100644
--- a/LICENSE.txt
+++ b/LICENSE.txt
@@ -1,7 +1,7 @@
 /*
 * ============LICENSE_START==========================================
 * ===================================================================
-* Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+* Copyright (c) 2017-2019 AT&T Intellectual Property. All rights reserved.
 * ===================================================================
 *
 * Unless otherwise specified, all software contained herein is licensed
diff --git a/deployment-handler.js b/deployment-handler.js
index edbf84e..26074e7 100644
--- a/deployment-handler.js
+++ b/deployment-handler.js
@@ -1,5 +1,5 @@
 /*
-Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+Copyright(c) 2017-2019 AT&T Intellectual Property. All rights reserved.

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -20,8 +20,6 @@ See the License for the specific language governing permissions and limitations

 const API_VERSION = "5.0.0";

-const fs = require('fs');
-const util = require('util');
 const http = require('http');
 const https = require('https');
 const express = require('express');
diff --git a/lib/cloudify.js b/lib/cloudify.js
index 138b986..1cd5489 100644
--- a/lib/cloudify.js
+++ b/lib/cloudify.js
@@ -1,5 +1,5 @@
 /*
-Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+Copyright(c) 2017-2019 AT&T Intellectual Property. All rights reserved.

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -231,8 +231,7 @@ exports.uploadBlueprint = function(req, bpid, blueprint) {

     // Cloudify API wants a gzipped tar of a directory, not the blueprint text
     const zip = new admzip();
-    zip.addFile('work/', new Buffer(0));
-    zip.addFile('work/blueprint.yaml', new Buffer(blueprint, 'utf8'));
+    zip.addFile('work/blueprint.yaml', Buffer.from(blueprint, 'utf8'));
     const zip_buffer = zip.toBuffer();

     // Set up the HTTP PUT request
diff --git a/package.json b/package.json
index 2c5f677..9f24b9f 100644
--- a/package.json
+++ b/package.json
@@ -1,10 +1,10 @@
 {
   "name": "onap-dcae-deployment-handler",
-  "version": "5.1.0",
+  "version": "5.2.0",
   "description": "ONAP DCAE Deployment Handler",
   "main": "deployment-handler.js",
   "dependencies": {
-    "adm-zip": "0.4.7",
+    "adm-zip": "0.4.13",
     "body-parser": "^1.15.0",
     "express": "^4.13.4",
     "log4js": "^0.6.33",
diff --git a/pom.xml b/pom.xml
--- a/pom.xml
+++ b/pom.xml
@@ -1,7 +1,7 @@
 <?xml version="1.0"?>
 <!--
 ================================================================================
-Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+Copyright (c) 2017-2019 AT&T Intellectual Property. All rights reserved.
 ================================================================================
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -29,7 +29,7 @@ ECOMP is a trademark and service mark of AT&T Intellectual Property.
   <groupId>org.onap.dcaegen2.platform</groupId>
   <artifactId>deployment-handler</artifactId>
   <name>dcaegen2-platform-deployment-handler</name>
-  <version>3.1.0-SNAPSHOT</version>
+  <version>3.2.0-SNAPSHOT</version>
   <url>http://maven.apache.org</url>
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
diff --git a/tests/mock_deployment_handler.js b/tests/mock_deployment_handler.js
index 7407e55..5ed1985 100644
--- a/tests/mock_deployment_handler.js
+++ b/tests/mock_deployment_handler.js
@@ -1,5 +1,5 @@
 /*
-Copyright(c) 2018 AT&T Intellectual Property. All rights reserved.
+Copyright(c) 2018-2019 AT&T Intellectual Property. All rights reserved.

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -24,10 +24,12 @@ const nock = require('nock');
 const utils = require('./mock_utils');

 const MAIN_PATH = './../';
+const LOG_PATH = './log/';

 const CONSUL_URL = 'http://consul:8500';
 const MOCK_CLOUDIFY_MANAGER = "mock_cloudify_manager";
 const CLOUDIFY_URL = "http://" + MOCK_CLOUDIFY_MANAGER + ":80";
+const CLOUDIFY_API = "/api/v2.1";

 const MOCK_INVENTORY = "mock_inventory";
 const INVENTORY_URL = "https://" + MOCK_INVENTORY + ":8080";
@@ -45,7 +47,7 @@ nock(CONSUL_URL).persist().get('/v1/catalog/service/cloudify_manager')
         "NodeMeta":{},
         "ServiceID":"cloudify_manager",
         "ServiceName":"cloudify_manager",
-        "ServiceTags":["http://" + MOCK_CLOUDIFY_MANAGER + "/api/v2.1"],
+        "ServiceTags":["http://" + MOCK_CLOUDIFY_MANAGER + CLOUDIFY_API],
         "ServiceAddress": MOCK_CLOUDIFY_MANAGER,
         "ServicePort":80,
         "ServiceEnableTagOverride":false,
@@ -99,5 +101,7 @@ const run_dh = function() {

 module.exports.INVENTORY_URL = INVENTORY_URL;
 module.exports.CLOUDIFY_URL = CLOUDIFY_URL;
+module.exports.CLOUDIFY_API = CLOUDIFY_API;
+module.exports.LOG_PATH = LOG_PATH;
 module.exports.add_tests = function(new_tests) {Array.prototype.push.apply(tests, new_tests);};
 module.exports.run_dh = run_dh;
diff --git a/tests/test_dcae-deployments.js b/tests/test_dcae-deployments.js
index 7bf9831..2f3f7fd 100644
--- a/tests/test_dcae-deployments.js
+++ b/tests/test_dcae-deployments.js
@@ -1,5 +1,5 @@
 /*
-Copyright(c) 2018 AT&T Intellectual Property. All rights reserved.
+Copyright(c) 2018-2019 AT&T Intellectual Property. All rights reserved.

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -20,6 +20,8 @@ See the License for the specific language governing permissions and limitations

 "use strict";

+const fs = require("fs");
+
 const nock = require('nock')
     , chai = require('chai')
     , chaiHttp = require('chai-http')
@@ -354,7 +356,7 @@ function test_put_dcae_deployments_missing_input_error(dh_server) {
             return "";
         });

-    nock(dh.CLOUDIFY_URL).put("/api/v2.1/blueprints/" + DEPLOYMENT_ID_JFL)
+    nock(dh.CLOUDIFY_URL).put(dh.CLOUDIFY_API + "/blueprints/" + DEPLOYMENT_ID_JFL)
         .reply(200, function(uri, requestBody) {
             console.log(action_timer.step, "put", dh.CLOUDIFY_URL, uri, JSON.stringify(requestBody));
             return JSON.stringify(Cloudify.resp_blueprint(DEPLOYMENT_ID_JFL));
@@ -365,7 +367,7 @@
         "error_code": "missing_required_deployment_input_error",
         "server_traceback": "Traceback blah..."
     };
-    nock(dh.CLOUDIFY_URL).put("/api/v2.1/deployments/" + DEPLOYMENT_ID_JFL)
+    nock(dh.CLOUDIFY_URL).put(dh.CLOUDIFY_API + "/deployments/" + DEPLOYMENT_ID_JFL)
         .reply(400, function(uri) {
             console.log(action_timer.step, "put", dh.CLOUDIFY_URL, uri);
             return JSON.stringify(depl_rejected);
         });
@@ -411,19 +413,19 @@ function test_put_dcae_deployments_creation_failed(dh_server) {
             return JSON.stringify(Inventory.resp_put_service(DEPLOYMENT_ID_JFL_1, INV_EXISTING_SERVICE_TYPE));
         });

-    nock(dh.CLOUDIFY_URL).put("/api/v2.1/blueprints/" + DEPLOYMENT_ID_JFL_1)
+    nock(dh.CLOUDIFY_URL).put(dh.CLOUDIFY_API + "/blueprints/" + DEPLOYMENT_ID_JFL_1)
         .reply(200, function(uri, requestBody) {
             console.log(action_timer.step, "put", dh.CLOUDIFY_URL, uri, JSON.stringify(requestBody));
             return JSON.stringify(Cloudify.resp_blueprint(DEPLOYMENT_ID_JFL_1));
         });

-    nock(dh.CLOUDIFY_URL).put("/api/v2.1/deployments/" + DEPLOYMENT_ID_JFL_1)
+    nock(dh.CLOUDIFY_URL).put(dh.CLOUDIFY_API + "/deployments/" + DEPLOYMENT_ID_JFL_1)
         .reply(201, function(uri, requestBody) {
             console.log(action_timer.step, "put", dh.CLOUDIFY_URL, uri, JSON.stringify(requestBody));
             return JSON.stringify(Cloudify.resp_deploy(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, message.inputs));
         });

-    nock(dh.CLOUDIFY_URL).get("/api/v2.1/executions?deployment_id=" + DEPLOYMENT_ID_JFL_1 + "&workflow_id=create_deployment_environment&_include=id,status")
+    nock(dh.CLOUDIFY_URL).get(dh.CLOUDIFY_API + "/executions?deployment_id=" + DEPLOYMENT_ID_JFL_1 + "&workflow_id=create_deployment_environment&_include=id,status")
         .reply(200, function(uri) {
             console.log(action_timer.step, "get", dh.CLOUDIFY_URL, uri);
             return JSON.stringify(Cloudify.resp_dep_creation(DEPLOYMENT_ID_JFL_1, execution_id, "failed"));
@@ -471,37 +473,37 @@ function test_put_dcae_deployments_success(dh_server) {
             return JSON.stringify(Inventory.resp_put_service(DEPLOYMENT_ID_JFL_1, INV_EXISTING_SERVICE_TYPE));
         });

-    nock(dh.CLOUDIFY_URL).put("/api/v2.1/blueprints/" + DEPLOYMENT_ID_JFL_1)
+    nock(dh.CLOUDIFY_URL).put(dh.CLOUDIFY_API + "/blueprints/" + DEPLOYMENT_ID_JFL_1)
         .reply(200, function(uri, requestBody) {
             console.log(action_timer.step, "put", dh.CLOUDIFY_URL, uri, JSON.stringify(requestBody));
             return JSON.stringify(Cloudify.resp_blueprint(DEPLOYMENT_ID_JFL_1));
         });

-    nock(dh.CLOUDIFY_URL).put("/api/v2.1/deployments/" + DEPLOYMENT_ID_JFL_1)
+    nock(dh.CLOUDIFY_URL).put(dh.CLOUDIFY_API + "/deployments/" + DEPLOYMENT_ID_JFL_1)
         .reply(201, function(uri, requestBody) {
             console.log(action_timer.step, "put", dh.CLOUDIFY_URL, uri, JSON.stringify(requestBody));
             return JSON.stringify(Cloudify.resp_deploy(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, message.inputs));
         });

-    nock(dh.CLOUDIFY_URL).get("/api/v2.1/executions?deployment_id=" + DEPLOYMENT_ID_JFL_1 + "&workflow_id=create_deployment_environment&_include=id,status")
+    nock(dh.CLOUDIFY_URL).get(dh.CLOUDIFY_API + "/executions?deployment_id=" + DEPLOYMENT_ID_JFL_1 + "&workflow_id=create_deployment_environment&_include=id,status")
         .reply(200, function(uri) {
             console.log(action_timer.step, "get", dh.CLOUDIFY_URL, uri);
             return JSON.stringify(Cloudify.resp_dep_creation(DEPLOYMENT_ID_JFL_1, execution_id));
         });

-    nock(dh.CLOUDIFY_URL).post("/api/v2.1/executions")
+    nock(dh.CLOUDIFY_URL).post(dh.CLOUDIFY_API + "/executions")
         .reply(201, function(uri, requestBody) {
             console.log(action_timer.step, "post", dh.CLOUDIFY_URL, uri, JSON.stringify(requestBody));
             return JSON.stringify(Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, execution_id));
         });

-    nock(dh.CLOUDIFY_URL).get("/api/v2.1/executions/" + execution_id)
+    nock(dh.CLOUDIFY_URL).get(dh.CLOUDIFY_API + "/executions/" + execution_id)
         .reply(200, function(uri) {
             console.log(action_timer.step, "get", dh.CLOUDIFY_URL, uri);
             return JSON.stringify(Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, execution_id, true));
         });

-    nock(dh.CLOUDIFY_URL).get("/api/v2.1/deployments/" + DEPLOYMENT_ID_JFL_1 + "/outputs")
+    nock(dh.CLOUDIFY_URL).get(dh.CLOUDIFY_API + "/deployments/" + DEPLOYMENT_ID_JFL_1 + "/outputs")
         .reply(200, function(uri) {
             console.log(action_timer.step, "get", dh.CLOUDIFY_URL, uri);
             return JSON.stringify(Cloudify.resp_outputs(DEPLOYMENT_ID_JFL_1));
@@ -536,7 +538,7 @@ function test_get_dcae_deployments_operation(dh_server) {
     it('Get operation execution succeeded', function() {
         const action_timer = new utils.ActionTimer();
         console.log(action_timer.step, test_txt);
-        nock(dh.CLOUDIFY_URL).get("/api/v2.1/executions/" + execution_id)
+        nock(dh.CLOUDIFY_URL).get(dh.CLOUDIFY_API + "/executions/" + execution_id)
            .reply(200, function(uri) {
                console.log(action_timer.step, "get", dh.CLOUDIFY_URL, uri);
                return JSON.stringify(Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1, execution_id, true));
@@ -600,7 +602,7 @@ function test_delete_dcae_deployments_success(dh_server) {
         const action_timer = new utils.ActionTimer();
         console.log(action_timer.step, test_txt);

-        nock(dh.CLOUDIFY_URL).post("/api/v2.1/executions")
+        nock(dh.CLOUDIFY_URL).post(dh.CLOUDIFY_API + "/executions")
            .reply(201, function(uri, requestBody) {
                console.log(action_timer.step, "post", dh.CLOUDIFY_URL, uri, JSON.stringify(requestBody));
                return JSON.stringify(Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1,
@@ -613,20 +615,20 @@
                return "";
            });

-        nock(dh.CLOUDIFY_URL).get("/api/v2.1/executions/" + execution_id)
+        nock(dh.CLOUDIFY_URL).get(dh.CLOUDIFY_API + "/executions/" + execution_id)
            .reply(200, function(uri) {
                console.log(action_timer.step, "get", dh.CLOUDIFY_URL, uri);
                return JSON.stringify(Cloudify.resp_execution(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1,
                    execution_id, true, workflow_id));
            });

-        nock(dh.CLOUDIFY_URL).delete("/api/v2.1/deployments/" + DEPLOYMENT_ID_JFL_1)
+        nock(dh.CLOUDIFY_URL).delete(dh.CLOUDIFY_API + "/deployments/" + DEPLOYMENT_ID_JFL_1)
            .reply(201, function(uri) {
                console.log(action_timer.step, "delete", dh.CLOUDIFY_URL, uri);
                return JSON.stringify(Cloudify.resp_deploy(DEPLOYMENT_ID_JFL_1, DEPLOYMENT_ID_JFL_1));
            });

-        nock(dh.CLOUDIFY_URL).delete("/api/v2.1/blueprints/" + DEPLOYMENT_ID_JFL_1)
+        nock(dh.CLOUDIFY_URL).delete(dh.CLOUDIFY_API + "/blueprints/" + DEPLOYMENT_ID_JFL_1)
            .reply(200, function(uri) {
                console.log(action_timer.step, "delete", dh.CLOUDIFY_URL, uri);
                return JSON.stringify(Cloudify.resp_blueprint(DEPLOYMENT_ID_JFL_1));
@@ -667,10 +669,21 @@ function test_zipper(dh_server) {
     for (var i=0; i< 100; i++) {
         blueprint = blueprint + (i % 10);
         try {
+            const zip_folder = "zip_blueprint_" + ('0' + i).substr(-2);
+            const zip_path = dh.LOG_PATH + zip_folder + ".zip";
+
             const zip = new admzip();
-            zip.addFile('work/', new Buffer(0));
-            zip.addFile('work/blueprint.yaml', new Buffer(blueprint, 'utf8'));
+            // no need for separate folder entry like 'work/' in zip
+            // zip file created by admzip unzips properly in linux, but not in Windows :-(
+            zip.addFile(zip_folder + '/blueprint.yaml', Buffer.from(blueprint, 'utf8'));
             const zip_buffer = zip.toBuffer();
+            fs.writeFileSync(zip_path, zip_buffer);
+
+            // gave up on unzipping in old ubuntu that fails to have unzip installed
+            // uncomment the next lines if you have unzip in os to properly test
+            // execSync('unzip ' + zip_folder + ".zip", {"cwd": dh.LOG_PATH});
+            // const bp_from_file = fs.readFileSync(dh.LOG_PATH + zip_folder + '/blueprint.yaml').toString('utf8');
+            // expect(bp_from_file).to.be.equal(blueprint);
             success_blueprints.push(blueprint);
         } catch (e) {
             // TypeError
diff --git a/tests/test_policy.js b/tests/test_policy.js
index 32e5ed4..468ac2b 100644
--- a/tests/test_policy.js
+++ b/tests/test_policy.js
@@ -1,5 +1,5 @@
 /*
-Copyright(c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+Copyright(c) 2017-2019 AT&T Intellectual Property. All rights reserved.

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -52,9 +52,8 @@ const MONKEYED_POLICY_ID_5 = "DCAE_alex.Config_peach_5";
 const MONKEYED_POLICY_ID_6 = "DCAE_alex.Config_peach_6";
 const CLAMP_POLICY_ID = "CLAMP.Config_clamp_policy";

-const CFY_API = "/api/v2.1";
-const CFY_API_NODE_INSTANCES = CFY_API + "/node-instances";
-const CFY_API_EXECUTIONS = CFY_API + "/executions";
+const CFY_API_NODE_INSTANCES = dh.CLOUDIFY_API + "/node-instances";
+const CFY_API_EXECUTIONS = dh.CLOUDIFY_API + "/executions";
 const CFY_API_EXECUTION = CFY_API_EXECUTIONS + "/";

 function create_policy_body(policy_id, policy_version=1, matching_conditions=null) {
diff --git a/version.properties b/version.properties
index decdfc4..f1f03dc 100644
--- a/version.properties
+++ b/version.properties
@@ -1,5 +1,5 @@
 major=3
-minor=1
+minor=2
 patch=0
 base_version=${major}.${minor}.${patch}
 release_version=${base_version}
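
For context on the main functional change in this commit (lib/cloudify.js and the test_zipper test): the sketch below is not part of the commit; it illustrates how a blueprint can be packaged for upload with adm-zip 0.4.13 (as pinned in package.json), using a single archive entry and Buffer.from() instead of the deprecated new Buffer() constructor. The blueprint text and output path are illustrative placeholders.

// sketch only: package a blueprint roughly the way lib/cloudify.js does after this change
const admzip = require('adm-zip');   // assumes adm-zip 0.4.13 as pinned in package.json
const fs = require('fs');

const blueprint = 'tosca_definitions_version: cloudify_dsl_1_3\n';  // placeholder blueprint text

const zip = new admzip();
// one entry with a path prefix; no explicit 'work/' directory entry is needed
zip.addFile('work/blueprint.yaml', Buffer.from(blueprint, 'utf8'));
const zip_buffer = zip.toBuffer();   // Buffer suitable for the HTTP PUT to Cloudify

// optional: write the archive out for inspection (placeholder path)
fs.writeFileSync('./log/blueprint.zip', zip_buffer);

Node.js deprecated new Buffer(...) in favor of Buffer.from()/Buffer.alloc(), which is why the commit switches constructors while also dropping the separate 'work/' folder entry from the archive.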