path: root/catalog/packages
author    dyh <dengyuanhong@chinamobile.com>    2019-09-04 09:52:48 +0800
committer dyh <dengyuanhong@chinamobile.com>    2019-09-04 16:09:26 +0800
commit    a32c2b20207885d895bd96204cc166fca14db97b (patch)
tree      1edd33368158dc5f057a0a9475dced3df6c3b24c /catalog/packages
parent    431a5a35a8e0a26d21c663167303696db8a7a2a6 (diff)
update for change to etsicatalog
Change-Id: Idc2a6950960a324964500a8c4701be422de2b782
Issue-ID: MODELING-216
Signed-off-by: dyh <dengyuanhong@chinamobile.com>
Diffstat (limited to 'catalog/packages')
-rw-r--r--  catalog/packages/__init__.py  13
-rw-r--r--  catalog/packages/biz/__init__.py  13
-rw-r--r--  catalog/packages/biz/common.py  51
-rw-r--r--  catalog/packages/biz/ns_descriptor.py  239
-rw-r--r--  catalog/packages/biz/nsdm_subscription.py  218
-rw-r--r--  catalog/packages/biz/pnf_descriptor.py  226
-rw-r--r--  catalog/packages/biz/sdc_ns_package.py  172
-rw-r--r--  catalog/packages/biz/sdc_service_package.py  124
-rw-r--r--  catalog/packages/biz/sdc_vnf_package.py  254
-rw-r--r--  catalog/packages/biz/service_descriptor.py  129
-rw-r--r--  catalog/packages/biz/vnf_package.py  227
-rw-r--r--  catalog/packages/biz/vnf_pkg_artifacts.py  43
-rw-r--r--  catalog/packages/biz/vnf_pkg_subscription.py  190
-rw-r--r--  catalog/packages/const.py  78
-rw-r--r--  catalog/packages/serializers/__init__.py  13
-rw-r--r--  catalog/packages/serializers/catalog_serializers.py  442
-rw-r--r--  catalog/packages/serializers/checksum.py  30
-rw-r--r--  catalog/packages/serializers/create_nsd_info_request.py  29
-rw-r--r--  catalog/packages/serializers/create_pnfd_info_request.py  29
-rw-r--r--  catalog/packages/serializers/create_vnf_pkg_info_req.py  27
-rw-r--r--  catalog/packages/serializers/link.py  24
-rw-r--r--  catalog/packages/serializers/nsd_info.py  161
-rw-r--r--  catalog/packages/serializers/nsd_infos.py  20
-rw-r--r--  catalog/packages/serializers/nsdm_filter_data.py  177
-rw-r--r--  catalog/packages/serializers/nsdm_subscription.py  84
-rw-r--r--  catalog/packages/serializers/pnfd_info.py  107
-rw-r--r--  catalog/packages/serializers/pnfd_infos.py  20
-rw-r--r--  catalog/packages/serializers/problem_details.py  58
-rw-r--r--  catalog/packages/serializers/response.py  51
-rw-r--r--  catalog/packages/serializers/subscription_auth_data.py  77
-rw-r--r--  catalog/packages/serializers/upload_vnf_pkg_from_uri_req.py  36
-rw-r--r--  catalog/packages/serializers/vnf_pkg_artifact_info.py  39
-rw-r--r--  catalog/packages/serializers/vnf_pkg_info.py  127
-rw-r--r--  catalog/packages/serializers/vnf_pkg_infos.py  20
-rw-r--r--  catalog/packages/serializers/vnf_pkg_notifications.py  117
-rw-r--r--  catalog/packages/serializers/vnf_pkg_software_image_info.py  96
-rw-r--r--  catalog/packages/serializers/vnf_pkg_subscription.py  93
-rw-r--r--  catalog/packages/tests/__init__.py  13
-rw-r--r--  catalog/packages/tests/const.py  596
-rw-r--r--  catalog/packages/tests/test_health_check.py  50
-rw-r--r--  catalog/packages/tests/test_ns_descriptor.py  300
-rw-r--r--  catalog/packages/tests/test_nsdm_subscription.py  521
-rw-r--r--  catalog/packages/tests/test_nspackage.py  246
-rw-r--r--  catalog/packages/tests/test_pnf_descriptor.py  286
-rw-r--r--  catalog/packages/tests/test_service_descriptor.py  95
-rw-r--r--  catalog/packages/tests/test_servicepackage.py  481
-rw-r--r--  catalog/packages/tests/test_vnf_package.py  382
-rw-r--r--  catalog/packages/tests/test_vnf_pkg_subscription.py  183
-rw-r--r--  catalog/packages/tests/test_vnfpackage.py  258
-rw-r--r--  catalog/packages/urls.py  76
-rw-r--r--  catalog/packages/views/__init__.py  13
-rw-r--r--  catalog/packages/views/catalog_views.py  535
-rw-r--r--  catalog/packages/views/common.py  123
-rw-r--r--  catalog/packages/views/health_check_views.py  31
-rw-r--r--  catalog/packages/views/ns_descriptor_views.py  139
-rw-r--r--  catalog/packages/views/nsdm_subscription_views.py  127
-rw-r--r--  catalog/packages/views/pnf_descriptor_views.py  166
-rw-r--r--  catalog/packages/views/vnf_package_artifact_views.py  54
-rw-r--r--  catalog/packages/views/vnf_package_subscription_views.py  120
-rw-r--r--  catalog/packages/views/vnf_package_views.py  168
60 files changed, 8817 insertions, 0 deletions
diff --git a/catalog/packages/__init__.py b/catalog/packages/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/catalog/packages/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/packages/biz/__init__.py b/catalog/packages/biz/__init__.py
new file mode 100644
index 0000000..342c2a8
--- /dev/null
+++ b/catalog/packages/biz/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/packages/biz/common.py b/catalog/packages/biz/common.py
new file mode 100644
index 0000000..ce77a41
--- /dev/null
+++ b/catalog/packages/biz/common.py
@@ -0,0 +1,51 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.utils import fileutil
+
+CHUNK_SIZE = 1024 * 8
+
+
+def save(remote_file, vnf_pkg_id):
+ local_file_name = remote_file.name
+ local_file_dir = os.path.join(CATALOG_ROOT_PATH, vnf_pkg_id)
+ local_file_name = os.path.join(local_file_dir, local_file_name)
+ if not os.path.exists(local_file_dir):
+ fileutil.make_dirs(local_file_dir)
+ with open(local_file_name, 'wb') as local_file:
+ for chunk in remote_file.chunks(chunk_size=CHUNK_SIZE):
+ local_file.write(chunk)
+ return local_file_name
+
+
+def read(file_path, start, end):
+ fp = open(file_path, 'rb')
+ fp.seek(start)
+ pos = start
+ while pos + CHUNK_SIZE < end:
+ yield fp.read(CHUNK_SIZE)
+ pos = fp.tell()
+ yield fp.read(end - pos)
+
+
+def parse_file_range(file_path, file_range):
+ start, end = 0, os.path.getsize(file_path)
+ if file_range:
+ [start, range_end] = file_range.split('-')
+ range_end = range_end.strip() if range_end.strip() else end
+ start, end = int(start.strip()), int(range_end)
+ return start, end
diff --git a/catalog/packages/biz/ns_descriptor.py b/catalog/packages/biz/ns_descriptor.py
new file mode 100644
index 0000000..f0e0572
--- /dev/null
+++ b/catalog/packages/biz/ns_descriptor.py
@@ -0,0 +1,239 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import json
+import logging
+import os
+import uuid
+
+from catalog.packages.biz.common import parse_file_range, read, save
+from catalog.packages.const import PKG_STATUS
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import NSPackageModel, PnfPackageModel, VnfPackageModel
+from catalog.pub.exceptions import CatalogException, ResourceNotFoundException
+from catalog.pub.utils import fileutil, toscaparser
+from catalog.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+METADATA = "metadata"
+
+
+class NsDescriptor(object):
+
+ def __init__(self):
+ pass
+
+ def create(self, data, id=None):
+ logger.info('Start to create a NSD...')
+ user_defined_data = ignore_case_get(data, 'userDefinedData', {})
+ data = {
+ 'id': id if id else str(uuid.uuid4()),
+ 'nsdOnboardingState': PKG_STATUS.CREATED,
+ 'nsdOperationalState': PKG_STATUS.DISABLED,
+ 'nsdUsageState': PKG_STATUS.NOT_IN_USE,
+ 'userDefinedData': user_defined_data,
+ '_links': None # TODO
+ }
+ NSPackageModel.objects.create(
+ nsPackageId=data['id'],
+ onboardingState=data['nsdOnboardingState'],
+ operationalState=data['nsdOperationalState'],
+ usageState=data['nsdUsageState'],
+ userDefinedData=json.dumps(user_defined_data)
+ )
+ logger.info('A NSD(%s) has been created.' % data['id'])
+ return data
+
+ def query_multiple(self, nsdId=None):
+ if nsdId:
+ ns_pkgs = NSPackageModel.objects.filter(nsdId=nsdId)
+ else:
+ ns_pkgs = NSPackageModel.objects.all()
+ response_data = []
+ for ns_pkg in ns_pkgs:
+ data = self.fill_resp_data(ns_pkg)
+ response_data.append(data)
+ return response_data
+
+ def query_single(self, nsd_info_id):
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ logger.error('NSD(%s) does not exist.' % nsd_info_id)
+ raise ResourceNotFoundException('NSD(%s) does not exist.' % nsd_info_id)
+ return self.fill_resp_data(ns_pkgs[0])
+
+ def delete_single(self, nsd_info_id):
+ logger.info('Start to delete NSD(%s)...' % nsd_info_id)
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ logger.info('NSD(%s) has been deleted.' % nsd_info_id)
+ return
+ '''
+ if ns_pkgs[0].operationalState != PKG_STATUS.DISABLED:
+ logger.error('NSD(%s) shall be DISABLED.' % nsd_info_id)
+ raise CatalogException('NSD(%s) shall be DISABLED.' % nsd_info_id)
+ if ns_pkgs[0].usageState != PKG_STATUS.NOT_IN_USE:
+ logger.error('NSD(%s) shall be NOT_IN_USE.' % nsd_info_id)
+ raise CatalogException('NSD(%s) shall be NOT_IN_USE.' % nsd_info_id)
+ '''
+ ns_pkgs.delete()
+ ns_pkg_path = os.path.join(CATALOG_ROOT_PATH, nsd_info_id)
+ fileutil.delete_dirs(ns_pkg_path)
+ logger.info('NSD(%s) has been deleted.' % nsd_info_id)
+
+ def upload(self, nsd_info_id, remote_file):
+ logger.info('Start to upload NSD(%s)...' % nsd_info_id)
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ logger.error('NSD(%s) does not exist.' % nsd_info_id)
+ raise CatalogException('NSD(%s) does not exist.' % nsd_info_id)
+ ns_pkgs.update(onboardingState=PKG_STATUS.UPLOADING)
+
+ local_file_name = save(remote_file, nsd_info_id)
+ logger.info('NSD(%s) content has been uploaded.' % nsd_info_id)
+ return local_file_name
+
+ def download(self, nsd_info_id, file_range):
+ logger.info('Start to download NSD(%s)...' % nsd_info_id)
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ logger.error('NSD(%s) does not exist.' % nsd_info_id)
+ raise ResourceNotFoundException('NSD(%s) does not exist.' % nsd_info_id)
+ if ns_pkgs[0].onboardingState != PKG_STATUS.ONBOARDED:
+ logger.error('NSD(%s) is not ONBOARDED.' % nsd_info_id)
+ raise CatalogException('NSD(%s) is not ONBOARDED.' % nsd_info_id)
+
+ local_file_path = ns_pkgs[0].localFilePath
+ start, end = parse_file_range(local_file_path, file_range)
+ logger.info('NSD(%s) has been downloaded.' % nsd_info_id)
+ return read(local_file_path, start, end)
+
+ def parse_nsd_and_save(self, nsd_info_id, local_file_name):
+ logger.info('Start to process NSD(%s)...' % nsd_info_id)
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ ns_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)
+
+ nsd_json = toscaparser.parse_nsd(local_file_name)
+ logger.debug("%s", nsd_json)
+ nsd = json.JSONDecoder().decode(nsd_json)
+
+ nsd_id = nsd.get("ns", {}).get("properties", {}).get("descriptor_id", "")
+ nsd_name = nsd.get("ns", {}).get("properties", {}).get("name", "")
+ nsd_version = nsd.get("ns", {}).get("properties", {}).get("version", "")
+ nsd_designer = nsd.get("ns", {}).get("properties", {}).get("designer", "")
+ invariant_id = nsd.get("ns", {}).get("properties", {}).get("invariant_id", "")
+ if nsd_id == "":
+ raise CatalogException("nsd_id(%s) does not exist in metadata." % nsd_id)
+ other_nspkg = NSPackageModel.objects.filter(nsdId=nsd_id)
+ if other_nspkg and other_nspkg[0].nsPackageId != nsd_info_id:
+ logger.warn("NSD(%s,%s) already exists.", nsd_id, other_nspkg[0].nsPackageId)
+ raise CatalogException("NSD(%s) already exists." % nsd_id)
+
+ for vnf in nsd["vnfs"]:
+ vnfd_id = vnf["properties"].get("descriptor_id", "undefined")
+ if vnfd_id == "undefined":
+ vnfd_id = vnf["properties"].get("id", "undefined")
+ pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+ if not pkg:
+ pkg = VnfPackageModel.objects.filter(vnfPackageId=vnfd_id)
+ if not pkg:
+ vnfd_name = vnf.get("vnf_id", "undefined")
+ logger.error("[%s] is not distributed.", vnfd_name)
+ raise CatalogException("VNF package(%s) is not distributed." % vnfd_id)
+
+ for pnf in nsd["pnfs"]:
+ pnfd_id = pnf["properties"].get("descriptor_id", "undefined")
+ if pnfd_id == "undefined":
+ pnfd_id = pnf["properties"].get("id", "undefined")
+ pkg = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
+ if not pkg:
+ pkg = PnfPackageModel.objects.filter(pnfPackageId=pnfd_id)
+ if not pkg:
+ pnfd_name = pnf.get("pnf_id", "undefined")
+ logger.error("[%s] is not distributed.", pnfd_name)
+ raise CatalogException("PNF package(%s) is not distributed." % pnfd_name)
+
+ ns_pkgs.update(
+ nsdId=nsd_id,
+ nsdName=nsd_name,
+ nsdDesginer=nsd_designer,
+ nsdDescription=nsd.get("description", ""),
+ nsdVersion=nsd_version,
+ invariantId=invariant_id,
+ onboardingState=PKG_STATUS.ONBOARDED,
+ operationalState=PKG_STATUS.ENABLED,
+ usageState=PKG_STATUS.NOT_IN_USE,
+ nsPackageUri=local_file_name,
+ sdcCsarId=nsd_info_id,
+ localFilePath=local_file_name,
+ nsdModel=nsd_json
+ )
+ logger.info('NSD(%s) has been processed.' % nsd_info_id)
+
+ def fill_resp_data(self, ns_pkg):
+ data = {
+ 'id': ns_pkg.nsPackageId,
+ 'nsdId': ns_pkg.nsdId,
+ 'nsdName': ns_pkg.nsdName,
+ 'nsdVersion': ns_pkg.nsdVersion,
+ 'nsdDesigner': ns_pkg.nsdDesginer,
+ 'nsdInvariantId': ns_pkg.invariantId,
+ 'vnfPkgIds': [],
+ 'pnfdInfoIds': [], # TODO
+ 'nestedNsdInfoIds': [], # TODO
+ 'nsdOnboardingState': ns_pkg.onboardingState,
+ 'onboardingFailureDetails': None, # TODO
+ 'nsdOperationalState': ns_pkg.operationalState,
+ 'nsdUsageState': ns_pkg.usageState,
+ 'userDefinedData': {},
+ '_links': None # TODO
+ }
+
+ if ns_pkg.nsdModel:
+ nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
+ vnf_pkg_ids = []
+ for vnf in nsd_model['vnfs']:
+ vnfd_id = vnf["properties"].get("descriptor_id", "undefined")
+ if vnfd_id == "undefined":
+ vnfd_id = vnf["properties"].get("id", "undefined")
+ pkgs = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+ if not pkgs:
+ pkgs = VnfPackageModel.objects.filter(vnfPackageId=vnfd_id)
+ for pkg in pkgs:
+ vnf_pkg_ids.append(pkg.vnfPackageId)
+ data['vnfPkgIds'] = vnf_pkg_ids
+
+ pnf_info_ids = []
+ for pnf in nsd_model['pnfs']:
+ pnfd_id = pnf["properties"].get("descriptor_id", "undefined")
+ if pnfd_id == "undefined":
+ pnfd_id = pnf["properties"].get("id", "undefined")
+ pkgs = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
+ if not pkgs:
+ pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_id)
+ for pkg in pkgs:
+ pnf_info_ids.append(pkg.pnfPackageId)
+ data['pnfInfoIds'] = pnf_info_ids # TODO: need reconfirming
+
+ if ns_pkg.userDefinedData:
+ user_defined_data = json.JSONDecoder().decode(ns_pkg.userDefinedData)
+ data['userDefinedData'] = user_defined_data
+
+ return data
+
+ def handle_upload_failed(self, nsd_info_id):
+ ns_pkg = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ ns_pkg.update(onboardingState=PKG_STATUS.CREATED)
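A sketch of the onboarding sequence NsDescriptor supports (illustration only, not part of this change); uploaded_file is a hypothetical file object of the kind save() expects:

nsd = NsDescriptor()
nsd_info = nsd.create({'userDefinedData': {'owner': 'demo'}})   # CREATED / DISABLED / NOT_IN_USE
local_file = nsd.upload(nsd_info['id'], uploaded_file)          # onboardingState -> UPLOADING
nsd.parse_nsd_and_save(nsd_info['id'], local_file)              # PROCESSING, then ONBOARDED on success
chunks = nsd.download(nsd_info['id'], file_range=None)          # only permitted once ONBOARDED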
diff --git a/catalog/packages/biz/nsdm_subscription.py b/catalog/packages/biz/nsdm_subscription.py
new file mode 100644
index 0000000..e2af6e4
--- /dev/null
+++ b/catalog/packages/biz/nsdm_subscription.py
@@ -0,0 +1,218 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ast
+import json
+import logging
+import requests
+import uuid
+
+from collections import Counter
+
+from rest_framework import status
+
+from catalog.packages import const
+from catalog.pub.database.models import NsdmSubscriptionModel
+from catalog.pub.exceptions import CatalogException, \
+ ResourceNotFoundException, \
+ NsdmBadRequestException, NsdmDuplicateSubscriptionException
+from catalog.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+PARAMSBASICKEYS = ["userName", "password"]
+
+PARAMSOAUTH2CLIENTCREDENTIALSKEYS = ["clientId", "clientPassword",
+ "tokenEndpoint"]
+
+
+def is_filter_type_equal(new_filter, existing_filter):
+ return Counter(list(set(new_filter))) == Counter(existing_filter)
+
+
+class NsdmSubscription:
+
+ def __init__(self):
+ pass
+
+ def query_single_subscription(self, subscription_id):
+ logger.debug("Start Query Subscription... ")
+ subscription = \
+ NsdmSubscriptionModel.objects.filter(
+ subscriptionid=subscription_id)
+ if not subscription.exists():
+ raise ResourceNotFoundException(
+ "Subscription(%s) doesn't exists" % subscription_id)
+ logger.debug("Subscription found... ")
+ return self.fill_resp_data(subscription[0])
+
+ def delete_single_subscription(self, subscription_id):
+ logger.debug("Start Delete Subscription... ")
+ subscription = \
+ NsdmSubscriptionModel.objects.filter(
+ subscriptionid=subscription_id)
+ if not subscription.exists():
+ raise ResourceNotFoundException(
+ "Subscription(%s) doesn't exists" % subscription_id)
+ subscription.delete()
+ logger.debug("Deleted Subscription... ")
+
+ def query_multi_subscriptions(self, query_params):
+ self.params = query_params
+ query_data = {}
+ logger.debug("Start QueryMultiSubscriptions get --> "
+ "Check for filters in query params" % self.params)
+ for query, value in list(self.params.items()):
+ if query in const.NSDM_NOTIFICATION_FILTERS and value:
+ query_data[query + '__icontains'] = json.dumps(list(set(value)))
+ # Query the database with filters if the request
+ # has fields in request params, else fetch all records
+ if query_data:
+ subscriptions = NsdmSubscriptionModel.objects.filter(**query_data)
+ else:
+ subscriptions = NsdmSubscriptionModel.objects.all()
+ if not subscriptions.exists():
+ raise ResourceNotFoundException("Subscriptions doesn't exist")
+ return [self.fill_resp_data(subscription)
+ for subscription in subscriptions]
+
+ def check_callbackuri_connection(self):
+ logger.debug("Create Subscription --> Test Callback URI --"
+ "Sending GET request to %s" % self.callback_uri)
+ try:
+ response = requests.get(self.callback_uri, timeout=2)
+ if response.status_code != status.HTTP_204_NO_CONTENT:
+ raise CatalogException("callbackUri %s returns %s status "
+ "code." % (self.callback_uri,
+ response.status_code))
+ except Exception:
+ raise CatalogException("callbackUri %s didn't return 204 status"
+ "code." % self.callback_uri)
+
+ def fill_resp_data(self, subscription):
+ subscription_filter = dict()
+ for filter_type in const.NSDM_NOTIFICATION_FILTERS:
+ if subscription.__dict__[filter_type]:
+ subscription_filter[filter_type] = \
+ ast.literal_eval(subscription.__dict__[filter_type])
+ resp_data = {
+ 'id': subscription.subscriptionid,
+ 'callbackUri': subscription.callback_uri,
+ 'filter': subscription_filter,
+ '_links': json.loads(subscription.links)
+ }
+ return resp_data
+
+ def create(self, data):
+ logger.debug("Start Create Subscription... ")
+ self.filter = ignore_case_get(data, "filter", {})
+ self.callback_uri = ignore_case_get(data, "callbackUri")
+ self.authentication = ignore_case_get(data, "authentication", {})
+ self.subscription_id = str(uuid.uuid4())
+ self.check_callbackuri_connection()
+ self.check_valid_auth_info()
+ self.check_filter_types()
+ self.check_valid()
+ self.save_db()
+ subscription = \
+ NsdmSubscriptionModel.objects.get(
+ subscriptionid=self.subscription_id)
+ return self.fill_resp_data(subscription)
+
+ def check_filter_types(self):
+ # Check if both nsdId and nsdInfoId
+ # or pnfdId and pnfdInfoId are present
+ logger.debug("Create Subscription --> Validating Filters... ")
+ if self.filter and \
+ self.filter.get("nsdId", "") and \
+ self.filter.get("nsdInfoId", ""):
+ raise NsdmBadRequestException("Notification Filter should contain"
+ " either nsdId or nsdInfoId")
+ if self.filter and \
+ self.filter.get("pnfdId", "") and \
+ self.filter.get("pnfdInfoIds", ""):
+ raise NsdmBadRequestException("Notification Filter should contain"
+ " either pnfdId or pnfdInfoIds")
+
+ def check_valid_auth_info(self):
+ logger.debug("Create Subscription --> Validating Auth "
+ "details if provided... ")
+ if self.authentication.get("paramsBasic", {}) and \
+ const.BASIC not in self.authentication.get("authType", ''):
+ raise NsdmBadRequestException('Auth type should be ' + const.BASIC)
+ if self.authentication.get("paramsOauth2ClientCredentials", {}) and \
+ const.OAUTH2_CLIENT_CREDENTIALS not in \
+ self.authentication.get("authType", ''):
+ raise NsdmBadRequestException('Auth type should '
+ 'be ' + const.OAUTH2_CLIENT_CREDENTIALS)
+ if const.BASIC in self.authentication.get("authType", '') and \
+ "paramsBasic" in list(self.authentication.keys()) and \
+ not is_filter_type_equal(PARAMSBASICKEYS, list(
+ self.authentication.get("paramsBasic").keys())):
+ raise NsdmBadRequestException('userName and password needed '
+ 'for ' + const.BASIC)
+ if const.OAUTH2_CLIENT_CREDENTIALS in \
+ self.authentication.get("authType", '') and \
+ "paramsOauth2ClientCredentials" in \
+ list(self.authentication.keys()) and \
+ not is_filter_type_equal(PARAMSOAUTH2CLIENTCREDENTIALSKEYS, list(
+ self.authentication.get("paramsOauth2ClientCredentials").keys())):
+ raise NsdmBadRequestException('clientId, clientPassword and '
+ 'tokenEndpoint required '
+ 'for ' + const.OAUTH2_CLIENT_CREDENTIALS)
+
+ def check_filter_exists(self, subscription):
+ for filter_type in const.NSDM_NOTIFICATION_FILTERS:
+ if not is_filter_type_equal(self.filter.get(filter_type, []),
+ ast.literal_eval(
+ getattr(subscription,
+ filter_type))):
+ return False
+ return True
+
+ def check_valid(self):
+ logger.debug("Create Subscription --> Checking DB if "
+ "same subscription exists already exists... ")
+ subscriptions = \
+ NsdmSubscriptionModel.objects.filter(
+ callback_uri=self.callback_uri)
+ if not subscriptions.exists():
+ return
+ for subscription in subscriptions:
+ if self.check_filter_exists(subscription):
+ raise NsdmDuplicateSubscriptionException(
+ "Already Subscription exists with the "
+ "same callbackUri and filter")
+
+ def save_db(self):
+ logger.debug("Create Subscription --> Saving the subscription "
+ "%s to the database" % self.subscription_id)
+ links = {
+ "self": {
+ "href":
+ const.NSDM_SUBSCRIPTION_ROOT_URI + self.subscription_id
+ }
+ }
+ subscription_save_db = {
+ "subscriptionid": self.subscription_id,
+ "callback_uri": self.callback_uri,
+ "auth_info": self.authentication,
+ "links": json.dumps(links)
+ }
+ for filter_type in const.NSDM_NOTIFICATION_FILTERS:
+ if self.filter:
+ subscription_save_db[filter_type] = json.dumps(
+ list(set(self.filter.get(filter_type, []))))
+ NsdmSubscriptionModel.objects.create(**subscription_save_db)
+ logger.debug('Create Subscription[%s] success', self.subscription_id)
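A sketch of the request body create() above accepts (illustration only, not part of this change); the callback URI, nsdId and credential values are hypothetical, const.BASIC is assumed to be the string "BASIC", and the callback endpoint must answer a test GET with 204:

subscription_request = {
    "callbackUri": "http://consumer.example.com/nsd/notifications",
    "filter": {"nsdId": ["b632bddc-bccd-4180-bd8d-4e8a9578eff7"]},
    "authentication": {
        "authType": ["BASIC"],
        "paramsBasic": {"userName": "user", "password": "secret"}
    }
}
# Returns a dict with 'id', 'callbackUri', 'filter' and '_links' on success, or raises
# NsdmBadRequestException / NsdmDuplicateSubscriptionException on validation failure.
subscription = NsdmSubscription().create(subscription_request)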
diff --git a/catalog/packages/biz/pnf_descriptor.py b/catalog/packages/biz/pnf_descriptor.py
new file mode 100644
index 0000000..547c198
--- /dev/null
+++ b/catalog/packages/biz/pnf_descriptor.py
@@ -0,0 +1,226 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import json
+import logging
+import os
+import uuid
+
+from catalog.packages.biz.common import read, save
+from catalog.packages.const import PKG_STATUS
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import NSPackageModel, PnfPackageModel
+from catalog.pub.exceptions import CatalogException, ResourceNotFoundException
+from catalog.pub.utils import fileutil, toscaparser
+from catalog.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
+class PnfDescriptor(object):
+
+ def __init__(self):
+ pass
+
+ def create(self, data):
+ logger.info('Start to create a PNFD...')
+ user_defined_data = ignore_case_get(data, 'userDefinedData', {})
+ data = {
+ 'id': str(uuid.uuid4()),
+ 'pnfdOnboardingState': PKG_STATUS.CREATED,
+ 'pnfdUsageState': PKG_STATUS.NOT_IN_USE,
+ 'userDefinedData': user_defined_data,
+ '_links': None # TODO
+ }
+ PnfPackageModel.objects.create(
+ pnfPackageId=data['id'],
+ onboardingState=data['pnfdOnboardingState'],
+ usageState=data['pnfdUsageState'],
+ userDefinedData=json.dumps(user_defined_data)
+ )
+ logger.info('A PNFD(%s) has been created.' % data['id'])
+ return data
+
+ def query_multiple(self, request):
+ pnfdId = request.query_params.get('pnfdId')
+ if pnfdId:
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfdId=pnfdId)
+ else:
+ pnf_pkgs = PnfPackageModel.objects.all()
+ response_data = []
+ for pnf_pkg in pnf_pkgs:
+ data = self.fill_response_data(pnf_pkg)
+ response_data.append(data)
+ return response_data
+
+ def query_single(self, pnfd_info_id):
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ logger.error('PNFD(%s) does not exist.' % pnfd_info_id)
+ raise ResourceNotFoundException('PNFD(%s) does not exist.' % pnfd_info_id)
+ return self.fill_response_data(pnf_pkgs[0])
+
+ def upload(self, remote_file, pnfd_info_id):
+ logger.info('Start to upload PNFD(%s)...' % pnfd_info_id)
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ logger.error('PNFD(%s) does not exist.' % pnfd_info_id)
+ raise CatalogException('PNFD(%s) does not exist.' % pnfd_info_id)
+ pnf_pkgs.update(onboardingState=PKG_STATUS.UPLOADING)
+
+ local_file_name = save(remote_file, pnfd_info_id)
+ logger.info('PNFD(%s) content has been uploaded.' % pnfd_info_id)
+ return local_file_name
+
+ def delete_single(self, pnfd_info_id):
+ logger.info('Start to delete PNFD(%s)...' % pnfd_info_id)
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ logger.info('PNFD(%s) has been deleted.' % pnfd_info_id)
+ return
+ '''
+ if pnf_pkgs[0].usageState != PKG_STATUS.NOT_IN_USE:
+ logger.info('PNFD(%s) shall be NOT_IN_USE.' % pnfd_info_id)
+ raise CatalogException('PNFD(%s) shall be NOT_IN_USE.' % pnfd_info_id)
+ '''
+ del_pnfd_id = pnf_pkgs[0].pnfdId
+ ns_pkgs = NSPackageModel.objects.all()
+ for ns_pkg in ns_pkgs:
+ nsd_model = None
+ if ns_pkg.nsdModel:
+ nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
+ if not nsd_model:
+ continue
+ for pnf in nsd_model['pnfs']:
+ if del_pnfd_id == pnf["properties"]["id"]:
+ logger.warn("PNFD(%s) is referenced in NSD", del_pnfd_id)
+ raise CatalogException('PNFD(%s) is referenced.' % pnfd_info_id)
+ pnf_pkgs.delete()
+ pnf_pkg_path = os.path.join(CATALOG_ROOT_PATH, pnfd_info_id)
+ fileutil.delete_dirs(pnf_pkg_path)
+ logger.debug('PNFD(%s) has been deleted.' % pnfd_info_id)
+
+ def download(self, pnfd_info_id):
+ logger.info('Start to download PNFD(%s)...' % pnfd_info_id)
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ logger.error('PNFD(%s) does not exist.' % pnfd_info_id)
+ raise ResourceNotFoundException('PNFD(%s) does not exist.' % pnfd_info_id)
+ if pnf_pkgs[0].onboardingState != PKG_STATUS.ONBOARDED:
+ logger.error('PNFD(%s) is not ONBOARDED.' % pnfd_info_id)
+ raise CatalogException('PNFD(%s) is not ONBOARDED.' % pnfd_info_id)
+
+ local_file_path = pnf_pkgs[0].localFilePath
+ start, end = 0, os.path.getsize(local_file_path)
+ logger.info('PNFD(%s) has been downloaded.' % pnfd_info_id)
+ return read(local_file_path, start, end)
+
+ def parse_pnfd_and_save(self, pnfd_info_id, local_file_name):
+ logger.info('Start to process PNFD(%s)...' % pnfd_info_id)
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ pnf_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)
+ pnfd_json = toscaparser.parse_pnfd(local_file_name)
+ pnfd = json.JSONDecoder().decode(pnfd_json)
+
+ logger.debug("pnfd_json is %s" % pnfd_json)
+ pnfd_id = ""
+ pnfdVersion = ""
+ pnfdProvider = ""
+ pnfdName = ""
+ if pnfd.get("pnf", "") != "":
+ if pnfd["pnf"].get("properties", "") != "":
+ pnfd_id = pnfd["pnf"].get("properties", {}).get("descriptor_id", "")
+ pnfdVersion = pnfd["pnf"].get("properties", {}).get("version", "")
+ pnfdProvider = pnfd["pnf"].get("properties", {}).get("provider", "")
+ pnfdName = pnfd["pnf"].get("properties", {}).get("name", "")
+ if pnfd_id == "":
+ pnfd_id = pnfd["metadata"].get("descriptor_id", "")
+ if pnfd_id == "":
+ pnfd_id = pnfd["metadata"].get("id", "")
+ if pnfd_id == "":
+ pnfd_id = pnfd["metadata"].get("UUID", "")
+ if pnfd_id == "":
+ raise CatalogException('pnfd_id is Null.')
+
+ if pnfdVersion == "":
+ pnfdVersion = pnfd["metadata"].get("template_version", "")
+ if pnfdVersion == "":
+ pnfdVersion = pnfd["metadata"].get("version", "")
+
+ if pnfdProvider == "":
+ pnfdProvider = pnfd["metadata"].get("template_author", "")
+ if pnfdVersion == "":
+ pnfdVersion = pnfd["metadata"].get("provider", "")
+
+ if pnfdName == "":
+ pnfdName = pnfd["metadata"].get("template_name", "")
+ if pnfdVersion == "":
+ pnfdName = pnfd["metadata"].get("name", "")
+
+ other_pnf = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
+ if other_pnf and other_pnf[0].pnfPackageId != pnfd_info_id:
+ logger.info('PNFD(%s) already exists.' % pnfd_id)
+ raise CatalogException("PNFD(%s) already exists." % pnfd_id)
+
+ pnf_pkgs.update(
+ pnfdId=pnfd_id,
+ pnfdName=pnfdName,
+ pnfdVersion=pnfdVersion,
+ pnfVendor=pnfdProvider,
+ pnfPackageUri=local_file_name,
+ onboardingState=PKG_STATUS.ONBOARDED,
+ usageState=PKG_STATUS.NOT_IN_USE,
+ localFilePath=local_file_name,
+ pnfdModel=pnfd_json
+ )
+ logger.info('PNFD(%s) has been processed.' % pnfd_info_id)
+
+ def fill_response_data(self, pnf_pkg):
+ data = {
+ 'id': pnf_pkg.pnfPackageId,
+ 'pnfdId': pnf_pkg.pnfdId,
+ 'pnfdName': pnf_pkg.pnfdName,
+ 'pnfdVersion': pnf_pkg.pnfdVersion,
+ 'pnfdProvider': pnf_pkg.pnfVendor,
+ 'pnfdInvariantId': None, # TODO
+ 'pnfdOnboardingState': pnf_pkg.onboardingState,
+ 'onboardingFailureDetails': None, # TODO
+ 'pnfdUsageState': pnf_pkg.usageState,
+ 'userDefinedData': {},
+ '_links': None # TODO
+ }
+ if pnf_pkg.userDefinedData:
+ user_defined_data = json.JSONDecoder().decode(pnf_pkg.userDefinedData)
+ data['userDefinedData'] = user_defined_data
+
+ return data
+
+ def handle_upload_failed(self, pnf_pkg_id):
+ pnf_pkg = PnfPackageModel.objects.filter(pnfPackageId=pnf_pkg_id)
+ pnf_pkg.update(onboardingState=PKG_STATUS.CREATED)
+
+ def parse_pnfd(self, csar_id, inputs):
+ try:
+ pnf_pkg = PnfPackageModel.objects.filter(pnfPackageId=csar_id)
+ if not pnf_pkg:
+ raise CatalogException("PNF CSAR(%s) does not exist." % csar_id)
+ csar_path = pnf_pkg[0].localFilePath
+ ret = {"model": toscaparser.parse_pnfd(csar_path, inputs)}
+ except CatalogException as e:
+ return [1, e.args[0]]
+ except Exception as e:
+ logger.error(e.args[0])
+ return [1, e.args[0]]
+ return [0, ret]
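A caller-side sketch (illustration only, not part of this change): parse_pnfd() above returns a [status, data] pair rather than raising, so callers branch on the first element; the csar id is hypothetical:

ret = PnfDescriptor().parse_pnfd(csar_id="hypothetical-pnf-csar-id", inputs={})
if ret[0] != 0:
    logger.error("PNFD parsing failed: %s", ret[1])
else:
    pnfd_model = ret[1]["model"]   # JSON string produced by toscaparser.parse_pnfd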
diff --git a/catalog/packages/biz/sdc_ns_package.py b/catalog/packages/biz/sdc_ns_package.py
new file mode 100644
index 0000000..4f9d460
--- /dev/null
+++ b/catalog/packages/biz/sdc_ns_package.py
@@ -0,0 +1,172 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import sys
+import traceback
+
+from catalog.pub.config.config import CATALOG_ROOT_PATH, CATALOG_URL_PATH
+from catalog.pub.config.config import REG_TO_MSB_REG_PARAM
+from catalog.pub.database.models import NSPackageModel
+from catalog.pub.exceptions import CatalogException
+from catalog.pub.msapi import sdc
+from catalog.pub.utils import toscaparser
+from catalog.packages.biz.ns_descriptor import NsDescriptor
+from catalog.pub.utils import fileutil
+
+logger = logging.getLogger(__name__)
+
+STATUS_SUCCESS, STATUS_FAILED = "success", "failed"
+
+METADATA = "metadata"
+
+
+def fmt_ns_pkg_rsp(status, desc, error_code="500"):
+ return [0, {"status": status, "statusDescription": desc, "errorCode": error_code}]
+
+
+def ns_on_distribute(csar_id):
+ ret = None
+ try:
+ ret = NsPackage().on_distribute(csar_id)
+ except CatalogException as e:
+ NsPackage().delete_csar(csar_id)
+ return fmt_ns_pkg_rsp(STATUS_FAILED, e.args[0])
+ except:
+ logger.error(traceback.format_exc())
+ NsPackage().delete_csar(csar_id)
+ return fmt_ns_pkg_rsp(STATUS_FAILED, str(sys.exc_info()))
+ if ret[0]:
+ return fmt_ns_pkg_rsp(STATUS_FAILED, ret[1])
+ return fmt_ns_pkg_rsp(STATUS_SUCCESS, ret[1], "")
+
+
+def ns_delete_csar(csar_id):
+ ret = None
+ try:
+ ret = NsPackage().delete_csar(csar_id)
+ except CatalogException as e:
+ return fmt_ns_pkg_rsp(STATUS_FAILED, e.args[0])
+ except:
+ logger.error(traceback.format_exc())
+ return fmt_ns_pkg_rsp(STATUS_FAILED, str(sys.exc_info()))
+ return fmt_ns_pkg_rsp(STATUS_SUCCESS, ret[1], "")
+
+
+def ns_get_csars():
+ ret = None
+ try:
+ ret = NsPackage().get_csars()
+ except CatalogException as e:
+ return [1, e.args[0]]
+ except:
+ logger.error(traceback.format_exc())
+ return [1, str(sys.exc_info())]
+ return ret
+
+
+def ns_get_csar(csar_id):
+ ret = None
+ try:
+ ret = NsPackage().get_csar(csar_id)
+ except CatalogException as e:
+ return [1, e.args[0]]
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ return [1, str(sys.exc_info())]
+ return ret
+
+
+def parse_nsd(csar_id, inputs):
+ ret = None
+ try:
+ ns_pkg = NSPackageModel.objects.filter(nsPackageId=csar_id)
+ if not ns_pkg:
+ raise CatalogException("NS CSAR(%s) does not exist." % csar_id)
+ csar_path = ns_pkg[0].localFilePath
+ ret = {"model": toscaparser.parse_nsd(csar_path, inputs)}
+ except CatalogException as e:
+ return [1, e.args[0]]
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ return [1, str(sys.exc_info())]
+ return [0, ret]
+
+
+class NsPackage(object):
+ """
+ Actions for sdc ns package.
+ """
+
+ def __init__(self):
+ pass
+
+ def on_distribute(self, csar_id):
+ if NSPackageModel.objects.filter(nsPackageId=csar_id):
+ return [1, "NS CSAR(%s) already exists." % csar_id]
+
+ artifact = sdc.get_artifact(sdc.ASSETTYPE_SERVICES, csar_id)
+ local_path = os.path.join(CATALOG_ROOT_PATH, csar_id)
+ csar_name = "%s.csar" % artifact.get("name", csar_id)
+ local_file_name = sdc.download_artifacts(artifact["toscaModelURL"], local_path, csar_name)
+ if local_file_name.endswith(".csar") or local_file_name.endswith(".zip"):
+ artifact_vnf_file = fileutil.unzip_file(local_file_name, local_path, "Artifacts/Deployment/OTHER/ns.csar")
+ if os.path.exists(artifact_vnf_file):
+ local_file_name = artifact_vnf_file
+
+ data = {
+ 'userDefinedData': {}
+ }
+ nsd = NsDescriptor()
+ nsd.create(data, csar_id)
+ nsd.parse_nsd_and_save(csar_id, local_file_name)
+ return [0, "CSAR(%s) distributed successfully." % csar_id]
+
+ def delete_csar(self, csar_id):
+ nsd = NsDescriptor()
+ nsd.delete_single(csar_id)
+ return [0, "Delete CSAR(%s) successfully." % csar_id]
+
+ def get_csars(self):
+ csars = []
+ nss = NSPackageModel.objects.filter()
+ for ns in nss:
+ ret = self.get_csar(ns.nsPackageId)
+ csars.append(ret[1])
+ return [0, csars]
+
+ def get_csar(self, csar_id):
+ package_info = {}
+ csars = NSPackageModel.objects.filter(nsPackageId=csar_id)
+ if csars:
+ package_info["nsdId"] = csars[0].nsdId
+ package_info["nsPackageId"] = csars[0].nsPackageId
+ package_info["nsdProvider"] = csars[0].nsdDesginer
+ package_info["nsdVersion"] = csars[0].nsdVersion
+ package_info["csarName"] = csars[0].nsPackageUri
+ package_info["nsdModel"] = csars[0].nsdModel
+ package_info["nsdInvariantId"] = csars[0].invariantId
+ package_info["downloadUrl"] = "http://%s:%s/%s/%s/%s" % (
+ REG_TO_MSB_REG_PARAM[0]["nodes"][0]["ip"],
+ REG_TO_MSB_REG_PARAM[0]["nodes"][0]["port"],
+ CATALOG_URL_PATH,
+ csar_id,
+ csars[0].nsPackageUri)
+ else:
+ raise CatalogException("Ns package[%s] not Found." % csar_id)
+
+ return [0, {"csarId": csar_id, "packageInfo": package_info}]
diff --git a/catalog/packages/biz/sdc_service_package.py b/catalog/packages/biz/sdc_service_package.py
new file mode 100644
index 0000000..d0db6bf
--- /dev/null
+++ b/catalog/packages/biz/sdc_service_package.py
@@ -0,0 +1,124 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import traceback
+
+from catalog.packages.biz.service_descriptor import ServiceDescriptor
+from catalog.pub.config.config import CATALOG_ROOT_PATH, REG_TO_MSB_REG_PARAM, CATALOG_URL_PATH
+from catalog.pub.database.models import ServicePackageModel, VnfPackageModel, PnfPackageModel
+from catalog.pub.exceptions import CatalogException, PackageNotFoundException, \
+ PackageHasExistsException
+from catalog.pub.msapi import sdc
+from catalog.pub.utils import fileutil, toscaparser
+
+logger = logging.getLogger(__name__)
+
+
+class ServicePackage(object):
+ """
+ Actions for sdc service package.
+ """
+
+ def __init__(self):
+ pass
+
+ def on_distribute(self, csar_id):
+ if ServicePackageModel.objects.filter(servicePackageId=csar_id):
+ raise PackageHasExistsException("Service CSAR(%s) already exists." % csar_id)
+
+ try:
+ service = sdc.get_asset(sdc.ASSETTYPE_SERVICES, csar_id)
+ # check if the related resources exist
+ resources = service.get('resources', None)
+ if resources:
+ for resource in resources:
+ if not VnfPackageModel.objects.filter(vnfPackageId=resource['resourceUUID']) and \
+ not PnfPackageModel.objects.filter(pnfPackageId=resource['resourceUUID']):
+ logger.error("Resource [%s] is not distributed.", resource['resourceUUID'])
+ raise CatalogException("Resource (%s) is not distributed." % resource['resourceUUID'])
+
+ # download csar package
+ local_path = os.path.join(CATALOG_ROOT_PATH, csar_id)
+ csar_name = "%s.csar" % service.get("name", csar_id)
+ local_file_name = sdc.download_artifacts(service["toscaModelURL"], local_path, csar_name)
+ if local_file_name.endswith(".csar") or local_file_name.endswith(".zip"):
+ fileutil.unzip_file(local_file_name, local_path, "")
+ data = {
+ 'userDefinedData': {}
+ }
+ serviced = ServiceDescriptor()
+ serviced.create(data, csar_id)
+ serviced.parse_serviced_and_save(csar_id, local_file_name)
+
+ except Exception as e:
+ logger.error(traceback.format_exc())
+ if ServicePackageModel.objects.filter(servicePackageId=csar_id):
+ ServicePackage().delete_csar(csar_id)
+ raise e
+
+ def delete_csar(self, csar_id):
+ serviced = ServiceDescriptor()
+ serviced.delete_single(csar_id)
+
+ def get_csars(self):
+ csars = []
+ packages = ServicePackageModel.objects.filter()
+ for package in packages:
+ csar = self.get_csar(package.servicePackageId)
+ csars.append(csar)
+ return csars
+
+ def get_csar(self, csar_id):
+ package_info = {}
+ csars = ServicePackageModel.objects.filter(servicePackageId=csar_id)
+ if csars:
+ package_info["servicedId"] = csars[0].servicedId
+ package_info["servicePackageId"] = csars[0].servicePackageId
+ package_info["servicedProvider"] = csars[0].servicedDesigner
+ package_info["servicedVersion"] = csars[0].servicedVersion
+ package_info["csarName"] = csars[0].servicePackageUri
+ package_info["servicedModel"] = csars[0].servicedModel
+ package_info["servicedInvariantId"] = csars[0].invariantId
+ package_info["downloadUrl"] = "http://%s:%s/%s/%s/%s" % (
+ REG_TO_MSB_REG_PARAM[0]["nodes"][0]["ip"],
+ REG_TO_MSB_REG_PARAM[0]["nodes"][0]["port"],
+ CATALOG_URL_PATH,
+ csar_id,
+ csars[0].servicePackageUri)
+ else:
+ error_message = "Service package[%s] not Found." % csar_id
+ logger.error(error_message)
+ raise PackageNotFoundException(error_message)
+
+ return {"csarId": csar_id, "packageInfo": package_info}
+
+ def parse_serviced(self, csar_id, inputs):
+ service_pkg = ServicePackageModel.objects.filter(servicePackageId=csar_id)
+ if not service_pkg:
+ raise PackageNotFoundException("Service CSAR(%s) does not exist." % csar_id)
+
+ try:
+ csar_path = service_pkg[0].localFilePath
+ ret = {"model": toscaparser.parse_sd(csar_path, inputs)}
+ return ret
+ except CatalogException as e:
+ logger.error(e.args[0])
+ raise e
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ raise e
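In contrast to sdc_ns_package above, ServicePackage reports failures through exceptions; a minimal caller sketch (illustration only, not part of this change), with a hypothetical csar id:

try:
    ServicePackage().on_distribute("hypothetical-sdc-service-uuid")
except PackageHasExistsException as e:
    logger.warning("Service CSAR already distributed: %s", e.args[0])
except Exception as e:
    logger.error("Service distribution failed: %s", e.args[0])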
diff --git a/catalog/packages/biz/sdc_vnf_package.py b/catalog/packages/biz/sdc_vnf_package.py
new file mode 100644
index 0000000..571c3bb
--- /dev/null
+++ b/catalog/packages/biz/sdc_vnf_package.py
@@ -0,0 +1,254 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import sys
+import threading
+import traceback
+
+from catalog.packages.const import PKG_STATUS
+from catalog.pub.config.config import CATALOG_ROOT_PATH, CATALOG_URL_PATH
+from catalog.pub.config.config import REG_TO_MSB_REG_PARAM
+from catalog.pub.database.models import VnfPackageModel
+from catalog.pub.exceptions import CatalogException
+from catalog.pub.msapi import sdc
+from catalog.pub.utils import fileutil
+from catalog.pub.utils import toscaparser
+from catalog.pub.utils.jobutil import JobUtil
+
+logger = logging.getLogger(__name__)
+
+JOB_ERROR = 255
+
+
+def nf_get_csars():
+ ret = None
+ try:
+ ret = NfPackage().get_csars()
+ except CatalogException as e:
+ return [1, e.args[0]]
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ return [1, str(sys.exc_info())]
+ return ret
+
+
+def nf_get_csar(csar_id):
+ ret = None
+ try:
+ ret = NfPackage().get_csar(csar_id)
+ except CatalogException as e:
+ return [1, e.args[0]]
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ return [1, str(sys.exc_info())]
+ return ret
+
+
+def parse_vnfd(csar_id, inputs):
+ ret = None
+ try:
+ nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=csar_id)
+ if not nf_pkg:
+ raise CatalogException("VNF CSAR(%s) does not exist." % csar_id)
+ csar_path = nf_pkg[0].localFilePath
+ ret = {"model": toscaparser.parse_vnfd(csar_path, inputs)}
+ except CatalogException as e:
+ return [1, e.args[0]]
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ return [1, str(sys.exc_info())]
+ return [0, ret]
+
+
+class NfDistributeThread(threading.Thread):
+ """
+ Sdc NF Package Distribute
+ """
+
+ def __init__(self, csar_id, vim_ids, lab_vim_id, job_id):
+ threading.Thread.__init__(self)
+ self.csar_id = csar_id
+ self.vim_ids = vim_ids
+ self.lab_vim_id = lab_vim_id
+ self.job_id = job_id
+
+ self.csar_save_path = os.path.join(CATALOG_ROOT_PATH, csar_id)
+
+ def run(self):
+ try:
+ self.on_distribute()
+ except CatalogException as e:
+ self.rollback_distribute()
+ JobUtil.add_job_status(self.job_id, JOB_ERROR, e.args[0])
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ logger.error(str(sys.exc_info()))
+ self.rollback_distribute()
+ JobUtil.add_job_status(self.job_id, JOB_ERROR, "Failed to distribute CSAR(%s)" % self.csar_id)
+
+ def on_distribute(self):
+ JobUtil.create_job(
+ inst_type='nf',
+ jobaction='on_distribute',
+ inst_id=self.csar_id,
+ job_id=self.job_id)
+ JobUtil.add_job_status(self.job_id, 5, "Start CSAR(%s) distribute." % self.csar_id)
+
+ if VnfPackageModel.objects.filter(vnfPackageId=self.csar_id):
+ err_msg = "NF CSAR(%s) already exists." % self.csar_id
+ JobUtil.add_job_status(self.job_id, JOB_ERROR, err_msg)
+ return
+
+ artifact = sdc.get_artifact(sdc.ASSETTYPE_RESOURCES, self.csar_id)
+ local_path = os.path.join(CATALOG_ROOT_PATH, self.csar_id)
+ csar_name = "%s.csar" % artifact.get("name", self.csar_id)
+ local_file_name = sdc.download_artifacts(artifact["toscaModelURL"], local_path, csar_name)
+ if local_file_name.endswith(".csar") or local_file_name.endswith(".zip"):
+ artifact_vnf_file = fileutil.unzip_file(local_file_name, local_path, "Artifacts/Deployment/OTHER/vnf.csar")
+ if os.path.exists(artifact_vnf_file):
+ local_file_name = artifact_vnf_file
+
+ vnfd_json = toscaparser.parse_vnfd(local_file_name)
+ vnfd = json.JSONDecoder().decode(vnfd_json)
+
+ if not vnfd.get("vnf"):
+ raise CatalogException("VNF properties and metadata in VNF Package(id=%s) are empty." % self.csar_id)
+
+ vnfd_id = vnfd["vnf"]["properties"].get("descriptor_id", "")
+ if VnfPackageModel.objects.filter(vnfdId=vnfd_id):
+ logger.error("VNF package(%s) already exists.", vnfd_id)
+ raise CatalogException("VNF package(%s) already exists." % vnfd_id)
+ JobUtil.add_job_status(self.job_id, 30, "Save CSAR(%s) to database." % self.csar_id)
+ vnfd_ver = vnfd["vnf"]["properties"].get("descriptor_version", "")
+ vnf_provider = vnfd["vnf"]["properties"].get("provider", "")
+ vnf_software_version = vnfd["vnf"]["properties"].get("software_version", "")
+ vnfd_product_name = vnfd["vnf"]["properties"].get("product_name", "")
+ VnfPackageModel(
+ vnfPackageId=self.csar_id,
+ vnfdId=vnfd_id,
+ vnfVendor=vnf_provider,
+ vnfdProductName=vnfd_product_name,
+ vnfdVersion=vnfd_ver,
+ vnfSoftwareVersion=vnf_software_version,
+ vnfdModel=vnfd_json,
+ localFilePath=local_file_name,
+ vnfPackageUri=csar_name,
+ onboardingState=PKG_STATUS.ONBOARDED,
+ operationalState=PKG_STATUS.ENABLED,
+ usageState=PKG_STATUS.NOT_IN_USE
+ ).save()
+ JobUtil.add_job_status(self.job_id, 100, "CSAR(%s) distribute successfully." % self.csar_id)
+
+ def rollback_distribute(self):
+ try:
+ VnfPackageModel.objects.filter(vnfPackageId=self.csar_id).delete()
+ fileutil.delete_dirs(self.csar_save_path)
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ logger.error(str(sys.exc_info()))
+
+
+class NfPkgDeleteThread(threading.Thread):
+ """
+ Sdc NF Package Deleting
+ """
+
+ def __init__(self, csar_id, job_id):
+ threading.Thread.__init__(self)
+ self.csar_id = csar_id
+ self.job_id = job_id
+
+ def run(self):
+ try:
+ self.delete_csar()
+ except CatalogException as e:
+ JobUtil.add_job_status(self.job_id, JOB_ERROR, e.args[0])
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ logger.error(str(sys.exc_info()))
+ JobUtil.add_job_status(self.job_id, JOB_ERROR, "Failed to delete CSAR(%s)" % self.csar_id)
+
+ def delete_csar(self):
+ JobUtil.create_job(
+ inst_type='nf',
+ jobaction='delete',
+ inst_id=self.csar_id,
+ job_id=self.job_id)
+ JobUtil.add_job_status(self.job_id, 5, "Start to delete CSAR(%s)." % self.csar_id)
+
+ VnfPackageModel.objects.filter(vnfPackageId=self.csar_id).delete()
+
+ JobUtil.add_job_status(self.job_id, 50, "Delete local CSAR(%s) file." % self.csar_id)
+
+ csar_save_path = os.path.join(CATALOG_ROOT_PATH, self.csar_id)
+ fileutil.delete_dirs(csar_save_path)
+
+ JobUtil.add_job_status(self.job_id, 100, "Delete CSAR(%s) successfully." % self.csar_id)
+
+
+class NfPackage(object):
+ """
+ Actions for sdc nf package.
+ """
+
+ def __init__(self):
+ pass
+
+ def get_csars(self):
+ csars = []
+ nf_pkgs = VnfPackageModel.objects.filter()
+ for nf_pkg in nf_pkgs:
+ ret = self.get_csar(nf_pkg.vnfPackageId)
+ csars.append(ret[1])
+ return [0, csars]
+
+ def get_csar(self, csar_id):
+ pkg_info = {}
+ nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=csar_id)
+ if not nf_pkg:
+ nf_pkg = VnfPackageModel.objects.filter(vnfdId=csar_id)
+
+ if nf_pkg:
+ db_csar_id = nf_pkg[0].vnfPackageId
+ pkg_info["vnfdId"] = nf_pkg[0].vnfdId
+ pkg_info["vnfPackageId"] = nf_pkg[0].vnfPackageId
+ pkg_info["vnfdProvider"] = nf_pkg[0].vnfVendor
+ pkg_info["vnfdVersion"] = nf_pkg[0].vnfdVersion
+ pkg_info["vnfVersion"] = nf_pkg[0].vnfSoftwareVersion
+ pkg_info["csarName"] = nf_pkg[0].vnfPackageUri
+ pkg_info["vnfdModel"] = nf_pkg[0].vnfdModel
+ pkg_info["downloadUrl"] = "http://%s:%s/%s/%s/%s" % (
+ REG_TO_MSB_REG_PARAM[0]["nodes"][0]["ip"],
+ REG_TO_MSB_REG_PARAM[0]["nodes"][0]["port"],
+ CATALOG_URL_PATH,
+ db_csar_id,
+ nf_pkg[0].vnfPackageUri)
+ else:
+ raise CatalogException("Vnf package[%s] not Found." % csar_id)
+
+ csar_info = {
+ "csarId": db_csar_id,
+ "packageInfo": pkg_info,
+ "imageInfo": []
+ }
+ return [0, csar_info]
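A sketch of how the distribute thread above is driven (illustration only, not part of this change); the csar id is hypothetical and uuid is assumed to be imported on the caller side:

job_id = str(uuid.uuid4())
NfDistributeThread(csar_id="hypothetical-sdc-resource-uuid",
                   vim_ids=[], lab_vim_id="", job_id=job_id).start()
# Progress is reported via JobUtil.add_job_status(); JOB_ERROR (255) marks failure.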
diff --git a/catalog/packages/biz/service_descriptor.py b/catalog/packages/biz/service_descriptor.py
new file mode 100644
index 0000000..11b6fb3
--- /dev/null
+++ b/catalog/packages/biz/service_descriptor.py
@@ -0,0 +1,129 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import uuid
+
+from catalog.packages.const import PKG_STATUS
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import ServicePackageModel, VnfPackageModel, PnfPackageModel
+from catalog.pub.exceptions import CatalogException, PackageNotFoundException
+from catalog.pub.utils import toscaparser, fileutil
+from catalog.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
+class ServiceDescriptor(object):
+ """
+ Action for Service Descriptor
+ """
+
+ def __init__(self):
+ pass
+
+ def create(self, data, csar_id=None):
+ logger.info('Start to create a ServiceD...')
+ user_defined_data = ignore_case_get(data, 'userDefinedData', {})
+ data = {
+ 'id': csar_id if csar_id else str(uuid.uuid4()),
+ 'servicedOnboardingState': PKG_STATUS.CREATED,
+ 'servicedOperationalState': PKG_STATUS.DISABLED,
+ 'servicedUsageState': PKG_STATUS.NOT_IN_USE,
+ 'userDefinedData': user_defined_data,
+ '_links': None # TODO
+ }
+ ServicePackageModel.objects.create(
+ servicePackageId=data['id'],
+ onboardingState=data['servicedOnboardingState'],
+ operationalState=data['servicedOperationalState'],
+ usageState=data['servicedUsageState'],
+ userDefinedData=json.dumps(user_defined_data)
+ )
+ logger.info('A ServiceD(%s) has been created.' % data['id'])
+ return data
+
+ def parse_serviced_and_save(self, serviced_info_id, local_file_name):
+ logger.info('Start to process ServiceD(%s)...' % serviced_info_id)
+ service_pkgs = ServicePackageModel.objects.filter(servicePackageId=serviced_info_id)
+ service_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)
+
+ serviced_json = toscaparser.parse_sd(local_file_name)
+ serviced = json.JSONDecoder().decode(serviced_json)
+
+ serviced_id = serviced.get("service", {}).get("properties", {}).get("descriptor_id", "")
+ serviced_name = serviced.get("service", {}).get("properties", {}).get("name", "")
+ serviced_version = serviced.get("service", {}).get("properties", {}).get("version", "")
+ serviced_designer = serviced.get("service", {}).get("properties", {}).get("designer", "")
+ invariant_id = serviced.get("service", {}).get("properties", {}).get("invariant_id", "")
+ if serviced_id == "":
+ raise CatalogException("serviced_id(%s) does not exist in metadata." % serviced_id)
+ other_nspkg = ServicePackageModel.objects.filter(servicedId=serviced_id)
+ if other_nspkg and other_nspkg[0].servicePackageId != serviced_info_id:
+ logger.warn("ServiceD(%s,%s) already exists.", serviced_id, other_nspkg[0].servicePackageId)
+ raise CatalogException("ServiceD(%s) already exists." % serviced_id)
+
+ for vnf in serviced["vnfs"]:
+ vnfd_id = vnf["properties"].get("descriptor_id", "undefined")
+ if vnfd_id == "undefined":
+ vnfd_id = vnf["properties"].get("id", "undefined")
+ pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+ if not pkg:
+ pkg = VnfPackageModel.objects.filter(vnfPackageId=vnfd_id)
+ if not pkg:
+ vnfd_name = vnf.get("vnf_id", "undefined")
+ logger.error("[%s] is not distributed.", vnfd_name)
+ raise CatalogException("VNF package(%s) is not distributed." % vnfd_id)
+
+ for pnf in serviced["pnfs"]:
+ pnfd_id = pnf["properties"].get("descriptor_id", "undefined")
+ if pnfd_id == "undefined":
+ pnfd_id = pnf["properties"].get("id", "undefined")
+ pkg = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
+ if not pkg:
+ pkg = PnfPackageModel.objects.filter(pnfPackageId=pnfd_id)
+ if not pkg:
+ pnfd_name = pnf.get("pnf_id", "undefined")
+ logger.error("[%s] is not distributed.", pnfd_name)
+ raise CatalogException("PNF package(%s) is not distributed." % pnfd_name)
+
+ service_pkgs.update(
+ servicedId=serviced_id,
+ servicedName=serviced_name,
+ servicedDesigner=serviced_designer,
+ servicedDescription=serviced.get("description", ""),
+ servicedVersion=serviced_version,
+ invariantId=invariant_id,
+ onboardingState=PKG_STATUS.ONBOARDED,
+ operationalState=PKG_STATUS.ENABLED,
+ usageState=PKG_STATUS.NOT_IN_USE,
+ servicePackageUri=local_file_name,
+ sdcCsarId=serviced_info_id,
+ localFilePath=local_file_name,
+ servicedModel=serviced_json
+ )
+ logger.info('ServiceD(%s) has been processed.' % serviced_info_id)
+
+ def delete_single(self, serviced_info_id):
+ logger.info('Start to delete ServiceD(%s)...' % serviced_info_id)
+ service_pkgs = ServicePackageModel.objects.filter(servicePackageId=serviced_info_id)
+ if not service_pkgs.exists():
+ logger.warn('ServiceD(%s) not found.' % serviced_info_id)
+ raise PackageNotFoundException("Service package[%s] not Found." % serviced_info_id)
+ service_pkgs.delete()
+ service_pkg_path = os.path.join(CATALOG_ROOT_PATH, serviced_info_id)
+ fileutil.delete_dirs(service_pkg_path)
+ logger.info('ServiceD(%s) has been deleted.' % serviced_info_id)
diff --git a/catalog/packages/biz/vnf_package.py b/catalog/packages/biz/vnf_package.py
new file mode 100644
index 0000000..585a599
--- /dev/null
+++ b/catalog/packages/biz/vnf_package.py
@@ -0,0 +1,227 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import sys
+import threading
+import traceback
+import urllib.request
+import uuid
+
+from catalog.packages.biz.common import parse_file_range, read, save
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import VnfPackageModel, NSPackageModel
+from catalog.pub.exceptions import CatalogException, ResourceNotFoundException
+from catalog.pub.utils.values import ignore_case_get
+from catalog.pub.utils import fileutil, toscaparser
+from catalog.packages.const import PKG_STATUS
+
+
+logger = logging.getLogger(__name__)
+
+
+class VnfPackage(object):
+
+ def __init__(self):
+ pass
+
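+    # Create a VnfPackageModel record in the CREATED/DISABLED/NOT_IN_USE state
+    # and return the corresponding VnfPkgInfo dictionary for the REST response.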
+ def create_vnf_pkg(self, data):
+ user_defined_data = ignore_case_get(data, "userDefinedData", {})
+ vnf_pkg_id = str(uuid.uuid4())
+ VnfPackageModel.objects.create(
+ vnfPackageId=vnf_pkg_id,
+ onboardingState=PKG_STATUS.CREATED,
+ operationalState=PKG_STATUS.DISABLED,
+ usageState=PKG_STATUS.NOT_IN_USE,
+ userDefinedData=json.dumps(user_defined_data)
+ )
+ data = {
+ "id": vnf_pkg_id,
+ "onboardingState": PKG_STATUS.CREATED,
+ "operationalState": PKG_STATUS.DISABLED,
+ "usageState": PKG_STATUS.NOT_IN_USE,
+ "userDefinedData": user_defined_data,
+ "_links": None
+ }
+ return data
+
+ def query_multiple(self):
+ pkgs_info = []
+ nf_pkgs = VnfPackageModel.objects.filter()
+ for nf_pkg in nf_pkgs:
+ ret = fill_response_data(nf_pkg)
+ pkgs_info.append(ret)
+ return pkgs_info
+
+ def query_single(self, vnf_pkg_id):
+ nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+ if not nf_pkg.exists():
+ logger.error('VNF package(%s) does not exist.' % vnf_pkg_id)
+ raise ResourceNotFoundException('VNF package(%s) does not exist.' % vnf_pkg_id)
+ return fill_response_data(nf_pkg[0])
+
+ def delete_vnf_pkg(self, vnf_pkg_id):
+ vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+ if not vnf_pkg.exists():
+ logger.debug('VNF package(%s) has been deleted.' % vnf_pkg_id)
+ return
+ '''
+ if vnf_pkg[0].operationalState != PKG_STATUS.DISABLED:
+ raise CatalogException("The VNF package (%s) is not disabled" % vnf_pkg_id)
+ if vnf_pkg[0].usageState != PKG_STATUS.NOT_IN_USE:
+ raise CatalogException("The VNF package (%s) is in use" % vnf_pkg_id)
+ '''
+ del_vnfd_id = vnf_pkg[0].vnfdId
+ ns_pkgs = NSPackageModel.objects.all()
+ for ns_pkg in ns_pkgs:
+ nsd_model = None
+ if ns_pkg.nsdModel:
+ nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
+ if not nsd_model:
+ continue
+ for vnf in nsd_model['vnfs']:
+ if del_vnfd_id == vnf["properties"]["descriptor_id"]:
+ raise CatalogException('VNFD(%s) is referenced.' % del_vnfd_id)
+ vnf_pkg.delete()
+ vnf_pkg_path = os.path.join(CATALOG_ROOT_PATH, vnf_pkg_id)
+ fileutil.delete_dirs(vnf_pkg_path)
+ logger.info('VNF package(%s) has been deleted.' % vnf_pkg_id)
+
+ def upload(self, vnf_pkg_id, remote_file):
+ logger.info('Start to upload VNF package(%s)...' % vnf_pkg_id)
+ vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+ # if vnf_pkg[0].onboardingState != PKG_STATUS.CREATED:
+ # logger.error("VNF package(%s) is not CREATED" % vnf_pkg_id)
+ # raise CatalogException("VNF package(%s) is not CREATED" % vnf_pkg_id)
+ vnf_pkg.update(onboardingState=PKG_STATUS.UPLOADING)
+
+ local_file_name = save(remote_file, vnf_pkg_id)
+ logger.info('VNF package(%s) has been uploaded.' % vnf_pkg_id)
+ return local_file_name
+
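+    # Return the content of an on-boarded package; file_range is parsed by
+    # parse_file_range so that partial (ranged) downloads can be served.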
+ def download(self, vnf_pkg_id, file_range):
+ logger.info('Start to download VNF package(%s)...' % vnf_pkg_id)
+ nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+ if not nf_pkg.exists():
+ logger.error('VNF package(%s) does not exist.' % vnf_pkg_id)
+ raise ResourceNotFoundException('VNF package(%s) does not exist.' % vnf_pkg_id)
+ if nf_pkg[0].onboardingState != PKG_STATUS.ONBOARDED:
+ raise CatalogException("VNF package (%s) is not on-boarded" % vnf_pkg_id)
+
+ local_file_path = nf_pkg[0].localFilePath
+ start, end = parse_file_range(local_file_path, file_range)
+ logger.info('VNF package (%s) has been downloaded.' % vnf_pkg_id)
+ return read(local_file_path, start, end)
+
+
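+# Background worker that fetches a VNF package from the addressInformation URI
+# of the upload request and then on-boards it via parse_vnfd_and_save.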
+class VnfPkgUploadThread(threading.Thread):
+ def __init__(self, data, vnf_pkg_id):
+ threading.Thread.__init__(self)
+ self.vnf_pkg_id = vnf_pkg_id
+ self.data = data
+ self.upload_file_name = None
+
+ def run(self):
+ try:
+ self.upload_vnf_pkg_from_uri()
+ parse_vnfd_and_save(self.vnf_pkg_id, self.upload_file_name)
+ except CatalogException as e:
+ logger.error(e.args[0])
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ logger.error(str(sys.exc_info()))
+
+ def upload_vnf_pkg_from_uri(self):
+ logger.info("Start to upload VNF packge(%s) from URI..." % self.vnf_pkg_id)
+ vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=self.vnf_pkg_id)
+ if vnf_pkg[0].onboardingState != PKG_STATUS.CREATED:
+ logger.error("VNF package(%s) is not CREATED" % self.vnf_pkg_id)
+ raise CatalogException("VNF package (%s) is not created" % self.vnf_pkg_id)
+ vnf_pkg.update(onboardingState=PKG_STATUS.UPLOADING)
+
+ uri = ignore_case_get(self.data, "addressInformation")
+ response = urllib.request.urlopen(uri)
+
+ local_file_dir = os.path.join(CATALOG_ROOT_PATH, self.vnf_pkg_id)
+ self.upload_file_name = os.path.join(local_file_dir, os.path.basename(uri))
+ if not os.path.exists(local_file_dir):
+ fileutil.make_dirs(local_file_dir)
+        # urllib returns bytes, so the package file must be written in binary mode
+        with open(self.upload_file_name, "wb") as local_file:
+            local_file.write(response.read())
+ response.close()
+        logger.info('VNF package(%s) has been uploaded.' % self.vnf_pkg_id)
+
+
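+# Map a VnfPackageModel record to the VnfPkgInfo structure returned by the API;
+# softwareImages, additionalArtifacts and _links are not filled in yet (TODO).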
+def fill_response_data(nf_pkg):
+ pkg_info = {}
+ pkg_info["id"] = nf_pkg.vnfPackageId
+ pkg_info["vnfdId"] = nf_pkg.vnfdId
+ pkg_info["vnfProductName"] = nf_pkg.vnfdProductName
+ pkg_info["vnfSoftwareVersion"] = nf_pkg.vnfSoftwareVersion
+ pkg_info["vnfdVersion"] = nf_pkg.vnfdVersion
+ if nf_pkg.checksum:
+ pkg_info["checksum"] = json.JSONDecoder().decode(nf_pkg.checksum)
+ pkg_info["softwareImages"] = None # TODO
+ pkg_info["additionalArtifacts"] = None # TODO
+ pkg_info["onboardingState"] = nf_pkg.onboardingState
+ pkg_info["operationalState"] = nf_pkg.operationalState
+ pkg_info["usageState"] = nf_pkg.usageState
+ if nf_pkg.userDefinedData:
+ pkg_info["userDefinedData"] = json.JSONDecoder().decode(nf_pkg.userDefinedData)
+ pkg_info["_links"] = None # TODO
+ return pkg_info
+
+
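+# Parse the uploaded CSAR with the TOSCA parser; if the extracted descriptor_id
+# is not already used by another package, update the record with the VNFD
+# metadata and mark the package ONBOARDED.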
+def parse_vnfd_and_save(vnf_pkg_id, vnf_pkg_path):
+ logger.info('Start to process VNF package(%s)...' % vnf_pkg_id)
+ vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+ vnf_pkg.update(onboardingState=PKG_STATUS.PROCESSING)
+ vnfd_json = toscaparser.parse_vnfd(vnf_pkg_path)
+ vnfd = json.JSONDecoder().decode(vnfd_json)
+
+ if vnfd.get("vnf", "") != "":
+ vnfd_id = vnfd["vnf"]["properties"].get("descriptor_id", "")
+ other_pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+ if other_pkg and other_pkg[0].vnfPackageId != vnf_pkg_id:
+ logger.error("VNF package(%s,%s) already exists.", other_pkg[0].vnfPackageId, vnfd_id)
+ raise CatalogException("VNF package(%s) already exists." % vnfd_id)
+ vnf_provider = vnfd["vnf"]["properties"].get("provider", "")
+ vnfd_ver = vnfd["vnf"]["properties"].get("descriptor_version", "")
+ vnf_software_version = vnfd["vnf"]["properties"].get("software_version", "")
+ vnfd_product_name = vnfd["vnf"]["properties"].get("product_name", "")
+ vnf_pkg.update(
+ vnfPackageId=vnf_pkg_id,
+ vnfdId=vnfd_id,
+ vnfdProductName=vnfd_product_name,
+ vnfVendor=vnf_provider,
+ vnfdVersion=vnfd_ver,
+ vnfSoftwareVersion=vnf_software_version,
+ vnfdModel=vnfd_json,
+ onboardingState=PKG_STATUS.ONBOARDED,
+ operationalState=PKG_STATUS.ENABLED,
+ usageState=PKG_STATUS.NOT_IN_USE,
+ localFilePath=vnf_pkg_path,
+ vnfPackageUri=os.path.split(vnf_pkg_path)[-1]
+ )
+ else:
+ raise CatalogException("VNF propeties and metadata in VNF Package(id=%s) are empty." % vnf_pkg_id)
+ logger.info('VNF package(%s) has been processed(done).' % vnf_pkg_id)
+
+
+def handle_upload_failed(vnf_pkg_id):
+ vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+ vnf_pkg.update(onboardingState=PKG_STATUS.CREATED)
diff --git a/catalog/packages/biz/vnf_pkg_artifacts.py b/catalog/packages/biz/vnf_pkg_artifacts.py
new file mode 100644
index 0000000..37021eb
--- /dev/null
+++ b/catalog/packages/biz/vnf_pkg_artifacts.py
@@ -0,0 +1,43 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from catalog.pub.database.models import VnfPackageModel
+from catalog.pub.exceptions import ResourceNotFoundException, ArtifactNotFoundException
+from catalog.pub.utils import fileutil
+
+logger = logging.getLogger(__name__)
+
+
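+# Look up an on-boarded VNF package, unzip its CSAR/ZIP content to a temporary
+# directory and return the content of the requested artifact file.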
+class FetchVnfPkgArtifact(object):
+ def fetch(self, vnfPkgId, artifactPath):
+ logger.debug("FetchVnfPkgArtifact--get--single--artifact--biz::>"
+ "ID: %s path: %s" % (vnfPkgId, artifactPath))
+ vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnfPkgId)
+ if not vnf_pkg.exists():
+ err_msg = "NF Package (%s) doesn't exists." % vnfPkgId
+ raise ResourceNotFoundException(err_msg)
+ vnf_pkg = vnf_pkg.get()
+ local_path = vnf_pkg.localFilePath
+ if local_path.endswith(".csar") or local_path.endswith(".zip"):
+ vnf_extract_path = fileutil.unzip_csar_to_tmp(local_path)
+ artifact_path = fileutil.get_artifact_path(vnf_extract_path, artifactPath)
+ if not artifact_path:
+                raise ArtifactNotFoundException("Couldn't find artifact %s" % artifactPath)
+ with open(artifact_path, 'rt') as f:
+ file_content = f.read()
+ else:
+ raise ArtifactNotFoundException("NF Package format is not csar or zip")
+ return file_content
diff --git a/catalog/packages/biz/vnf_pkg_subscription.py b/catalog/packages/biz/vnf_pkg_subscription.py
new file mode 100644
index 0000000..29ef92e
--- /dev/null
+++ b/catalog/packages/biz/vnf_pkg_subscription.py
@@ -0,0 +1,190 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ast
+import json
+import logging
+import os
+import requests
+import uuid
+
+from collections import Counter
+from rest_framework import status
+
+from catalog.packages import const
+from catalog.pub.database.models import VnfPkgSubscriptionModel
+from catalog.pub.exceptions import VnfPkgSubscriptionException,\
+ VnfPkgDuplicateSubscriptionException, SubscriptionDoesNotExistsException
+from catalog.pub.utils.values import ignore_case_get
+
+
+logger = logging.getLogger(__name__)
+
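+# Maps the filter names accepted by the subscription query API to the
+# corresponding VnfPkgSubscriptionModel field names (used as __icontains lookups).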
+ROOT_FILTERS = {
+ "notificationTypes": "notification_types",
+ "vnfdId": "vnfd_id",
+ "vnfPkgId": "vnf_pkg_id",
+ "operationalState": "operation_states",
+ "usageState": "usage_states"
+}
+
+
+def is_filter_type_equal(new_filter, existing_filter):
+ return Counter(new_filter) == Counter(existing_filter)
+
+
+class CreateSubscription(object):
+
+ def __init__(self, data):
+ self.data = data
+ self.filter = ignore_case_get(self.data, "filters", {})
+ self.callback_uri = ignore_case_get(self.data, "callbackUri")
+ self.authentication = ignore_case_get(self.data, "authentication", {})
+ self.notification_types = ignore_case_get(self.filter, "notificationTypes", [])
+ self.operation_states = ignore_case_get(self.filter, "operationalState", [])
+ self.usage_states = ignore_case_get(self.filter, "usageState", [])
+ self.vnfd_id = ignore_case_get(self.filter, "vnfdId", [])
+ self.vnf_pkg_id = ignore_case_get(self.filter, "vnfPkgId", [])
+ self.vnf_products_from_provider = \
+ ignore_case_get(self.filter, "vnfProductsFromProviders", {})
+
+ def check_callbackuri_connection(self):
+ logger.debug("SubscribeNotification-post::> Sending GET request "
+ "to %s" % self.callback_uri)
+ try:
+ response = requests.get(self.callback_uri, timeout=2)
+ if response.status_code != status.HTTP_204_NO_CONTENT:
+ raise VnfPkgSubscriptionException(
+ "callbackUri %s returns %s status code." % (
+ self.callback_uri,
+ response.status_code
+ )
+ )
+ except Exception:
+ raise VnfPkgSubscriptionException(
+ "callbackUri %s didn't return 204 status code." % self.callback_uri
+ )
+
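+    # Orchestrates subscription creation: verify that the callback URI is
+    # reachable, validate the authentication settings, reject duplicate
+    # subscriptions and persist the new record.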
+ def do_biz(self):
+ self.subscription_id = str(uuid.uuid4())
+ self.check_callbackuri_connection()
+ self.check_valid_auth_info()
+ self.check_valid()
+ self.save_db()
+ subscription = VnfPkgSubscriptionModel.objects.get(
+ subscription_id=self.subscription_id
+ )
+ if subscription:
+ return subscription.toDict()
+
+ def check_valid_auth_info(self):
+ logger.debug("SubscribeNotification--post::> Validating Auth "
+ "details if provided")
+ if self.authentication.get("paramsBasic", {}) and \
+ const.BASIC not in self.authentication.get("authType"):
+ raise VnfPkgSubscriptionException('Auth type should be ' + const.BASIC)
+ if self.authentication.get("paramsOauth2ClientCredentials", {}) and \
+ const.OAUTH2_CLIENT_CREDENTIALS not in self.authentication.get("authType"):
+ raise VnfPkgSubscriptionException('Auth type should be ' + const.OAUTH2_CLIENT_CREDENTIALS)
+
+ def check_filter_exists(self, sub):
+ # Check the usage states, operationStates
+ for filter_type in ["operation_states", "usage_states"]:
+ if not is_filter_type_equal(getattr(self, filter_type),
+ ast.literal_eval(getattr(sub, filter_type))):
+ return False
+        # If all the above types are the same, then check the id filters
+ for id_filter in ["vnfd_id", "vnf_pkg_id"]:
+ if not is_filter_type_equal(getattr(self, id_filter),
+ ast.literal_eval(getattr(sub, id_filter))):
+ return False
+ return True
+
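+    # A new subscription is rejected as a duplicate only when an existing record
+    # has the same callbackUri and identical filter values.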
+ def check_valid(self):
+ logger.debug("SubscribeNotification--post::> Checking DB if "
+ "callbackUri already exists")
+ subscriptions = VnfPkgSubscriptionModel.objects.filter(callback_uri=self.callback_uri)
+ if not subscriptions.exists():
+ return True
+ for subscription in subscriptions:
+ if self.check_filter_exists(subscription):
+ raise VnfPkgDuplicateSubscriptionException(
+ "Already Subscription (%s) exists with the "
+ "same callbackUri and filter" % subscription.subscription_id)
+ return True
+
+ def save_db(self):
+ logger.debug("SubscribeNotification--post::> Saving the subscription "
+ "%s to the database" % self.subscription_id)
+ links = {
+ "self": {
+ "href": os.path.join(const.VNFPKG_SUBSCRIPTION_ROOT_URI, self.subscription_id)
+ }
+ }
+ VnfPkgSubscriptionModel.objects.create(
+ subscription_id=self.subscription_id,
+ callback_uri=self.callback_uri,
+ notification_types=json.dumps(self.notification_types),
+ auth_info=json.dumps(self.authentication),
+ usage_states=json.dumps(self.usage_states),
+ operation_states=json.dumps(self.operation_states),
+ vnf_products_from_provider=json.dumps(self.vnf_products_from_provider),
+ vnfd_id=json.dumps(self.vnfd_id),
+ vnf_pkg_id=json.dumps(self.vnf_pkg_id),
+ links=json.dumps(links))
+ logger.debug('Create Subscription[%s] success', self.subscription_id)
+
+
+class QuerySubscription(object):
+
+ def query_multi_subscriptions(self, params):
+ query_data = {}
+ logger.debug("QuerySubscription--get--multi--subscriptions--biz::> Check "
+ "for filters in query params %s" % params)
+ for query, value in list(params.items()):
+ if query in ROOT_FILTERS:
+ query_data[ROOT_FILTERS[query] + '__icontains'] = value
+ # Query the database with filters if the request has fields in request params, else fetch all records
+ if query_data:
+ subscriptions = VnfPkgSubscriptionModel.objects.filter(**query_data)
+ else:
+ subscriptions = VnfPkgSubscriptionModel.objects.all()
+ if not subscriptions.exists():
+ return []
+ return [subscription.toDict() for subscription in subscriptions]
+
+ def query_single_subscription(self, subscription_id):
+ logger.debug("QuerySingleSubscriptions--get--single--subscription--biz::> "
+ "ID: %s" % subscription_id)
+
+ subscription = VnfPkgSubscriptionModel.objects.filter(
+ subscription_id=subscription_id)
+ if not subscription.exists():
+ raise SubscriptionDoesNotExistsException("Subscription with ID: %s "
+                                                     "does not exist" % subscription_id)
+ return subscription[0].toDict()
+
+
+class TerminateSubscription(object):
+
+ def terminate(self, subscription_id):
+ logger.debug("TerminateSubscriptions--delete--biz::> "
+ "ID: %s" % subscription_id)
+
+ subscription = VnfPkgSubscriptionModel.objects.filter(
+ subscription_id=subscription_id)
+ if not subscription.exists():
+ raise SubscriptionDoesNotExistsException("Subscription with ID: %s "
+                                                     "does not exist" % subscription_id)
+ subscription[0].delete()
diff --git a/catalog/packages/const.py b/catalog/packages/const.py
new file mode 100644
index 0000000..cd09b40
--- /dev/null
+++ b/catalog/packages/const.py
@@ -0,0 +1,78 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from catalog.pub.utils.jobutil import enum
+
+PKG_STATUS = enum(
+ CREATED="CREATED",
+ UPLOADING="UPLOADING",
+ PROCESSING="PROCESSING",
+ ONBOARDED="ONBOARDED",
+ IN_USE="IN_USE",
+ NOT_IN_USE="NOT_IN_USE",
+ ENABLED="ENABLED",
+ DISABLED="DISABLED"
+)
+
+AUTH_TYPES = [
+ "BASIC",
+ "OAUTH2_CLIENT_CREDENTIALS",
+ "TLS_CERT"
+]
+
+BASIC = "BASIC"
+
+OAUTH2_CLIENT_CREDENTIALS = "OAUTH2_CLIENT_CREDENTIALS"
+
+NOTIFICATION_TYPES = [
+ "VnfPackageOnboardingNotification",
+ "VnfPackageChangeNotification"
+]
+
+VNFPKG_SUBSCRIPTION_ROOT_URI = "api/vnfpkgm/v1/subscriptions/"
+
+NSDM_SUBSCRIPTION_ROOT_URI = "api/nsd/v1/subscriptions/"
+
+NSDM_NOTIFICATION_FILTERS = [
+ "notificationTypes",
+ "nsdInfoId",
+ "nsdName",
+ "nsdId",
+ "nsdVersion",
+ "nsdDesigner",
+ "nsdInvariantId",
+ "vnfPkgIds",
+ "pnfdInfoIds",
+ "nestedNsdInfoIds",
+ "nsdOnboardingState",
+ "nsdOperationalState",
+ "nsdUsageState",
+ "pnfdId",
+ "pnfdName",
+ "pnfdVersion",
+ "pnfdProvider",
+ "pnfdInvariantId",
+ "pnfdOnboardingState",
+ "pnfdUsageState"
+]
+
+NSDM_NOTIFICATION_TYPES = [
+ "NsdOnBoardingNotification",
+ "NsdOnboardingFailureNotification",
+ "NsdChangeNotification",
+ "NsdDeletionNotification",
+ "PnfdOnBoardingNotification",
+ "PnfdOnBoardingFailureNotification",
+ "PnfdDeletionNotification"
+]
diff --git a/catalog/packages/serializers/__init__.py b/catalog/packages/serializers/__init__.py
new file mode 100644
index 0000000..342c2a8
--- /dev/null
+++ b/catalog/packages/serializers/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/packages/serializers/catalog_serializers.py b/catalog/packages/serializers/catalog_serializers.py
new file mode 100644
index 0000000..f53b06d
--- /dev/null
+++ b/catalog/packages/serializers/catalog_serializers.py
@@ -0,0 +1,442 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from catalog.pub.utils.toscaparser.nsdmodel import EtsiNsdInfoModel
+from catalog.pub.utils.toscaparser.vnfdmodel import EtsiVnfdInfoModel
+
+
+class PostJobRequestSerializer(serializers.Serializer):
+ progress = serializers.CharField(
+ help_text="Job Progress",
+ required=False
+ )
+ desc = serializers.CharField(
+ help_text="Description",
+ required=False
+ )
+ errcode = serializers.CharField(
+ help_text="Error Code",
+ required=False
+ )
+
+
+class JobResponseHistoryListSerializer(serializers.Serializer):
+ status = serializers.CharField(
+ help_text="Status",
+ required=False
+ )
+ progress = serializers.CharField(
+ help_text="Job Progress",
+ required=False
+ )
+ statusDescription = serializers.CharField(
+ help_text="Status Description",
+ required=False
+ )
+ errorCode = serializers.CharField(
+ help_text="Error Code",
+ required=False,
+ allow_null=True
+ )
+ responseId = serializers.CharField(
+ help_text="Response Id",
+ required=False
+ )
+
+
+class JobResponseDescriptorSerializer(serializers.Serializer):
+ status = serializers.CharField(
+ help_text="Status",
+ required=False
+ )
+ progress = serializers.CharField(
+ help_text="Job Progress",
+ required=False
+ )
+ statusDescription = serializers.CharField(
+ help_text="Status Description",
+ required=False
+ )
+ errorCode = serializers.CharField(
+ help_text="Error Code",
+ required=False,
+ allow_null=True
+ )
+ responseId = serializers.CharField(
+ help_text="Response Id",
+ required=False
+ )
+ responseHistoryList = JobResponseHistoryListSerializer(
+ help_text="Response History List",
+ many=True,
+ required=False
+ )
+
+
+class GetJobResponseSerializer(serializers.Serializer):
+ jobId = serializers.CharField(
+ help_text="Job Id",
+ required=False
+ )
+ responseDescriptor = JobResponseDescriptorSerializer(
+ help_text="Job Response Descriptor",
+ required=False
+ )
+
+
+class PostJobResponseResultSerializer(serializers.Serializer):
+ result = serializers.CharField(
+ help_text="Result",
+ required=True
+ )
+ msg = serializers.CharField(
+ help_text="Message",
+ required=False
+ )
+
+
+class InternalErrorRequestSerializer(serializers.Serializer):
+ error = serializers.CharField(
+ help_text="Error",
+ required=True
+ )
+ errorMessage = serializers.CharField(
+ help_text="Error Message",
+ required=False
+ )
+
+
+class NsPackageDistributeRequestSerializer(serializers.Serializer):
+ csarId = serializers.CharField(
+ help_text="csarId",
+ required=True
+ )
+
+
+class NsPackageDistributeResponseSerializer(serializers.Serializer):
+ status = serializers.CharField(
+ help_text="status",
+ required=True
+ )
+ statusDescription = serializers.CharField(
+ help_text="statusDescription",
+ required=True
+ )
+ errorCode = serializers.CharField(
+ help_text="errorCode",
+ required=True,
+ allow_null=True
+ )
+
+
+class NsPackageInfoSerializer(serializers.Serializer):
+ nsdId = serializers.CharField(
+ help_text="NSD ID",
+ required=False,
+ allow_null=True
+ )
+ nsPackageId = serializers.CharField(
+ help_text="NS Package ID",
+ allow_blank=True,
+ required=False,
+ allow_null=True
+ )
+ nsdProvider = serializers.CharField(
+ help_text="NSD Provider",
+ allow_blank=True,
+ required=False,
+ allow_null=True
+ )
+ nsdVersion = serializers.CharField(
+ help_text="NSD Version",
+ allow_blank=True,
+ required=False,
+ allow_null=True
+ )
+ csarName = serializers.CharField(
+ help_text="CSAR name",
+ allow_blank=True,
+ required=False,
+ allow_null=True
+ )
+ nsdModel = serializers.CharField(
+ help_text="NSD Model",
+ allow_blank=True,
+ required=False,
+ allow_null=True
+ )
+ downloadUrl = serializers.CharField(
+ help_text="URL to download NSD Model",
+ required=False,
+ allow_null=True
+ )
+
+
+class NsPackageSerializer(serializers.Serializer):
+ csarId = serializers.CharField(
+ help_text="CSAR ID",
+ required=False,
+ allow_null=True
+ )
+ packageInfo = NsPackageInfoSerializer(
+ help_text="NS Package Info",
+ required=False,
+ allow_null=True
+ )
+
+
+class NsPackagesSerializer(serializers.ListSerializer):
+ child = NsPackageSerializer()
+
+
+class ServicePackageDistributeRequestSerializer(serializers.Serializer):
+ csarId = serializers.CharField(
+ help_text="csarId",
+ required=True
+ )
+
+
+class ServicePackageInfoSerializer(serializers.Serializer):
+ servicedId = serializers.CharField(
+ help_text="ServiceD ID",
+ required=False,
+ allow_null=True
+ )
+ servicePackageId = serializers.CharField(
+ help_text="Service Package ID",
+ allow_blank=True,
+ required=False,
+ allow_null=True
+ )
+ servicedProvider = serializers.CharField(
+ help_text="ServiceD Provider",
+ allow_blank=True,
+ required=False,
+ allow_null=True
+ )
+ servicedVersion = serializers.CharField(
+ help_text="ServiceD Version",
+ allow_blank=True,
+ required=False,
+ allow_null=True
+ )
+ csarName = serializers.CharField(
+ help_text="CSAR name",
+ allow_blank=True,
+ required=False,
+ allow_null=True
+ )
+ servicedModel = serializers.CharField(
+ help_text="ServiceD Model",
+ allow_blank=True,
+ required=False,
+ allow_null=True
+ )
+ downloadUrl = serializers.CharField(
+ help_text="URL to download ServiceD Model",
+ required=False,
+ allow_null=True
+ )
+
+
+class ServicePackageSerializer(serializers.Serializer):
+ csarId = serializers.CharField(
+ help_text="CSAR ID",
+ required=False,
+ allow_null=True
+ )
+ packageInfo = ServicePackageInfoSerializer(
+ help_text="Service Package Info",
+ required=False,
+ allow_null=True
+ )
+
+
+class ServicePackagesSerializer(serializers.ListSerializer):
+ child = ServicePackageSerializer()
+
+
+class NfPackageDistributeRequestSerializer(serializers.Serializer):
+ csarId = serializers.CharField(
+ help_text="CSAR ID",
+ required=True
+ )
+ vimIds = serializers.ListField(
+ help_text="A string for vimIds",
+ child=serializers.CharField(),
+ required=False
+ )
+ labVimId = serializers.CharField(
+ help_text="A list of VIM IDs.",
+ allow_blank=True,
+ required=False
+ )
+
+
+class NfPackageInfoSerializer(serializers.Serializer):
+ vnfdId = serializers.CharField(
+ help_text="VNFD ID",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ vnfPackageId = serializers.CharField(
+ help_text="VNF Package ID",
+ required=True
+ )
+ vnfdProvider = serializers.CharField(
+ help_text="VNFD Provider",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ vnfdVersion = serializers.CharField(
+ help_text="VNFD Version",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ vnfVersion = serializers.CharField(
+ help_text="VNF Version",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ csarName = serializers.CharField(
+ help_text="CSAR Name",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ vnfdModel = serializers.CharField(
+ help_text="VNFD Model",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ downloadUrl = serializers.CharField(
+ help_text="URL to download VNFD Model",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+
+
+class NfImageInfoSerializer(serializers.Serializer):
+ index = serializers.CharField(
+ help_text="Index of VNF Image",
+ required=True
+ )
+ fileName = serializers.CharField(
+ help_text="Image file name",
+ required=True
+ )
+ imageId = serializers.CharField(
+ help_text="Image ID",
+ required=True
+ )
+ vimId = serializers.CharField(
+ help_text="VIM ID",
+ required=True
+ )
+ vimUser = serializers.CharField(
+ help_text="User of VIM",
+ required=True
+ )
+ tenant = serializers.CharField(
+ help_text="Tenant",
+ required=True
+ )
+ status = serializers.CharField(
+ help_text="Status",
+ required=True
+ )
+
+
+class NfPackageSerializer(serializers.Serializer):
+ csarId = serializers.CharField(
+ help_text="CSAR ID",
+ required=True
+ )
+ packageInfo = NfPackageInfoSerializer(
+ help_text="VNF Package Info",
+ required=True
+ )
+ imageInfo = NfImageInfoSerializer(
+ help_text="Image Info",
+ required=False,
+ many=True,
+ allow_null=True
+ )
+
+
+class NfPackagesSerializer(serializers.ListSerializer):
+ child = NfPackageSerializer()
+
+
+class PostJobResponseSerializer(serializers.Serializer):
+ jobId = serializers.CharField(
+ help_text="jobId",
+ required=True
+ )
+
+
+class ParseModelRequestSerializer(serializers.Serializer):
+ csarId = serializers.CharField(
+ help_text="CSAR ID",
+ required=True
+ )
+ packageType = serializers.CharField(
+ help_text="Package type: VNF, PNF, NS, Service",
+ required=False
+ )
+ inputs = serializers.JSONField(
+ help_text="Inputs",
+ required=False
+ )
+
+
+class ParseModelResponseSerializer(serializers.Serializer):
+ model = serializers.JSONField(
+ help_text="Model",
+ required=True
+ )
+
+
+class EtsiNsdInfoModelSerializer(serializers.ModelSerializer):
+
+ class Meta:
+ model = EtsiNsdInfoModel
+
+
+class EtsiVnfdInfoModelSerializer(serializers.ModelSerializer):
+
+ class Meta:
+ model = EtsiVnfdInfoModel
+
+
+class ParseNSPackageResponseSerializer(serializers.Serializer):
+ model = EtsiNsdInfoModelSerializer(
+ help_text="NSD Model",
+ required=True
+ )
+
+
+class ParseNfPackageResponseSerializer(serializers.Serializer):
+ model = EtsiVnfdInfoModelSerializer(
+ help_text="VNFD Model",
+ required=True
+ )
diff --git a/catalog/packages/serializers/checksum.py b/catalog/packages/serializers/checksum.py
new file mode 100644
index 0000000..1296626
--- /dev/null
+++ b/catalog/packages/serializers/checksum.py
@@ -0,0 +1,30 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
+class ChecksumSerializer(serializers.Serializer):
+ algorithm = serializers.CharField(
+ help_text="Name of the algorithm used to generate the checksum.",
+ required=True,
+ allow_null=False,
+ allow_blank=False
+ )
+ hash = serializers.CharField(
+ help_text="The hexadecimal value of the checksum.",
+ required=True,
+ allow_null=True,
+ allow_blank=False
+ )
diff --git a/catalog/packages/serializers/create_nsd_info_request.py b/catalog/packages/serializers/create_nsd_info_request.py
new file mode 100644
index 0000000..24fe3b7
--- /dev/null
+++ b/catalog/packages/serializers/create_nsd_info_request.py
@@ -0,0 +1,29 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
+class CreateNsdInfoRequestSerializer(serializers.Serializer):
+ userDefinedData = serializers.DictField(
+ help_text="User-defined data for the NS descriptor resource to be created."
+ "It shall be present when the user defined data is set for the individual NS "
+ "descriptor resource to be created.",
+ child=serializers.CharField(
+ help_text='Key Value Pairs',
+ allow_blank=True
+ ),
+ required=False,
+ allow_null=True
+ )
diff --git a/catalog/packages/serializers/create_pnfd_info_request.py b/catalog/packages/serializers/create_pnfd_info_request.py
new file mode 100644
index 0000000..01d8229
--- /dev/null
+++ b/catalog/packages/serializers/create_pnfd_info_request.py
@@ -0,0 +1,29 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
+class CreatePnfdInfoRequestSerializer(serializers.Serializer):
+ userDefinedData = serializers.DictField(
+ help_text="User-defined data for the PNF descriptor resource to be created."
+ "It shall be present when the user defined data is set for the individual "
+ "PNF descriptor resource to be created.",
+ child=serializers.CharField(
+ help_text='Key Value Pairs',
+ allow_blank=True
+ ),
+ required=False,
+ allow_null=True
+ )
diff --git a/catalog/packages/serializers/create_vnf_pkg_info_req.py b/catalog/packages/serializers/create_vnf_pkg_info_req.py
new file mode 100644
index 0000000..6da281d
--- /dev/null
+++ b/catalog/packages/serializers/create_vnf_pkg_info_req.py
@@ -0,0 +1,27 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
+class CreateVnfPkgInfoRequestSerializer(serializers.Serializer):
+ userDefinedData = serializers.DictField(
+ help_text="User defined data for the VNF package.",
+ child=serializers.CharField(
+ help_text="KeyValue Pairs",
+ allow_blank=True
+ ),
+ required=False,
+ allow_null=True
+ )
diff --git a/catalog/packages/serializers/link.py b/catalog/packages/serializers/link.py
new file mode 100644
index 0000000..a6a503c
--- /dev/null
+++ b/catalog/packages/serializers/link.py
@@ -0,0 +1,24 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
+class LinkSerializer(serializers.Serializer):
+ href = serializers.CharField(
+ help_text='URI of the referenced resource',
+ required=True,
+ allow_null=False,
+ allow_blank=False
+ )
diff --git a/catalog/packages/serializers/nsd_info.py b/catalog/packages/serializers/nsd_info.py
new file mode 100644
index 0000000..9450582
--- /dev/null
+++ b/catalog/packages/serializers/nsd_info.py
@@ -0,0 +1,161 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .problem_details import ProblemDetailsSerializer
+from .link import LinkSerializer
+
+
+class _LinkSerializer(serializers.Serializer):
+ self = LinkSerializer(
+ help_text="URI of this resource.",
+ required=True,
+ allow_null=False
+ )
+ nsd_content = LinkSerializer(
+ help_text="Link to the NSD content resource.",
+ required=True,
+ allow_null=False
+ )
+
+ class Meta:
+ ref_name = "NSD_LinkSerializer"
+
+
+class NsdInfoSerializer(serializers.Serializer):
+ id = serializers.CharField(
+ help_text="Identifier of the onboarded individual NS descriptor resource."
+ "This identifier is allocated by the NFVO.",
+ required=True,
+ allow_null=False,
+ allow_blank=False
+ )
+ nsdId = serializers.CharField(
+ help_text="This identifier, which is allocated by the NSD designer,"
+ "identifies the NSD in a globally unique way."
+ "It is copied from the NSD content and shall be present after the "
+ "NSD content is on-boarded.",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ nsdName = serializers.CharField(
+ help_text="Name of the onboarded NSD."
+ "This information is copied from the NSD content and shall be present "
+ "after the NSD content is on-boarded.",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ nsdVersion = serializers.CharField( # TODO: data type is version
+ help_text="Version of the on-boarded NSD."
+ "This information is copied from the NSD content and shall be "
+ "present after the NSD content is on-boarded.",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ nsdDesigner = serializers.CharField(
+ help_text="Designer of the on-boarded NSD."
+ "This information is copied from the NSD content and shall be "
+ "present after the NSD content is on-boarded.",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ nsdInvariantId = serializers.CharField(
+ help_text="This identifier, which is allocated by the NSD designer,"
+ "identifies an NSD in a version independent manner."
+ "This information is copied from the NSD content and shall be "
+ "present after the NSD content is on-boarded.",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ vnfPkgIds = serializers.ListSerializer(
+ help_text="Identifies the VNF package for the VNFD referenced "
+ "by the on-boarded NS descriptor resource.",
+ child=serializers.CharField(
+ help_text="Identifier of the VNF package",
+ allow_blank=True
+ ),
+ required=False,
+ allow_null=True,
+ allow_empty=True
+ )
+ pnfdInfoIds = serializers.ListSerializer(
+ help_text="Identifies the PnfdInfo element for the PNFD referenced "
+ "by the on-boarded NS descriptor resource.",
+ child=serializers.CharField(
+ help_text="Identifier of the PnfdInfo element",
+ allow_blank=True
+ ),
+ required=False,
+ allow_null=True,
+ allow_empty=True
+ )
+ nestedNsdInfoIds = serializers.ListSerializer(
+ help_text="Identifies the NsdInfo element for the nested NSD referenced "
+ "by the on-boarded NS descriptor resource.",
+ child=serializers.CharField(
+ help_text="Identifier of the NsdInfo element",
+ allow_blank=True
+ ),
+ required=False,
+ allow_null=True,
+ allow_empty=True
+ )
+ nsdOnboardingState = serializers.ChoiceField(
+ help_text="Onboarding state of the individual NS descriptor resource.",
+ choices=["CREATED", "UPLOADING", "PROCESSING", "ONBOARDED"],
+ required=True,
+ allow_null=False,
+ allow_blank=False
+ )
+ onboardingFailureDetails = ProblemDetailsSerializer(
+ help_text="Failure details of current onboarding procedure."
+ "It shall be present when the nsdOnboardingState attribute is CREATED "
+ "and the uploading or processing fails in NFVO.",
+ required=False,
+ allow_null=True,
+ )
+ nsdOperationalState = serializers.ChoiceField(
+ help_text="Operational state of the individual NS descriptor resource."
+ "This attribute can be modified with the PATCH method.",
+ choices=["ENABLED", "DISABLED"],
+ required=True,
+ allow_null=False,
+ allow_blank=False
+ )
+ nsdUsageState = serializers.ChoiceField(
+ help_text="Usage state of the individual NS descriptor resource.",
+ choices=["IN_USE", "NOT_IN_USE"],
+ required=True,
+ allow_null=False,
+ )
+ userDefinedData = serializers.DictField(
+ help_text="User defined data for the individual NS descriptor resource."
+ "This attribute can be modified with the PATCH method.",
+ child=serializers.CharField(
+ help_text="Key Value Pairs",
+ allow_blank=True
+ ),
+ required=False,
+ allow_null=True
+ )
+ _links = _LinkSerializer(
+ help_text="Links to resources related to this resource.",
+ required=True,
+ allow_null=True # TODO: supposed to be False
+ )
diff --git a/catalog/packages/serializers/nsd_infos.py b/catalog/packages/serializers/nsd_infos.py
new file mode 100644
index 0000000..d63c332
--- /dev/null
+++ b/catalog/packages/serializers/nsd_infos.py
@@ -0,0 +1,20 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .nsd_info import NsdInfoSerializer
+
+
+class NsdInfosSerializer(serializers.ListSerializer):
+ child = NsdInfoSerializer()
diff --git a/catalog/packages/serializers/nsdm_filter_data.py b/catalog/packages/serializers/nsdm_filter_data.py
new file mode 100644
index 0000000..47d7680
--- /dev/null
+++ b/catalog/packages/serializers/nsdm_filter_data.py
@@ -0,0 +1,177 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from catalog.packages.const import NSDM_NOTIFICATION_TYPES
+
+
+class NsdmNotificationsFilter(serializers.Serializer):
+ notificationTypes = serializers.ListField(
+ child=serializers.ChoiceField(
+ required=True,
+ choices=NSDM_NOTIFICATION_TYPES
+ ),
+ help_text="Match particular notification types",
+ allow_null=False,
+ required=False
+ )
+ nsdInfoId = serializers.ListField(
+ child=serializers.UUIDField(),
+ help_text="Match NS packages with particular nsdInfoIds",
+ allow_null=False,
+ required=False
+ )
+ nsdId = serializers.ListField(
+ child=serializers.UUIDField(),
+ help_text="Match NS Packages with particular nsdIds",
+ allow_null=False,
+ required=False
+ )
+ nsdName = serializers.ListField(
+ child=serializers.CharField(
+ max_length=255,
+ required=True
+ ),
+ help_text="Match NS Packages with particular nsdNames",
+ allow_null=False,
+ required=False
+ )
+ nsdVersion = serializers.ListField(
+ child=serializers.CharField(
+ max_length=255,
+ required=True
+ ),
+ help_text="match NS packages that belong to certain nsdversion",
+ required=False,
+ allow_null=False
+ )
+ nsdInvariantId = serializers.ListField(
+ child=serializers.UUIDField(),
+ help_text="Match NS Packages with particular nsdInvariantIds",
+ allow_null=False,
+ required=False
+ )
+ vnfPkgIds = serializers.ListField(
+ child=serializers.UUIDField(),
+ help_text="Match NS Packages that has VNF PackageIds",
+ allow_null=False,
+ required=False
+ )
+ nestedNsdInfoIds = serializers.ListField(
+ child=serializers.UUIDField(),
+ help_text="Match NS Packages with particular nsdInvariantIds",
+ allow_null=False,
+ required=False
+ )
+ nsdOnboardingState = serializers.ListField(
+ child=serializers.ChoiceField(
+ required=True,
+ choices=[
+ 'CREATED',
+ 'UPLOADING',
+ 'PROCESSING',
+ 'ONBOARDED'
+ ]
+ ),
+ help_text="Match NS Packages with particular NS Onboarding State",
+ allow_null=False,
+ required=False
+ )
+ nsdOperationalState = serializers.ListField(
+ child=serializers.ChoiceField(
+ required=True,
+ choices=['ENABLED', 'DISABLED']
+ ),
+ help_text="Match NS Packages with particular NS Operational State",
+ allow_null=False,
+ required=False
+ )
+ nsdUsageState = serializers.ListField(
+ child=serializers.ChoiceField(
+ required=True,
+ choices=['IN_USE', 'NOT_IN_USE']
+ ),
+ help_text="Match NS Packages with particular NS Usage State",
+ allow_null=False,
+ required=False
+ )
+ pnfdInfoIds = serializers.ListField(
+ child=serializers.UUIDField(),
+ help_text="Match PF packages with particular pnfdInfoIds",
+ allow_null=False,
+ required=False
+ )
+ pnfdId = serializers.ListField(
+ child=serializers.UUIDField(),
+ help_text="Match PF packages with particular pnfdInfoIds",
+ allow_null=False,
+ required=False
+ )
+ pnfdName = serializers.ListField(
+ child=serializers.CharField(
+ max_length=255,
+ required=True
+ ),
+ help_text="Match PF Packages with particular pnfdNames",
+ allow_null=False,
+ required=False
+ )
+ pnfdVersion = serializers.ListField(
+ child=serializers.CharField(
+ max_length=255,
+ required=True
+ ),
+ help_text="match PF packages that belong to certain pnfd version",
+ required=False,
+ allow_null=False
+ )
+ pnfdProvider = serializers.ListField(
+ child=serializers.CharField(
+ max_length=255,
+ required=True
+ ),
+ help_text="Match PF Packages with particular pnfdProvider",
+ allow_null=False,
+ required=False
+ )
+ pnfdInvariantId = serializers.ListField(
+ child=serializers.UUIDField(),
+ help_text="Match PF Packages with particular pnfdInvariantIds",
+ allow_null=False,
+ required=False
+ )
+ pnfdOnboardingState = serializers.ListField(
+ child=serializers.ChoiceField(
+ required=True,
+ choices=[
+ 'CREATED',
+ 'UPLOADING',
+ 'PROCESSING',
+ 'ONBOARDED'
+ ]
+ ),
+ help_text="Match PF Packages with particular PNF Onboarding State ",
+ allow_null=False,
+ required=False
+ )
+ pnfdUsageState = serializers.ListField(
+ child=serializers.ChoiceField(
+ required=True,
+ choices=['IN_USE', 'NOT_IN_USE']
+ ),
+ help_text="Match PF Packages with particular PNF usage State",
+ allow_null=False,
+ required=False
+ )
diff --git a/catalog/packages/serializers/nsdm_subscription.py b/catalog/packages/serializers/nsdm_subscription.py
new file mode 100644
index 0000000..87aa48d
--- /dev/null
+++ b/catalog/packages/serializers/nsdm_subscription.py
@@ -0,0 +1,84 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from .link import LinkSerializer
+from .subscription_auth_data import SubscriptionAuthenticationSerializer
+from .nsdm_filter_data import NsdmNotificationsFilter
+
+
+class NsdmSubscriptionLinkSerializer(serializers.Serializer):
+ self = LinkSerializer(
+ help_text="Links to resources related to this resource.",
+ required=True
+ )
+
+
+class NsdmSubscriptionSerializer(serializers.Serializer):
+ id = serializers.CharField(
+ help_text="Identifier of this subscription resource.",
+ max_length=255,
+ required=True,
+ allow_null=False
+ )
+ callbackUri = serializers.CharField(
+ help_text="The URI of the endpoint to send the notification to.",
+ max_length=255,
+ required=True,
+ allow_null=False
+ )
+ filter = NsdmNotificationsFilter(
+ help_text="Filter settings for this subscription, to define the "
+ "of all notifications this subscription relates to.",
+ required=False
+ )
+ _links = NsdmSubscriptionLinkSerializer(
+ help_text="Links to resources related to this resource.",
+ required=True
+ )
+
+
+class NsdmSubscriptionsSerializer(serializers.ListSerializer):
+ child = NsdmSubscriptionSerializer()
+
+
+class NsdmSubscriptionIdSerializer(serializers.Serializer):
+ subscription_id = serializers.UUIDField(
+ help_text="Identifier of this subscription resource.",
+ required=True,
+ allow_null=False
+ )
+
+
+class NsdmSubscriptionRequestSerializer(serializers.Serializer):
+ callbackUri = serializers.CharField(
+ help_text="The URI of the endpoint to send the notification to.",
+ required=True,
+ allow_null=False
+ )
+ filter = NsdmNotificationsFilter(
+ help_text="Filter settings for the subscription,"
+ " to define the subset of all "
+ "notifications this subscription relates to.",
+ required=False,
+ allow_null=True
+ )
+ authentication = SubscriptionAuthenticationSerializer(
+ help_text="Authentication parameters to configure"
+ " the use of Authorization when sending "
+ "notifications corresponding to this subscription.",
+ required=False,
+ allow_null=True
+ )
diff --git a/catalog/packages/serializers/pnfd_info.py b/catalog/packages/serializers/pnfd_info.py
new file mode 100644
index 0000000..f9f4b6b
--- /dev/null
+++ b/catalog/packages/serializers/pnfd_info.py
@@ -0,0 +1,107 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .problem_details import ProblemDetailsSerializer
+from .link import LinkSerializer
+
+
+class _LinkSerializer(serializers.Serializer):
+ self = LinkSerializer(
+ help_text='URI of this resource.',
+ required=True,
+ allow_null=False
+ )
+ pnfd_content = LinkSerializer(
+ help_text='Link to the PNFD content resource.',
+ required=True,
+ allow_null=False
+ )
+
+
+class PnfdInfoSerializer(serializers.Serializer):
+ id = serializers.CharField(
+ help_text='Identifier of the onboarded individual PNF descriptor resource. \
+ This identifier is allocated by the NFVO.',
+ required=True,
+ allow_null=False,
+ allow_blank=False
+ )
+ pnfdId = serializers.CharField(
+ help_text='This identifier, which is allocated by the PNFD designer, \
+ identifies the PNFD in a globally unique way. \
+ It is copied from the PNFD content and shall be present after the PNFD content is on-boarded.',
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ pnfdName = serializers.CharField(
+ help_text='Name of the onboarded PNFD. \
+ This information is copied from the PNFD content and shall be present after the PNFD content is on-boarded.',
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ pnfdVersion = serializers.CharField( # TODO: data type is version
+ help_text='Version of the on-boarded PNFD. \
+ This information is copied from the PNFD content and shall be present after the PNFD content is on-boarded.',
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ pnfdProvider = serializers.CharField(
+ help_text='Provider of the on-boarded PNFD. \
+ This information is copied from the PNFD content and shall be present after the PNFD content is on-boarded.',
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ pnfdInvariantId = serializers.CharField(
+ help_text='Identifies a PNFD in a version independent manner. \
+ This attribute is invariant across versions of PNFD.',
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ pnfdOnboardingState = serializers.ChoiceField(
+ help_text='Onboarding state of the individual PNF descriptor resource.',
+ choices=['CREATED', 'UPLOADING', 'PROCESSING', 'ONBOARDED'],
+ required=True,
+ allow_null=False,
+ allow_blank=False
+ )
+ onboardingFailureDetails = ProblemDetailsSerializer(
+ help_text='Failure details of current onboarding procedure. \
+ It shall be present when the "pnfdOnboardingState" attribute is CREATED and the uploading or processing fails in NFVO.',
+ required=False,
+ allow_null=True,
+ )
+ pnfdUsageState = serializers.ChoiceField(
+ help_text='Usage state of the individual PNF descriptor resource.',
+ choices=['IN_USE', 'NOT_IN_USE'],
+ required=True,
+ allow_null=False,
+ )
+ userDefinedData = serializers.DictField(
+ help_text='User defined data for the individual PNF descriptor resource. \
+ This attribute can be modified with the PATCH method.',
+ child=serializers.CharField(help_text='Key Value Pairs', allow_blank=True),
+ required=False,
+ allow_null=True
+ )
+ _links = _LinkSerializer(
+ help_text='Links to resources related to this resource.',
+ required=True,
+ allow_null=True # TODO: supposed to be False
+ )
diff --git a/catalog/packages/serializers/pnfd_infos.py b/catalog/packages/serializers/pnfd_infos.py
new file mode 100644
index 0000000..0874c9e
--- /dev/null
+++ b/catalog/packages/serializers/pnfd_infos.py
@@ -0,0 +1,20 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .pnfd_info import PnfdInfoSerializer
+
+
+class PnfdInfosSerializer(serializers.ListSerializer):
+ child = PnfdInfoSerializer()
diff --git a/catalog/packages/serializers/problem_details.py b/catalog/packages/serializers/problem_details.py
new file mode 100644
index 0000000..68d4500
--- /dev/null
+++ b/catalog/packages/serializers/problem_details.py
@@ -0,0 +1,58 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
+class ProblemDetailsSerializer(serializers.Serializer):
+ type = serializers.CharField(
+ help_text='A URI reference according to IETF RFC 3986 [10] that identifies the problem type. \
+ It is encouraged that the URI provides human-readable documentation for the problem (e.g. using HTML) when dereferenced. \
+ When this member is not present, its value is assumed to be "about:blank".',
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ title = serializers.CharField(
+ help_text='A short, human-readable summary of the problem type. \
+ It should not change from occurrence to occurrence of the problem, except for purposes of localization. \
+ If type is given and other than "about:blank", this attribute shall also be provided.',
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+    status = serializers.IntegerField(
+ help_text='The HTTP status code for this occurrence of the problem.',
+ required=True,
+ allow_null=False
+ )
+ detail = serializers.CharField(
+ help_text='A human-readable explanation specific to this occurrence of the problem.',
+ required=True,
+ allow_null=False,
+ allow_blank=False
+ )
+ instance = serializers.CharField(
+ help_text='A URI reference that identifies the specific occurrence of the problem. \
+ It may yield further information if dereferenced.',
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ additional_attributes = serializers.DictField(
+ help_text='Any number of additional attributes, as defined in a specification or by an implementation.',
+ child=serializers.CharField(help_text='Additional attribute', allow_blank=True),
+ required=False,
+ allow_null=True,
+ )
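
A minimal usage sketch for the serializer above, assuming a configured Django/DRF environment such as the one the project's tests run under; the payload values are illustrative only.

    from catalog.packages.serializers.problem_details import ProblemDetailsSerializer

    # Illustrative RFC 7807-style error payload; values are made up.
    payload = {
        "type": "about:blank",
        "title": "Not Found",
        "status": 404,
        "detail": "NS descriptor 22 does not exist.",
        "instance": "/api/nsd/v1/ns_descriptors/22",
    }

    serializer = ProblemDetailsSerializer(data=payload)
    # is_valid() checks the required fields (status, detail) and field types.
    if serializer.is_valid():
        validated = serializer.validated_data
    else:
        errors = serializer.errors
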
diff --git a/catalog/packages/serializers/response.py b/catalog/packages/serializers/response.py
new file mode 100644
index 0000000..e2cca92
--- /dev/null
+++ b/catalog/packages/serializers/response.py
@@ -0,0 +1,51 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
+class ProblemDetailsSerializer(serializers.Serializer):
+ type = serializers.CharField(
+ help_text="Type",
+ required=False,
+ allow_null=True
+ )
+ title = serializers.CharField(
+ help_text="Title",
+ required=False,
+ allow_null=True
+ )
+ status = serializers.IntegerField(
+ help_text="Status",
+ required=True
+ )
+ detail = serializers.CharField(
+ help_text="Detail",
+ required=True,
+ allow_null=True
+ )
+ instance = serializers.CharField(
+ help_text="Instance",
+ required=False,
+ allow_null=True
+ )
+ additional_details = serializers.ListField(
+ help_text="Any number of additional attributes, as defined in a "
+ "specification or by an implementation.",
+ required=False,
+ allow_null=True
+ )
+
+ class Meta:
+ ref_name = 'SUBSCRIPTION_ProblemDetailsSerializer'
diff --git a/catalog/packages/serializers/subscription_auth_data.py b/catalog/packages/serializers/subscription_auth_data.py
new file mode 100644
index 0000000..bf512d6
--- /dev/null
+++ b/catalog/packages/serializers/subscription_auth_data.py
@@ -0,0 +1,77 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from catalog.packages import const
+
+
+class OAuthCredentialsSerializer(serializers.Serializer):
+ clientId = serializers.CharField(
+ help_text="Client identifier to be used in the access token "
+ "request of the OAuth 2.0 client credentials grant type.",
+ required=False,
+ max_length=255,
+ allow_null=False
+ )
+ clientPassword = serializers.CharField(
+ help_text="Client password to be used in the access token "
+ "request of the OAuth 2.0 client credentials grant type.",
+ required=False,
+ max_length=255,
+ allow_null=False
+ )
+ tokenEndpoint = serializers.CharField(
+ help_text="The token endpoint from which the access token can "
+ "be obtained.",
+ required=False,
+ max_length=255,
+ allow_null=False
+ )
+
+
+class BasicAuthSerializer(serializers.Serializer):
+ userName = serializers.CharField(
+ help_text="Username to be used in HTTP Basic authentication.",
+ max_length=255,
+ required=False,
+ allow_null=False
+ )
+ password = serializers.CharField(
+ help_text="Password to be used in HTTP Basic authentication.",
+ max_length=255,
+ required=False,
+ allow_null=False
+ )
+
+
+class SubscriptionAuthenticationSerializer(serializers.Serializer):
+ authType = serializers.ListField(
+ child=serializers.ChoiceField(required=True, choices=const.AUTH_TYPES),
+ help_text="Defines the types of Authentication / Authorization "
+ "which the API consumer is willing to accept when "
+ "receiving a notification.",
+ required=True
+ )
+ paramsBasic = BasicAuthSerializer(
+ help_text="Parameters for authentication/authorization using BASIC.",
+ required=False,
+ allow_null=False
+ )
+ paramsOauth2ClientCredentials = OAuthCredentialsSerializer(
+ help_text="Parameters for authentication/authorization using "
+ "OAUTH2_CLIENT_CREDENTIALS.",
+ required=False,
+ allow_null=False
+ )
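
A validation sketch for the authentication serializer above; it assumes "BASIC" is one of the entries in catalog.packages.const.AUTH_TYPES (the value the tests later in this change rely on), and all values are illustrative.

    from catalog.packages.serializers.subscription_auth_data import (
        SubscriptionAuthenticationSerializer
    )

    # Illustrative BASIC-auth parameters; "BASIC" is assumed to be in const.AUTH_TYPES.
    auth_data = {
        "authType": ["BASIC"],
        "paramsBasic": {"userName": "user", "password": "secret"},
    }

    serializer = SubscriptionAuthenticationSerializer(data=auth_data)
    assert serializer.is_valid(), serializer.errors
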
diff --git a/catalog/packages/serializers/upload_vnf_pkg_from_uri_req.py b/catalog/packages/serializers/upload_vnf_pkg_from_uri_req.py
new file mode 100644
index 0000000..b847484
--- /dev/null
+++ b/catalog/packages/serializers/upload_vnf_pkg_from_uri_req.py
@@ -0,0 +1,36 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
+class UploadVnfPackageFromUriRequestSerializer(serializers.Serializer):
+ addressInformation = serializers.CharField(
+ help_text="Address information of the VNF package content.",
+ required=True,
+ allow_null=False,
+ allow_blank=False
+ )
+ userName = serializers.CharField(
+ help_text="User name to be used for authentication.",
+ required=False,
+ allow_null=False,
+ allow_blank=False
+ )
+ password = serializers.CharField(
+ help_text="Password to be used for authentication.",
+ required=False,
+ allow_null=False,
+ allow_blank=False
+ )
diff --git a/catalog/packages/serializers/vnf_pkg_artifact_info.py b/catalog/packages/serializers/vnf_pkg_artifact_info.py
new file mode 100644
index 0000000..c63b3c2
--- /dev/null
+++ b/catalog/packages/serializers/vnf_pkg_artifact_info.py
@@ -0,0 +1,39 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .checksum import ChecksumSerializer
+
+
+class VnfPackageArtifactInfoSerializer(serializers.Serializer):
+ artifactPath = serializers.CharField(
+ help_text="Path in the VNF package.",
+ required=True,
+ allow_null=False,
+ allow_blank=False
+ )
+ checksum = ChecksumSerializer(
+ help_text="Checksum of the artifact file.",
+ required=True,
+ allow_null=False
+ )
+ metadata = serializers.DictField(
+ help_text="The metadata of the artifact that are available in the VNF package",
+ child=serializers.CharField(
+ help_text="KeyValue Pairs",
+ allow_blank=True
+ ),
+ required=False,
+ allow_null=True
+ )
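
A validation sketch for the artifact serializer above; it assumes the ChecksumSerializer exposes "algorithm" and "hash" fields in line with the ETSI Checksum data type, and the artifact values are illustrative.

    from catalog.packages.serializers.vnf_pkg_artifact_info import (
        VnfPackageArtifactInfoSerializer
    )

    # Illustrative artifact entry; the checksum sub-structure assumes the
    # ChecksumSerializer fields are "algorithm" and "hash".
    artifact = {
        "artifactPath": "Scripts/install.sh",
        "checksum": {
            "algorithm": "SHA-256",
            "hash": "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08",
        },
        "metadata": {"contentType": "text/x-shellscript"},
    }

    serializer = VnfPackageArtifactInfoSerializer(data=artifact)
    assert serializer.is_valid(), serializer.errors
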
diff --git a/catalog/packages/serializers/vnf_pkg_info.py b/catalog/packages/serializers/vnf_pkg_info.py
new file mode 100644
index 0000000..3fa4b17
--- /dev/null
+++ b/catalog/packages/serializers/vnf_pkg_info.py
@@ -0,0 +1,127 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .checksum import ChecksumSerializer
+from .vnf_pkg_software_image_info import VnfPackageSoftwareImageInfoSerializer
+from .vnf_pkg_artifact_info import VnfPackageArtifactInfoSerializer
+from .link import LinkSerializer
+
+
+class _LinkSerializer(serializers.Serializer):
+ self = LinkSerializer(
+ help_text='URI of this resource.',
+ required=True,
+ allow_null=False
+ )
+ vnfd = LinkSerializer(
+ help_text='Link to the VNFD resource.',
+ required=False,
+ allow_null=False
+ )
+ packageContent = LinkSerializer(
+        help_text='Link to the VNF package content resource.',
+ required=True,
+ allow_null=False
+ )
+
+ class Meta:
+ ref_name = 'VNF_PKGM_Link_Serializer'
+
+
+class VnfPkgInfoSerializer(serializers.Serializer):
+ id = serializers.CharField(
+ help_text="Identifier of the on-boarded VNF package.",
+ required=True,
+ allow_null=False,
+ allow_blank=False
+ )
+ vnfdId = serializers.CharField(
+ help_text="This identifier, which is managed by the VNF provider, "
+ "identifies the VNF package and the VNFD in a globally unique way.",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ vnfProvider = serializers.CharField(
+ help_text="Provider of the VNF package and the VNFD.",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ vnfProductName = serializers.CharField(
+ help_text="Name to identify the VNF product.",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ vnfSoftwareVersion = serializers.CharField(
+ help_text="Software version of the VNF.",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ vnfdVersion = serializers.CharField(
+        help_text="The version of the VNFD.",
+ required=False,
+ allow_null=True,
+ allow_blank=True
+ )
+ checksum = ChecksumSerializer(
+ help_text="Checksum of the on-boarded VNF package.",
+ required=False,
+ allow_null=True
+ )
+ softwareImages = VnfPackageSoftwareImageInfoSerializer(
+ help_text="Information about VNF package artifacts that are software images.",
+ required=False,
+ allow_null=True,
+ many=True
+ )
+ additionalArtifacts = VnfPackageArtifactInfoSerializer(
+ help_text="Information about VNF package artifacts contained in "
+ "the VNF package that are not software images.",
+ required=False,
+ allow_null=True,
+ many=True
+ )
+ onboardingState = serializers.ChoiceField(
+ help_text="On-boarding state of the VNF package.",
+ choices=["CREATED", "UPLOADING", "PROCESSING", "ONBOARDED"],
+ required=True,
+ allow_null=True
+ )
+ operationalState = serializers.ChoiceField(
+ help_text="Operational state of the VNF package.",
+ choices=["ENABLED", "DISABLED"],
+ required=True,
+ allow_null=True
+ )
+ usageState = serializers.ChoiceField(
+ help_text="Usage state of the VNF package.",
+ choices=["IN_USE", "NOT_IN_USE"],
+ required=True,
+ allow_null=True
+ )
+ userDefinedData = serializers.DictField(
+ help_text="User defined data for the VNF package.",
+ child=serializers.CharField(help_text="KeyValue Pairs", allow_blank=True),
+ required=False,
+ allow_null=True
+ )
+ _links = _LinkSerializer(
+ help_text='Links to resources related to this resource.',
+ required=True,
+ allow_null=True # TODO supposed to be False
+ )
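
A validation sketch for VnfPkgInfoSerializer; it assumes the shared LinkSerializer (catalog/packages/serializers/link.py) exposes an "href" field, as the subscription LinkSerializer later in this change does, and all values are illustrative.

    from catalog.packages.serializers.vnf_pkg_info import VnfPkgInfoSerializer

    # Illustrative package-info payload covering the mandatory attributes.
    pkg_info = {
        "id": "vnf-pkg-001",
        "onboardingState": "ONBOARDED",
        "operationalState": "ENABLED",
        "usageState": "NOT_IN_USE",
        "_links": {
            "self": {"href": "/api/vnfpkgm/v1/vnf_packages/vnf-pkg-001"},
            "packageContent": {
                "href": "/api/vnfpkgm/v1/vnf_packages/vnf-pkg-001/package_content"
            },
        },
    }

    serializer = VnfPkgInfoSerializer(data=pkg_info)
    assert serializer.is_valid(), serializer.errors
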
diff --git a/catalog/packages/serializers/vnf_pkg_infos.py b/catalog/packages/serializers/vnf_pkg_infos.py
new file mode 100644
index 0000000..9ffd6f0
--- /dev/null
+++ b/catalog/packages/serializers/vnf_pkg_infos.py
@@ -0,0 +1,20 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .vnf_pkg_info import VnfPkgInfoSerializer
+
+
+class VnfPkgInfosSerializer(serializers.ListSerializer):
+ child = VnfPkgInfoSerializer()
diff --git a/catalog/packages/serializers/vnf_pkg_notifications.py b/catalog/packages/serializers/vnf_pkg_notifications.py
new file mode 100644
index 0000000..5e023af
--- /dev/null
+++ b/catalog/packages/serializers/vnf_pkg_notifications.py
@@ -0,0 +1,117 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from catalog.packages.const import NOTIFICATION_TYPES
+
+PackageOperationalStateType = ["ENABLED", "DISABLED"]
+PackageUsageStateType = ["IN_USE", "NOT_IN_USE"]
+
+
+class VersionSerializer(serializers.Serializer):
+ vnfSoftwareVersion = serializers.CharField(
+ help_text="VNF software version to match.",
+ max_length=255,
+ required=True,
+ allow_null=False
+ )
+ vnfdVersions = serializers.ListField(
+ child=serializers.CharField(),
+        help_text="Match VNF packages that contain "
+                  "VNF products with certain VNFD versions.",
+ required=False,
+ allow_null=False
+ )
+
+
+class vnfProductsSerializer(serializers.Serializer):
+ vnfProductName = serializers.CharField(
+ help_text="Name of the VNF product to match.",
+ max_length=255,
+ required=True,
+ allow_null=False
+ )
+ versions = VersionSerializer(
+        help_text="Match VNF packages that contain "
+                  "VNF products with certain versions.",
+ required=False,
+ allow_null=False
+ )
+
+
+class vnfProductsProvidersSerializer(serializers.Serializer):
+ vnfProvider = serializers.CharField(
+        help_text="Name of the VNF provider to match.",
+ max_length=255,
+ required=True,
+ allow_null=False
+ )
+ vnfProducts = vnfProductsSerializer(
+        help_text="Match VNF packages that contain "
+                  "VNF products with certain product names, "
+                  "from one particular provider.",
+ required=False,
+ allow_null=False
+ )
+
+
+class PkgmNotificationsFilter(serializers.Serializer):
+ notificationTypes = serializers.ListField(
+ child=serializers.ChoiceField(
+ required=True,
+ choices=NOTIFICATION_TYPES
+ ),
+ help_text="Match particular notification types",
+ allow_null=False,
+ required=False
+ )
+ vnfProductsFromProviders = vnfProductsProvidersSerializer(
+ help_text="Match VNF packages that contain "
+ "VNF products from certain providers.",
+ allow_null=False,
+ required=False
+ )
+ vnfdId = serializers.ListField(
+ child=serializers.UUIDField(),
+        help_text="Match VNF packages with a VNFD identifier "
+                  "listed in the attribute.",
+ required=False,
+ allow_null=False
+ )
+ vnfPkgId = serializers.ListField(
+ child=serializers.UUIDField(),
+        help_text="Match VNF packages with a package identifier "
+                  "listed in the attribute.",
+ required=False,
+ allow_null=False
+ )
+ operationalState = serializers.ListField(
+ child=serializers.ChoiceField(
+ required=True,
+ choices=PackageOperationalStateType
+ ),
+ help_text="Operational state of the VNF package.",
+ allow_null=False,
+ required=False
+ )
+ usageState = serializers.ListField(
+ child=serializers.ChoiceField(
+ required=True,
+ choices=PackageUsageStateType
+ ),
+        help_text="Usage state of the VNF package.",
+ allow_null=False,
+ required=False
+ )
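
A validation sketch for the notification filter above; "VnfPackageOnboardingNotification" is assumed to be among const.NOTIFICATION_TYPES (it is the value used by the test fixtures later in this change), and the remaining values are illustrative.

    from catalog.packages.serializers.vnf_pkg_notifications import PkgmNotificationsFilter

    filter_data = {
        "notificationTypes": ["VnfPackageOnboardingNotification"],
        "operationalState": ["ENABLED"],
        "usageState": ["IN_USE"],
    }

    serializer = PkgmNotificationsFilter(data=filter_data)
    assert serializer.is_valid(), serializer.errors
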
diff --git a/catalog/packages/serializers/vnf_pkg_software_image_info.py b/catalog/packages/serializers/vnf_pkg_software_image_info.py
new file mode 100644
index 0000000..790c61e
--- /dev/null
+++ b/catalog/packages/serializers/vnf_pkg_software_image_info.py
@@ -0,0 +1,96 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .checksum import ChecksumSerializer
+
+
+class VnfPackageSoftwareImageInfoSerializer(serializers.Serializer):
+ id = serializers.CharField(
+ help_text="Identifier of the software image.",
+ required=True,
+ allow_null=False,
+ allow_blank=False
+ )
+ name = serializers.CharField(
+ help_text="Name of the software image.",
+ required=True,
+ allow_null=True,
+ allow_blank=False
+ )
+ provider = serializers.CharField(
+ help_text="Provider of the software image.",
+ required=True,
+ allow_null=True,
+ allow_blank=False
+ )
+ version = serializers.CharField(
+ help_text="Version of the software image.",
+ required=True,
+ allow_null=True,
+ allow_blank=False
+ )
+ checksum = ChecksumSerializer(
+ help_text="Checksum of the software image file.",
+ required=True,
+ allow_null=False
+ )
+ containerFormat = serializers.ChoiceField(
+        help_text="Container format indicates whether the software image is in a file format that also contains metadata about the actual software.",
+ choices=["AKI", "AMI", "ARI", "BARE", "DOCKER", "OVA", "OVF"],
+ required=True,
+ allow_null=True
+ )
+ diskFormat = serializers.ChoiceField(
+ help_text="Disk format of a software image is the format of the underlying disk image.",
+ choices=["AKI", "AMI", "ARI", "ISO", "QCOW2", "RAW", "VDI", "VHD", "VHDX", "VMDK"],
+ required=True,
+ allow_null=True
+ )
+ createdAt = serializers.DateTimeField(
+ help_text="Time when this software image was created.",
+ required=True,
+ format=None,
+ input_formats=None
+ )
+ minDisk = serializers.IntegerField(
+ help_text="The minimal disk for this software image in bytes.",
+ required=True,
+ allow_null=True
+ )
+ minRam = serializers.IntegerField(
+ help_text="The minimal RAM for this software image in bytes.",
+ required=True,
+ allow_null=True
+ )
+ size = serializers.IntegerField(
+ help_text="Size of this software image in bytes.",
+ required=True,
+ allow_null=True
+ )
+ userMetadata = serializers.DictField(
+ help_text="User-defined data.",
+ child=serializers.CharField(
+ help_text="KeyValue Pairs",
+ allow_blank=True
+ ),
+ required=False,
+ allow_null=True
+ )
+ imagePath = serializers.CharField(
+ help_text="Path in the VNF package.",
+ required=True,
+ allow_null=True,
+ allow_blank=False
+ )
diff --git a/catalog/packages/serializers/vnf_pkg_subscription.py b/catalog/packages/serializers/vnf_pkg_subscription.py
new file mode 100644
index 0000000..edcd6fe
--- /dev/null
+++ b/catalog/packages/serializers/vnf_pkg_subscription.py
@@ -0,0 +1,93 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from catalog.packages.serializers import subscription_auth_data
+from catalog.packages.serializers import vnf_pkg_notifications
+
+
+class LinkSerializer(serializers.Serializer):
+ href = serializers.CharField(
+ help_text="URI of the referenced resource.",
+ required=True,
+ allow_null=False,
+ allow_blank=False
+ )
+
+ class Meta:
+ ref_name = 'VNF_SUBSCRIPTION_LINKSERIALIZER'
+
+
+class LinkSelfSerializer(serializers.Serializer):
+ self = LinkSerializer(
+ help_text="URI of this resource.",
+ required=True,
+ allow_null=False
+ )
+
+
+class PkgmSubscriptionRequestSerializer(serializers.Serializer):
+ filters = vnf_pkg_notifications.PkgmNotificationsFilter(
+ help_text="Filter settings for this subscription, "
+ "to define the subset of all notifications"
+ " this subscription relates to",
+ required=False,
+ allow_null=False
+ )
+ callbackUri = serializers.URLField(
+        help_text="Callback URI to send "
+                  "the notification.",
+ required=True,
+ allow_null=False
+ )
+ authentication = subscription_auth_data.SubscriptionAuthenticationSerializer(
+ help_text="Authentication parameters to configure the use of "
+                  "authorization when sending notifications corresponding to "
+                  "this subscription.",
+ required=False,
+ allow_null=False
+ )
+
+
+class PkgmSubscriptionSerializer(serializers.Serializer):
+ id = serializers.UUIDField(
+ help_text="Identifier of this subscription resource.",
+ required=True,
+ allow_null=False
+ )
+ callbackUri = serializers.URLField(
+ help_text="The URI of the endpoint to send the notification to.",
+ required=True,
+ allow_null=False
+ )
+
+ _links = LinkSelfSerializer(
+ help_text="Links to resources related to this resource.",
+ required=True,
+ allow_null=False
+ )
+
+ filter = vnf_pkg_notifications.PkgmNotificationsFilter(
+ help_text="Filter settings for this subscription, "
+ "to define the subset of all notifications"
+ " this subscription relates to",
+ required=False,
+ allow_null=False
+ )
+
+
+class PkgmSubscriptionsSerializer(serializers.ListSerializer):
+ child = PkgmSubscriptionSerializer()
+ allow_empty = True
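
A request-validation sketch tying the serializers above together; the callback URI and filter values are illustrative and mirror the shape of the test fixture added in catalog/packages/tests/const.py below.

    from catalog.packages.serializers.vnf_pkg_subscription import (
        PkgmSubscriptionRequestSerializer
    )

    # Illustrative subscription request; note the request serializer field is "filters".
    subscription_request = {
        "callbackUri": "http://callback.example.com/notification",
        "filters": {"notificationTypes": ["VnfPackageOnboardingNotification"]},
        "authentication": {
            "authType": ["BASIC"],
            "paramsBasic": {"userName": "user", "password": "secret"},
        },
    }

    serializer = PkgmSubscriptionRequestSerializer(data=subscription_request)
    assert serializer.is_valid(), serializer.errors
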
diff --git a/catalog/packages/tests/__init__.py b/catalog/packages/tests/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/catalog/packages/tests/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/packages/tests/const.py b/catalog/packages/tests/const.py
new file mode 100644
index 0000000..991c87c
--- /dev/null
+++ b/catalog/packages/tests/const.py
@@ -0,0 +1,596 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+vnfd_data = {
+ "volume_storages": [
+ {
+ "properties": {
+ "size_of_storage": {
+ "factor": 10,
+ "value": 10000000000,
+ "unit": "GB",
+ "unit_size": 1000000000
+ },
+ "type_of_storage": "volume",
+ "rdma_enabled": False,
+ "size": "10 GB"
+ },
+ "volume_storage_id": "vNAT_Storage_6wdgwzedlb6sq18uzrr41sof7",
+ "description": ""
+ }
+ ],
+ "inputs": {},
+ "vdus": [
+ {
+ "volume_storages": [
+ "vNAT_Storage_6wdgwzedlb6sq18uzrr41sof7"
+ ],
+ "description": "",
+ "dependencies": [],
+ "vls": [],
+ "properties": {
+ "name": "vNat",
+ "configurable_properties": {
+ "test": {
+ "additional_vnfc_configurable_properties": {
+ "aaa": "1",
+ "bbb": "2",
+ "ccc": "3"
+ }
+ }
+ },
+ "description": "the virtual machine of vNat",
+ "nfvi_constraints": [
+ "test"
+ ],
+ "boot_order": [
+ "vNAT_Storage"
+ ]
+ },
+ "vdu_id": "vdu_vNat",
+ "artifacts": [
+ {
+ "artifact_name": "vNatVNFImage",
+ "type": "tosca.artifacts.nfv.SwImage",
+ "properties": {
+ "operating_system": "linux",
+ "sw_image": "/swimages/vRouterVNF_ControlPlane.qcow2",
+ "name": "vNatVNFImage",
+ "container_format": "bare",
+ "min_ram": "1 GB",
+ "disk_format": "qcow2",
+ "supported_virtualisation_environments": [
+ "test_0"
+ ],
+ "version": "1.0",
+ "checksum": "5000",
+ "min_disk": "10 GB",
+ "size": "10 GB"
+ },
+ "file": "/swimages/vRouterVNF_ControlPlane.qcow2"
+ }
+ ],
+ "nfv_compute": {
+ "flavor_extra_specs": {
+ "hw:cpu_sockets": "2",
+ "sw:ovs_dpdk": "true",
+ "hw:cpu_threads": "2",
+ "hw:numa_mem.1": "3072",
+ "hw:numa_mem.0": "1024",
+ "hw:numa_nodes": "2",
+ "hw:numa_cpus.0": "0,1",
+ "hw:numa_cpus.1": "2,3,4,5",
+ "hw:cpu_cores": "2",
+ "hw:cpu_threads_policy": "isolate"
+ },
+ "cpu_frequency": "2.4 GHz",
+ "num_cpus": 2,
+ "mem_size": "10 GB"
+ },
+ "local_storages": [],
+ "image_file": "vNatVNFImage",
+ "cps": []
+ }
+ ],
+ "image_files": [
+ {
+ "properties": {
+ "operating_system": "linux",
+ "sw_image": "/swimages/vRouterVNF_ControlPlane.qcow2",
+ "name": "vNatVNFImage",
+ "container_format": "bare",
+ "min_ram": "1 GB",
+ "disk_format": "qcow2",
+ "supported_virtualisation_environments": [
+ "test_0"
+ ],
+ "version": "1.0",
+ "checksum": "5000",
+ "min_disk": "10 GB",
+ "size": "10 GB"
+ },
+ "image_file_id": "vNatVNFImage",
+ "description": ""
+ }
+ ],
+ "routers": [],
+ "local_storages": [],
+ "vnf_exposed": {
+ "external_cps": [
+ {
+ "key_name": "sriov_plane",
+ "cp_id": "SRIOV_Port"
+ }
+ ],
+ "forward_cps": []
+ },
+ "vls": [
+ {
+ "route_id": "",
+ "vl_id": "sriov_link",
+ "route_external": False,
+ "description": "",
+ "properties": {
+ "vl_flavours": {
+ "vl_id": "aaaa"
+ },
+ "connectivity_type": {
+ "layer_protocol": "ipv4",
+ "flow_pattern": "flat"
+ },
+ "description": "sriov_link",
+ "test_access": [
+ "test"
+ ]
+ }
+ }
+ ],
+ "cps": [
+ {
+ "vl_id": "sriov_link",
+ "vdu_id": "vdu_vNat",
+ "description": "",
+ "cp_id": "SRIOV_Port",
+ "properties": {
+ "address_data": [
+ {
+ "address_type": "ip_address",
+ "l3_address_data": {
+ "ip_address_type": "ipv4",
+ "floating_ip_activated": False,
+ "number_of_ip_address": 1,
+ "ip_address_assignment": True
+ }
+ }
+ ],
+ "description": "sriov port",
+ "layer_protocol": "ipv4",
+ "virtual_network_interface_requirements": [
+ {
+ "requirement": {
+ "SRIOV": "true"
+ },
+ "support_mandatory": False,
+ "name": "sriov",
+ "description": "sriov"
+ },
+ {
+ "requirement": {
+ "SRIOV": "False"
+ },
+ "support_mandatory": False,
+ "name": "normal",
+ "description": "normal"
+ }
+ ],
+ "role": "root",
+ "bitrate_requirement": 10
+ }
+ }
+ ],
+ "metadata": {
+ "vnfSoftwareVersion": "1.0.0",
+ "vnfProductName": "zte",
+ "localizationLanguage": [
+ "english",
+ "chinese"
+ ],
+ "vnfProvider": "zte",
+ "vnfmInfo": "zte",
+ "defaultLocalizationLanguage": "english",
+ "vnfdId": "zte-hss-1.0",
+ "id": "zte-hss-1.0",
+ "vnfProductInfoDescription": "hss",
+ "vnfdVersion": "1.0.0",
+ "vnfProductInfoName": "hss"
+ },
+ "vnf": {
+ "properties": {
+ "descriptor_id": "zte-hss-1.0",
+ "descriptor_version": "1.0.0",
+ "software_version": "1.0.0",
+ "provider": "zte"
+ },
+ "metadata": {
+ }
+ }
+}
+
+nsd_data = {"vnffgs": [{"vnffg_id": "vnffg1",
+ "description": "",
+ "members": ["path1",
+ "path2"],
+ "properties": {"vendor": "zte",
+ "connection_point": ["m6000_data_in",
+ "m600_tunnel_cp",
+ "m6000_data_out"],
+ "version": "1.0",
+ "constituent_vnfs": ["VFW",
+ "VNAT"],
+ "number_of_endpoints": 3,
+ "dependent_virtual_link": ["sfc_data_network",
+ "ext_datanet_net",
+ "ext_mnet_net"]}}],
+ "inputs": {"sfc_data_network": {"type": "string",
+ "value": "sfc_data_network"},
+ "externalDataNetworkName": {"type": "string",
+ "value": "vlan_4004_tunnel_net"},
+ "externalManageNetworkName": {"type": "string",
+ "value": "vlan_4008_mng_net"},
+ "NatIpRange": {"type": "string",
+ "value": "192.167.0.10-192.168.0.20"},
+ "externalPluginManageNetworkName": {"type": "string",
+ "value": "vlan_4007_plugin_net"}},
+ "pnfs": [{"pnf_id": "m6000_s",
+ "cps": [],
+ "description": "",
+ "properties": {"vendor": "zte",
+ "request_reclassification": False,
+ "pnf_type": "m6000s",
+ "version": "1.0",
+ "management_address": "111111",
+ "id": "m6000_s",
+ "nsh_aware": False}}],
+ "fps": [{"properties": {"symmetric": False,
+ "policy": {"type": "ACL",
+ "criteria": {"dest_port_range": "1-100",
+ "ip_protocol": "tcp",
+ "source_ip_range": ["119.1.1.1-119.1.1.10"],
+ "dest_ip_range": [{"get_input": "NatIpRange"}],
+ "dscp": 0,
+ "source_port_range": "1-100"}}},
+ "forwarder_list": [{"capability": "",
+ "type": "cp",
+ "node_name": "m6000_data_out"},
+ {"capability": "",
+ "type": "cp",
+ "node_name": "m600_tunnel_cp"},
+ {"capability": "vnat_fw_inout",
+ "type": "vnf",
+ "node_name": "VNAT"}],
+ "description": "",
+ "fp_id": "path2"},
+ {"properties": {"symmetric": True,
+ "policy": {"type": "ACL",
+ "criteria": {"dest_port_range": "1-100",
+ "ip_protocol": "tcp",
+ "source_ip_range": ["1-100"],
+ "dest_ip_range": ["1-100"],
+ "dscp": 4,
+ "source_port_range": "1-100"}}},
+ "forwarder_list": [{"capability": "",
+ "type": "cp",
+ "node_name": "m6000_data_in"},
+ {"capability": "",
+ "type": "cp",
+ "node_name": "m600_tunnel_cp"},
+ {"capability": "vfw_fw_inout",
+ "type": "vnf",
+ "node_name": "VFW"},
+ {"capability": "vnat_fw_inout",
+ "type": "vnf",
+ "node_name": "VNAT"},
+ {"capability": "",
+ "type": "cp",
+ "node_name": "m600_tunnel_cp"},
+ {"capability": "",
+ "type": "cp",
+ "node_name": "m6000_data_out"}],
+ "description": "",
+ "fp_id": "path1"}],
+ "routers": [],
+ "vnfs": [{"vnf_id": "VFW",
+ "description": "",
+ "properties": {"plugin_info": "vbrasplugin_1.0",
+ "vendor": "zte",
+ "is_shared": False,
+ "adjust_vnf_capacity": True,
+ "name": "VFW",
+ "vnf_extend_type": "driver",
+ "csarVersion": "v1.0",
+ "csarType": "NFAR",
+ "csarProvider": "ZTE",
+ "version": "1.0",
+ "nsh_aware": True,
+ "cross_dc": False,
+ "vnf_type": "VFW",
+ "vmnumber_overquota_alarm": True,
+ "vnfd_version": "1.0.0",
+ "externalPluginManageNetworkName": "vlan_4007_plugin_net",
+ "id": "vcpe_vfw_zte_1_0",
+ "request_reclassification": False},
+ "dependencies": [{"key_name": "vfw_ctrl_by_manager_cp",
+ "vl_id": "ext_mnet_net"},
+ {"key_name": "vfw_data_cp",
+ "vl_id": "sfc_data_network"}],
+ "type": "tosca.nodes.nfv.ext.zte.VNF.VFW",
+ "networks": []}],
+ "ns_exposed": {"external_cps": [],
+ "forward_cps": []},
+ "policies": [{"file_url": "policies/abc.drl",
+ "name": "aaa"}],
+ "vls": [{"route_id": "",
+ "vl_id": "ext_mnet_net",
+ "route_external": False,
+ "description": "",
+ "properties": {"name": "vlan_4008_mng_net",
+ "mtu": 1500,
+ "location_info": {"tenant": "admin",
+ "vimid": 2,
+ "availability_zone": "nova"},
+ "ip_version": 4,
+ "dhcp_enabled": True,
+ "network_name": "vlan_4008_mng_net",
+ "network_type": "vlan"}},
+ {"route_id": "",
+ "vl_id": "ext_datanet_net",
+ "route_external": False,
+ "description": "",
+ "properties": {"name": "vlan_4004_tunnel_net",
+ "mtu": 1500,
+ "location_info": {"tenant": "admin",
+ "vimid": 2,
+ "availability_zone": "nova"},
+ "ip_version": 4,
+ "dhcp_enabled": True,
+ "network_name": "vlan_4004_tunnel_net",
+ "network_type": "vlan"}},
+ {"route_id": "",
+ "vl_id": "sfc_data_network",
+ "route_external": False,
+ "description": "",
+ "properties": {"name": "sfc_data_network",
+ "dhcp_enabled": True,
+ "is_predefined": False,
+ "location_info": {"tenant": "admin",
+ "vimid": 2,
+ "availability_zone": "nova"},
+ "ip_version": 4,
+ "mtu": 1500,
+ "network_name": "sfc_data_network",
+ "network_type": "vlan"}}],
+ "cps": [{"pnf_id": "m6000_s",
+ "vl_id": "path2",
+ "description": "",
+ "cp_id": "m6000_data_out",
+ "properties": {"direction": "bidirectional",
+ "vnic_type": "normal",
+ "bandwidth": 0,
+ "mac_address": "11-22-33-22-11-44",
+ "interface_name": "xgei-0/4/1/5",
+ "ip_address": "176.1.1.2",
+ "order": 0,
+ "sfc_encapsulation": "mac"}},
+ {"pnf_id": "m6000_s",
+ "vl_id": "ext_datanet_net",
+ "description": "",
+ "cp_id": "m600_tunnel_cp",
+ "properties": {"direction": "bidirectional",
+ "vnic_type": "normal",
+ "bandwidth": 0,
+ "mac_address": "00-11-00-22-33-00",
+ "interface_name": "gei-0/4/0/13",
+ "ip_address": "191.167.100.5",
+ "order": 0,
+ "sfc_encapsulation": "mac"}},
+ {"pnf_id": "m6000_s",
+ "vl_id": "path2",
+ "description": "",
+ "cp_id": "m6000_data_in",
+ "properties": {"direction": "bidirectional",
+ "vnic_type": "normal",
+ "bandwidth": 0,
+ "mac_address": "11-22-33-22-11-41",
+ "interface_name": "gei-0/4/0/7",
+ "ip_address": "1.1.1.1",
+ "order": 0,
+ "sfc_encapsulation": "mac",
+ "bond": "none"}},
+ {"pnf_id": "m6000_s",
+ "vl_id": "ext_mnet_net",
+ "description": "",
+ "cp_id": "m600_mnt_cp",
+ "properties": {"direction": "bidirectional",
+ "vnic_type": "normal",
+ "bandwidth": 0,
+ "mac_address": "00-11-00-22-33-11",
+ "interface_name": "gei-0/4/0/1",
+ "ip_address": "10.46.244.51",
+ "order": 0,
+ "sfc_encapsulation": "mac",
+ "bond": "none"}}],
+ "metadata": {"invariant_id": "vcpe_ns_sff_1",
+ "name": "VCPE_NS",
+ "csarVersion": "v1.0",
+ "csarType": "NSAR",
+ "csarProvider": "ZTE",
+ "version": 1,
+ "vendor": "ZTE",
+ "id": "VCPE_NS",
+ "description": "vcpe_ns"},
+ "ns": {
+ "properties": {
+ "descriptor_id": "VCPE_NS",
+ "version": 1,
+ "name": "VCPE_NS",
+ "desginer": "ZTE",
+ "invariant_id": "vcpe_ns_sff_1"
+ }
+}
+}
+
+pnfd_data = {
+ "metadata": {
+ "id": "zte-1.0",
+ }
+}
+
+sd_data = {
+ "inputs": {
+ "sdwanvpnresource_list": [
+ {
+ "sdwanvpn_topology": "",
+ "required": True,
+ "type": "string"
+ }
+ ]
+ },
+ "pnfs": [
+ {
+ "pnf_id": "m6000_s",
+ "cps": [],
+ "description": "",
+ "properties": {
+ "vendor": "zte",
+ "request_reclassification": False,
+ "pnf_type": "m6000s",
+ "version": "1.0",
+ "management_address": "111111",
+ "id": "m6000_s",
+ "nsh_aware": False
+ }
+ }
+ ],
+ "description": "",
+ "vnfs": [
+ {
+ "vnf_id": "sdwansiteresource",
+ "description": "",
+ "properties": {
+ "sdwandevice_type": "",
+ "sdwandevice_class": "PNF",
+ "multi_stage_design": "false",
+ "min_instances": "1",
+ "sdwansite_controlPoint": "",
+ "id": "cd557883-ac4b-462d-aa01-421b5fa606b1",
+ "sdwansite_longitude": "",
+ "sdwansite_latitude": "",
+ "sdwansite_postcode": "",
+ "sdwansite_type": "",
+ "nf_naming": {
+ "ecomp_generated_naming": True
+ },
+ "sdwansite_emails": "",
+ "sdwansite_role": "",
+ "vnfm_info": "",
+ "sdwansite_address": "",
+ "sdwansite_description": "",
+ "availability_zone_max_count": "1",
+ "sdwansite_name": ""
+ }
+ }
+ ],
+ "service": {
+ "type": "org.openecomp.service.EnhanceService",
+ "properties": {
+ "descriptor_id": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
+ "designer": "",
+ "invariant_id": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
+ "name": "Enhance_Service",
+ "verison": ""
+ },
+ "metadata": {
+ "category": "E2E Service",
+ "serviceType": "",
+ "description": "Enhance_Service",
+ "instantiationType": "A-la-carte",
+ "type": "Service",
+ "environmentContext": "General_Revenue-Bearing",
+ "serviceEcompNaming": True,
+ "UUID": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
+ "ecompGeneratedNaming": True,
+ "serviceRole": "",
+ "invariantUUID": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
+ "namingPolicy": "",
+ "name": "Enhance_Service"
+ }
+ },
+ "metadata": {
+ "category": "E2E Service",
+ "serviceType": "",
+ "description": "Enhance_Service",
+ "instantiationType": "A-la-carte",
+ "type": "Service",
+ "environmentContext": "General_Revenue-Bearing",
+ "serviceEcompNaming": True,
+ "UUID": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
+ "ecompGeneratedNaming": True,
+ "serviceRole": "",
+ "invariantUUID": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
+ "namingPolicy": "",
+ "name": "Enhance_Service"
+ }
+}
+
+vnf_subscription_data = {
+ "filters": {
+ "notificationTypes": [
+ "VnfPackageOnboardingNotification"
+ ],
+ "vnfProductsFromProviders": {
+ "vnfProvider": "string",
+ "vnfProducts": {
+ "vnfProductName": "string",
+ "versions": {
+ "vnfSoftwareVersion": "string",
+ "vnfdVersions": [
+ "string"
+ ]
+ }
+ }
+ },
+ "vnfdId": [
+ "3fa85f64-5717-4562-b3fc-2c963f66afa6"
+ ],
+ "vnfPkgId": [
+ "3fa85f64-5717-4562-b3fc-2c963f66afa6"
+ ],
+ "operationalState": [
+ "ENABLED"
+ ],
+ "usageState": [
+ "IN_USE"
+ ]
+ },
+ "callbackUri": "http://www.vnf1.com/notification",
+ "authentication": {
+ "authType": [
+ "BASIC"
+ ],
+ "paramsBasic": {
+ "userName": "string",
+ "password": "string"
+ }
+ }
+}
diff --git a/catalog/packages/tests/test_health_check.py b/catalog/packages/tests/test_health_check.py
new file mode 100644
index 0000000..f8c3b56
--- /dev/null
+++ b/catalog/packages/tests/test_health_check.py
@@ -0,0 +1,50 @@
+# Copyright (c) 2019, CMCC Technologies Co., Ltd.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+
+from django.test import TestCase, Client
+from rest_framework import status
+
+
+class TestHealthCheck(TestCase):
+ def setUp(self):
+ self.client = Client()
+
+ def tearDown(self):
+ pass
+
+ def test_vnfpkgm_health_check(self):
+ response = self.client.get("/api/vnfpkgm/v1/health_check")
+ self.assertEqual(status.HTTP_200_OK, response.status_code, response.content)
+ resp_data = json.loads(response.content)
+ self.assertEqual({"status": "active"}, resp_data)
+
+ def test_nsd_health_check(self):
+ response = self.client.get("/api/nsd/v1/health_check")
+ self.assertEqual(status.HTTP_200_OK, response.status_code, response.content)
+ resp_data = json.loads(response.content)
+ self.assertEqual({"status": "active"}, resp_data)
+
+ def test_catalog_health_check(self):
+ response = self.client.get("/api/catalog/v1/health_check")
+ self.assertEqual(status.HTTP_200_OK, response.status_code, response.content)
+ resp_data = json.loads(response.content)
+ self.assertEqual({"status": "active"}, resp_data)
+
+ def test_parser_health_check(self):
+ response = self.client.get("/api/parser/v1/health_check")
+ self.assertEqual(status.HTTP_200_OK, response.status_code, response.content)
+ resp_data = json.loads(response.content)
+ self.assertEqual({"status": "active"}, resp_data)
diff --git a/catalog/packages/tests/test_ns_descriptor.py b/catalog/packages/tests/test_ns_descriptor.py
new file mode 100644
index 0000000..473786e
--- /dev/null
+++ b/catalog/packages/tests/test_ns_descriptor.py
@@ -0,0 +1,300 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+import json
+import mock
+import os
+
+from django.test import TestCase
+from rest_framework import status
+from rest_framework.test import APIClient
+from catalog.packages.biz.ns_descriptor import NsDescriptor
+from catalog.packages.const import PKG_STATUS
+from catalog.packages.tests.const import nsd_data
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import NSPackageModel, VnfPackageModel, PnfPackageModel
+from catalog.pub.utils import toscaparser
+
+
+class TestNsDescriptor(TestCase):
+ def setUp(self):
+ self.client = APIClient()
+ self.user_defined_data = {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ }
+ self.expected_nsd_info = {
+ 'id': None,
+ 'nsdId': None,
+ 'nsdName': None,
+ 'nsdVersion': None,
+ 'nsdDesigner': None,
+ 'nsdInvariantId': None,
+ 'vnfPkgIds': [],
+ 'pnfdInfoIds': [],
+ 'nestedNsdInfoIds': [],
+ 'nsdOnboardingState': 'CREATED',
+ 'onboardingFailureDetails': None,
+ 'nsdOperationalState': 'DISABLED',
+ 'nsdUsageState': 'NOT_IN_USE',
+ 'userDefinedData': self.user_defined_data,
+ '_links': None
+ }
+ self.nsdModel = {
+ "pnfs": [{"properties": {"id": "m6000_s"}}],
+ "vnfs": [{"properties": {"id": "123"}}]
+ }
+
+ def tearDown(self):
+ pass
+
+ def test_nsd_create_normal(self):
+ reqest_data = {'userDefinedData': self.user_defined_data}
+ expected_reponse_data = {
+ 'nsdOnboardingState': 'CREATED',
+ 'nsdOperationalState': 'DISABLED',
+ 'nsdUsageState': 'NOT_IN_USE',
+ 'userDefinedData': self.user_defined_data,
+ '_links': None
+ }
+
+ response = self.client.post(
+ '/api/nsd/v1/ns_descriptors',
+ data=reqest_data,
+ format='json'
+ )
+ response.data.pop('id')
+ self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+ self.assertEqual(expected_reponse_data, response.data)
+
+ def test_query_multiple_nsds_normal(self):
+ expected_reponse_data = [
+ copy.deepcopy(self.expected_nsd_info),
+ copy.deepcopy(self.expected_nsd_info)
+ ]
+ expected_reponse_data[0]['id'] = '0'
+ expected_reponse_data[0]['nsdId'] = '0'
+ expected_reponse_data[1]['id'] = '1'
+ expected_reponse_data[1]['nsdId'] = '1'
+
+ user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
+ for i in range(2):
+ NSPackageModel(
+ nsPackageId=str(i),
+ onboardingState='CREATED',
+ operationalState='DISABLED',
+ usageState='NOT_IN_USE',
+ userDefinedData=user_defined_data,
+ nsdId=str(i)
+ ).save()
+
+ response = self.client.get('/api/nsd/v1/ns_descriptors', format='json')
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(expected_reponse_data, response.data)
+
+ expected_reponse_data = [
+ copy.deepcopy(self.expected_nsd_info)
+ ]
+ expected_reponse_data[0]['id'] = '1'
+ expected_reponse_data[0]['nsdId'] = '1'
+ response = self.client.get('/api/nsd/v1/ns_descriptors?nsdId=1', format='json')
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(expected_reponse_data, response.data)
+
+ def test_query_single_nsd_normal(self):
+ expected_reponse_data = copy.deepcopy(self.expected_nsd_info)
+ expected_reponse_data['id'] = '22'
+
+ user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
+ NSPackageModel(
+ nsPackageId='22',
+ onboardingState='CREATED',
+ operationalState='DISABLED',
+ usageState='NOT_IN_USE',
+ userDefinedData=user_defined_data,
+ nsdModel=json.JSONEncoder().encode(self.nsdModel)
+ ).save()
+
+ response = self.client.get('/api/nsd/v1/ns_descriptors/22', format='json')
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(expected_reponse_data, response.data)
+
+ def test_query_single_when_ns_not_exist(self):
+ response = self.client.get('/api/nsd/v1/ns_descriptors/22', format='json')
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ def test_delete_single_nsd_normal(self):
+ user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
+ NSPackageModel(
+ nsPackageId='21',
+ operationalState='DISABLED',
+ usageState='NOT_IN_USE',
+ userDefinedData=user_defined_data,
+ nsdModel='test'
+ ).save()
+
+ response = self.client.delete("/api/nsd/v1/ns_descriptors/21", format='json')
+ self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+ self.assertEqual(None, response.data)
+
+ def test_delete_when_ns_not_exist(self):
+ response = self.client.delete("/api/nsd/v1/ns_descriptors/21", format='json')
+ self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+
+ @mock.patch.object(toscaparser, 'parse_nsd')
+ def test_nsd_content_upload_normal(self, mock_parse_nsd):
+ user_defined_data_json = json.JSONEncoder().encode(self.user_defined_data)
+ mock_parse_nsd.return_value = json.JSONEncoder().encode(nsd_data)
+ VnfPackageModel(
+ vnfPackageId="111",
+ vnfdId="vcpe_vfw_zte_1_0"
+ ).save()
+
+ PnfPackageModel(
+ pnfPackageId="112",
+ pnfdId="m6000_s"
+ ).save()
+
+ NSPackageModel(
+ nsPackageId='22',
+ operationalState='DISABLED',
+ usageState='NOT_IN_USE',
+ userDefinedData=user_defined_data_json,
+ ).save()
+
+ with open('nsd_content.txt', 'wt') as fp:
+ fp.write('test')
+ with open('nsd_content.txt', 'rt') as fp:
+ resp = self.client.put(
+ "/api/nsd/v1/ns_descriptors/22/nsd_content",
+ {'file': fp},
+ )
+ file_content = ''
+ with open(os.path.join(CATALOG_ROOT_PATH, '22/nsd_content.txt')) as fp:
+ data = fp.read()
+ file_content = '%s%s' % (file_content, data)
+ ns_pkg = NSPackageModel.objects.filter(nsPackageId="22")
+ self.assertEqual("VCPE_NS", ns_pkg[0].nsdId)
+ self.assertEqual(PKG_STATUS.ONBOARDED, ns_pkg[0].onboardingState)
+ self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
+ self.assertEqual(None, resp.data)
+ self.assertEqual(file_content, 'test')
+ os.remove('nsd_content.txt')
+
+ def test_nsd_content_upload_failure(self):
+ with open('nsd_content.txt', 'wt') as fp:
+ fp.write('test')
+ with open('nsd_content.txt', 'rt') as fp:
+ response = self.client.put(
+ "/api/nsd/v1/ns_descriptors/22/nsd_content",
+ {'file': fp},
+ )
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ def test_nsd_content_download_normal(self):
+ with open('nsd_content.txt', 'wt') as fp:
+ fp.writelines('test1')
+ fp.writelines('test2')
+ NSPackageModel.objects.create(
+ nsPackageId='23',
+ onboardingState='ONBOARDED',
+ localFilePath='nsd_content.txt'
+ )
+ response = self.client.get(
+ "/api/nsd/v1/ns_descriptors/23/nsd_content", format='json'
+ )
+ file_content = ""
+ for data in response.streaming_content:
+ file_content = '%s%s' % (file_content, data.decode())
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual("test1test2", file_content)
+ os.remove('nsd_content.txt')
+
+ def test_nsd_content_download_when_ns_not_exist(self):
+ response = self.client.get("/api/nsd/v1/ns_descriptors/23/nsd_content", format='json')
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ def test_nsd_content_download_failed(self):
+ NSPackageModel.objects.create(
+ nsPackageId='23',
+ onboardingState='CREATED',
+ localFilePath='nsd_content.txt'
+ )
+ response = self.client.get("/api/nsd/v1/ns_descriptors/23/nsd_content", format='json')
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ def test_nsd_content_partial_download_normal(self):
+ with open('nsd_content.txt', 'wt') as fp:
+ fp.writelines('test1')
+ fp.writelines('test2')
+ NSPackageModel(
+ nsPackageId='23',
+ onboardingState='ONBOARDED',
+ localFilePath='nsd_content.txt'
+ ).save()
+
+ response = self.client.get(
+ "/api/nsd/v1/ns_descriptors/23/nsd_content",
+ HTTP_RANGE='5-10',
+ format='json'
+ )
+ partial_file_content = ''
+ for data in response.streaming_content:
+ partial_file_content = '%s%s' % (partial_file_content, data.decode())
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual("test2", partial_file_content)
+ os.remove('nsd_content.txt')
+
+ @mock.patch.object(NsDescriptor, 'create')
+ def test_nsd_create_when_catch_exception(self, mock_create):
+ reqest_data = {'userDefinedData': self.user_defined_data}
+ mock_create.side_effect = TypeError("integer type")
+ response = self.client.post('/api/nsd/v1/ns_descriptors', data=reqest_data, format='json')
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(NsDescriptor, 'query_single')
+ def test_query_single_when_catch_exception(self, mock_query_single):
+ mock_query_single.side_effect = TypeError("integer type")
+ response = self.client.get('/api/nsd/v1/ns_descriptors/22', format='json')
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(NsDescriptor, 'query_multiple')
+ def test_query_multiple_when_catch_exception(self, mock_query_multipe):
+ mock_query_multipe.side_effect = TypeError("integer type")
+ response = self.client.get('/api/nsd/v1/ns_descriptors', format='json')
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(NsDescriptor, 'delete_single')
+ def test_delete_when_catch_exception(self, mock_delete_single):
+ mock_delete_single.side_effect = TypeError("integer type")
+ response = self.client.delete("/api/nsd/v1/ns_descriptors/21", format='json')
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(NsDescriptor, 'upload')
+ def test_upload_when_catch_exception(self, mock_upload):
+ mock_upload.side_effect = TypeError("integer type")
+ with open('nsd_content.txt', 'wt') as fp:
+ fp.write('test')
+ with open('nsd_content.txt', 'rt') as fp:
+ response = self.client.put("/api/nsd/v1/ns_descriptors/22/nsd_content", {'file': fp})
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+ os.remove('nsd_content.txt')
+
+ @mock.patch.object(NsDescriptor, 'download')
+ def test_download_when_catch_exception(self, mock_download):
+ mock_download.side_effect = TypeError("integer type")
+ response = self.client.get("/api/nsd/v1/ns_descriptors/23/nsd_content", format='json')
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
diff --git a/catalog/packages/tests/test_nsdm_subscription.py b/catalog/packages/tests/test_nsdm_subscription.py
new file mode 100644
index 0000000..f73c416
--- /dev/null
+++ b/catalog/packages/tests/test_nsdm_subscription.py
@@ -0,0 +1,521 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import mock
+import uuid
+from django.test import TestCase
+from rest_framework.test import APIClient
+from rest_framework import status
+
+from catalog.packages.biz.nsdm_subscription import NsdmSubscription
+from catalog.pub.database.models import NsdmSubscriptionModel
+
+
+class TestNsdmSubscription(TestCase):
+
+ def setUp(self):
+ self.client = APIClient()
+ NsdmSubscriptionModel.objects.all().delete()
+ self.subscription_id = str(uuid.uuid4())
+ self.subscription = {
+ "callbackUri": "http://callbackuri.com",
+ "authentication": {
+ "authType": ["BASIC"],
+ "paramsBasic": {
+ "userName": "username",
+ "password": "password"
+ }
+ },
+ "filter": {
+ "nsdId": ["b632bddc-abcd-4180-bd8d-4e8a9578eff7"],
+ }
+ }
+ self.links = {
+ "self": {
+ "href": "/api/v1/subscriptions/" + self.subscription_id
+ }
+ }
+ self.test_subscription = {
+ "callbackUri": "http://callbackuri.com",
+ "id": self.subscription_id,
+ "filter": {
+ "notificationTypes": [
+ "NsdOnBoardingNotification"
+ ],
+ "nsdInfoId": [],
+ "nsdId": [],
+ "nsdName": [],
+ "nsdVersion": [],
+ "nsdInvariantId": [],
+ "vnfPkgIds": [],
+ "nestedNsdInfoIds": [],
+ "nsdOnboardingState": [],
+ "nsdOperationalState": [],
+ "nsdUsageState": [],
+ "pnfdInfoIds": [],
+ "pnfdId": [],
+ "pnfdName": [],
+ "pnfdVersion": [],
+ "pnfdProvider": [],
+ "pnfdInvariantId": [],
+ "pnfdOnboardingState": [],
+ "pnfdUsageState": []
+ },
+ "_links": self.links,
+ }
+
+ def tearDown(self):
+ pass
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_nsdm_subscribe_notification(self, mock_uuid4, mock_requests):
+ temp_uuid = str(uuid.uuid4())
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.return_value.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ response = self.client.post("/api/nsd/v1/subscriptions",
+ data=self.subscription, format='json')
+ self.assertEqual(201, response.status_code)
+ self.assertEqual(self.subscription["callbackUri"],
+ response.data["callbackUri"])
+ self.assertEqual(temp_uuid, response.data["id"])
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_nsdm_subscribe_callbackFailure(self, mock_uuid4, mock_requests):
+ temp_uuid = str(uuid.uuid4())
+ mock_requests.return_value.status_code = 500
+ mock_requests.get.return_value.status_code = 500
+ mock_uuid4.return_value = temp_uuid
+ expected_data = {
+ 'status': 500,
+ 'detail': "callbackUri http://callbackuri.com didn't"
+ " return 204 statuscode."
+ }
+ response = self.client.post("/api/nsd/v1/subscriptions",
+ data=self.subscription, format='json')
+ self.assertEqual(500, response.status_code)
+ self.assertEqual(expected_data, response.data)
+
+ @mock.patch("requests.get")
+ def test_nsdm_second_subscription(self, mock_requests):
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.return_value.status_code = 204
+ response = self.client.post("/api/nsd/v1/subscriptions",
+ data=self.subscription, format='json')
+ self.assertEqual(201, response.status_code)
+ self.assertEqual(self.subscription["callbackUri"],
+ response.data["callbackUri"])
+ dummy_subscription = {
+ "callbackUri": "http://callbackuri.com",
+ "authentication": {
+ "authType": ["BASIC"],
+ "paramsBasic": {
+ "userName": "username",
+ "password": "password"
+ }
+ },
+ "filter": {
+ "nsdId": ["b632bddc-bccd-4180-bd8d-4e8a9578eff7"],
+ }
+ }
+ response = self.client.post("/api/nsd/v1/subscriptions",
+ data=dummy_subscription, format='json')
+ self.assertEqual(201, response.status_code)
+ self.assertEqual(dummy_subscription["callbackUri"],
+ response.data["callbackUri"])
+
+ @mock.patch("requests.get")
+ def test_nsdm_duplicate_subscription(self, mock_requests):
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.return_value.status_code = 204
+ response = self.client.post("/api/nsd/v1/subscriptions",
+ data=self.subscription, format='json')
+ self.assertEqual(201, response.status_code)
+ self.assertEqual(self.subscription["callbackUri"],
+ response.data["callbackUri"])
+ expected_data = {
+ 'status': 303,
+ 'detail': 'Already Subscription exists with'
+ ' the same callbackUri and filter'
+ }
+ response = self.client.post("/api/nsd/v1/subscriptions",
+ data=self.subscription, format='json')
+ self.assertEqual(303, response.status_code)
+ self.assertEqual(expected_data, response.data)
+
+ @mock.patch("requests.get")
+ def test_nsdm_bad_request(self, mock_requests):
+ dummy_subscription = {
+ "callbackUri": "http://callbackuri.com",
+ "authentication": {
+ "authType": ["BASIC"],
+ "paramsBasic": {
+ "userName": "username",
+ "password": "password"
+ }
+ },
+ "filter": {
+ "nsdId": "b632bddc-bccd-4180-bd8d-4e8a9578eff7",
+ }
+ }
+ response = self.client.post("/api/nsd/v1/subscriptions",
+ data=dummy_subscription, format='json')
+ self.assertEqual(400, response.status_code)
+
+ @mock.patch("requests.get")
+ def test_nsdm_invalid_authtype_subscription(self, mock_requests):
+ dummy_subscription = {
+ "callbackUri": "http://callbackuri.com",
+ "authentication": {
+ "authType": ["OAUTH2_CLIENT_CREDENTIALS"],
+ "paramsBasic": {
+ "userName": "username",
+ "password": "password"
+ }
+ }
+ }
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.return_value.status_code = 204
+ expected_data = {
+ 'status': 400,
+ 'detail': 'Auth type should be BASIC'
+ }
+ response = self.client.post("/api/nsd/v1/subscriptions",
+ data=dummy_subscription, format='json')
+ self.assertEqual(400, response.status_code)
+ self.assertEqual(expected_data, response.data)
+
+ @mock.patch("requests.get")
+ def test_nsdm_invalid_authtype_oauthclient_subscription(
+ self, mock_requests):
+ dummy_subscription = {
+ "callbackUri": "http://callbackuri.com",
+ "authentication": {
+ "authType": ["BASIC"],
+ "paramsOauth2ClientCredentials": {
+ "clientId": "clientId",
+ "clientPassword": "password",
+ "tokenEndpoint": "http://tokenEndpoint"
+ }
+ }
+ }
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.return_value.status_code = 204
+ expected_data = {
+ 'status': 400,
+ 'detail': 'Auth type should be OAUTH2_CLIENT_CREDENTIALS'
+ }
+ response = self.client.post("/api/nsd/v1/subscriptions",
+ data=dummy_subscription, format='json')
+ self.assertEqual(400, response.status_code)
+ self.assertEqual(expected_data, response.data)
+
+ @mock.patch("requests.get")
+ def test_nsdm_invalid_authparams_subscription(self, mock_requests):
+ dummy_subscription = {
+ "callbackUri": "http://callbackuri.com",
+ "authentication": {
+ "authType": ["BASIC"],
+ "paramsBasic": {
+ "userName": "username"
+ }
+ }
+ }
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.return_value.status_code = 204
+ expected_data = {
+ 'status': 400,
+ 'detail': 'userName and password needed for BASIC'
+ }
+ response = self.client.post("/api/nsd/v1/subscriptions",
+ data=dummy_subscription, format='json')
+ self.assertEqual(400, response.status_code)
+ self.assertEqual(expected_data, response.data)
+
+ @mock.patch("requests.get")
+ def test_nsdm_invalid_authparams_oauthclient_subscription(
+ self, mock_requests):
+ dummy_subscription = {
+ "callbackUri": "http://callbackuri.com",
+ "authentication": {
+ "authType": ["OAUTH2_CLIENT_CREDENTIALS"],
+ "paramsOauth2ClientCredentials": {
+ "clientPassword": "password",
+ "tokenEndpoint": "http://tokenEndpoint"
+ }
+ }
+ }
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.return_value.status_code = 204
+ expected_data = {
+ 'status': 400,
+ 'detail': 'clientId, clientPassword and tokenEndpoint'
+ ' required for OAUTH2_CLIENT_CREDENTIALS'
+ }
+ response = self.client.post("/api/nsd/v1/subscriptions",
+ data=dummy_subscription, format='json')
+ self.assertEqual(400, response.status_code)
+ self.assertEqual(expected_data, response.data)
+
+ @mock.patch("requests.get")
+ def test_nsdm_invalid_filter_subscription(self, mock_requests):
+ dummy_subscription = {
+ "callbackUri": "http://callbackuri.com",
+ "authentication": {
+ "authType": ["BASIC"],
+ "paramsBasic": {
+ "userName": "username",
+ "password": "password"
+ }
+ },
+ "filter": {
+ "nsdId": ["b632bddc-bccd-4180-bd8d-4e8a9578eff7"],
+ "nsdInfoId": ["d0ea5ec3-0b98-438a-9bea-488230cff174"]
+ }
+ }
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.return_value.status_code = 204
+ expected_data = {
+ 'status': 400,
+ 'detail': 'Notification Filter should contain'
+ ' either nsdId or nsdInfoId'
+ }
+ response = self.client.post("/api/nsd/v1/subscriptions",
+ data=dummy_subscription, format='json')
+ self.assertEqual(400, response.status_code)
+ self.assertEqual(expected_data, response.data)
+
+ @mock.patch("requests.get")
+ def test_nsdm_invalid_filter_pnfd_subscription(self, mock_requests):
+ dummy_subscription = {
+ "callbackUri": "http://callbackuri.com",
+ "authentication": {
+ "authType": ["BASIC"],
+ "paramsBasic": {
+ "userName": "username",
+ "password": "password"
+ }
+ },
+ "filter": {
+ "pnfdId": ["b632bddc-bccd-4180-bd8d-4e8a9578eff7"],
+ "pnfdInfoIds": ["d0ea5ec3-0b98-438a-9bea-488230cff174"]
+ }
+ }
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.return_value.status_code = 204
+ expected_data = {
+ 'status': 400,
+ 'detail': 'Notification Filter should contain'
+ ' either pnfdId or pnfdInfoIds'
+ }
+ response = self.client.post("/api/nsd/v1/subscriptions",
+ data=dummy_subscription, format='json')
+ self.assertEqual(400, response.status_code)
+ self.assertEqual(expected_data, response.data)
+
+ @mock.patch.object(NsdmSubscription, 'create')
+ def test_nsdmsubscription_create_when_catch_exception(self, mock_create):
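+        # The business layer is mocked to raise, so the view is expected to map the
+        # unhandled exception to an HTTP 500 response.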
+ mock_create.side_effect = TypeError("Unicode type")
+ response = self.client.post('/api/nsd/v1/subscriptions',
+ data=self.subscription, format='json')
+ self.assertEqual(response.status_code,
+ status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ def test_nsdm_get_subscriptions(self):
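+        # A subscription row is written directly to the database so the GET response
+        # can be compared against self.test_subscription built in setUp().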
+ NsdmSubscriptionModel(subscriptionid=self.subscription_id,
+ callback_uri="http://callbackuri.com",
+ auth_info={},
+ notificationTypes=json.dumps(
+ ["NsdOnBoardingNotification"]),
+ nsdId=[], nsdVersion=[],
+ nsdInfoId=[], nsdDesigner=[],
+ nsdName=[], nsdInvariantId=[],
+ vnfPkgIds=[], pnfdInfoIds=[],
+ nestedNsdInfoIds=[], nsdOnboardingState=[],
+ nsdOperationalState=[], nsdUsageState=[],
+ pnfdId=[], pnfdVersion=[], pnfdProvider=[],
+ pnfdName=[], pnfdInvariantId=[],
+ pnfdOnboardingState=[], pnfdUsageState=[],
+ links=json.dumps(self.links)).save()
+ response = self.client.get("/api/nsd/v1/subscriptions",
+ format='json')
+ self.assertEqual(status.HTTP_200_OK, response.status_code)
+ self.assertEqual([self.test_subscription], response.data)
+
+ def test_nsdm_get_subscriptions_filter(self):
+ NsdmSubscriptionModel(subscriptionid=self.subscription_id,
+ callback_uri="http://callbackuri.com",
+ auth_info={},
+ notificationTypes=json.dumps(
+ ["NsdOnBoardingNotification"]),
+ nsdId=[], nsdVersion=[],
+ nsdInfoId=[], nsdDesigner=[],
+ nsdName=[], nsdInvariantId=[],
+ vnfPkgIds=[], pnfdInfoIds=[],
+ nestedNsdInfoIds=[], nsdOnboardingState=[],
+ nsdOperationalState=[], nsdUsageState=[],
+ pnfdId=[], pnfdVersion=[], pnfdProvider=[],
+ pnfdName=[], pnfdInvariantId=[],
+ pnfdOnboardingState=[], pnfdUsageState=[],
+ links=json.dumps(self.links)).save()
+ response = self.client.get("/api/nsd/v1/subscriptions"
+ "?notificationTypes"
+ "=NsdOnBoardingNotification",
+ format='json')
+ self.assertEqual(status.HTTP_200_OK, response.status_code)
+ self.assertEqual([self.test_subscription], response.data)
+
+ def test_nsdm_get_subscriptions_filter_failure(self):
+ NsdmSubscriptionModel(subscriptionid=self.subscription_id,
+ callback_uri="http://callbackuri.com",
+ auth_info={},
+ notificationTypes=json.dumps(
+ ["NsdOnBoardingNotification"]),
+ nsdId=[], nsdVersion=[],
+ nsdInfoId=[], nsdDesigner=[],
+ nsdName=[], nsdInvariantId=[],
+ vnfPkgIds=[], pnfdInfoIds=[],
+ nestedNsdInfoIds=[], nsdOnboardingState=[],
+ nsdOperationalState=[], nsdUsageState=[],
+ pnfdId=[], pnfdVersion=[], pnfdProvider=[],
+ pnfdName=[], pnfdInvariantId=[],
+ pnfdOnboardingState=[], pnfdUsageState=[],
+ links=json.dumps(self.links)).save()
+ response = self.client.get("/api/nsd/v1/subscriptions"
+ "?notificationTypes="
+ "PnfdOnBoardingFailureNotification",
+ format='json')
+ self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code)
+
+ def test_nsdm_get_subscriptions_invalid_filter(self):
+ NsdmSubscriptionModel(subscriptionid=self.subscription_id,
+ callback_uri="http://callbackuri.com",
+ auth_info={},
+ notificationTypes=json.dumps(
+ ["NsdOnBoardingNotification"]),
+ nsdId=[], nsdVersion=[],
+ nsdInfoId=[], nsdDesigner=[],
+ nsdName=[], nsdInvariantId=[],
+ vnfPkgIds=[], pnfdInfoIds=[],
+ nestedNsdInfoIds=[], nsdOnboardingState=[],
+ nsdOperationalState=[], nsdUsageState=[],
+ pnfdId=[], pnfdVersion=[], pnfdProvider=[],
+ pnfdName=[], pnfdInvariantId=[],
+ pnfdOnboardingState=[], pnfdUsageState=[],
+ links=json.dumps(self.links)).save()
+ response = self.client.get("/api/nsd/v1/subscriptions"
+ "?notificationTypes="
+ "PnfdOnBoardingFailureNotificati",
+ format='json')
+ self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code)
+
+ @mock.patch.object(NsdmSubscription, 'query_multi_subscriptions')
+ def test_nsdmsubscription_get_when_catch_exception(self, mock_create):
+ mock_create.side_effect = TypeError("Unicode type")
+ response = self.client.get('/api/nsd/v1/subscriptions', format='json')
+ self.assertEqual(response.status_code,
+ status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ def test_nsdm_get_subscription(self):
+ NsdmSubscriptionModel(subscriptionid=self.subscription_id,
+ callback_uri="http://callbackuri.com",
+ auth_info={},
+ notificationTypes=json.dumps(
+ ["NsdOnBoardingNotification"]),
+ nsdId=[], nsdVersion=[],
+ nsdInfoId=[], nsdDesigner=[],
+ nsdName=[], nsdInvariantId=[],
+ vnfPkgIds=[], pnfdInfoIds=[],
+ nestedNsdInfoIds=[], nsdOnboardingState=[],
+ nsdOperationalState=[], nsdUsageState=[],
+ pnfdId=[], pnfdVersion=[], pnfdProvider=[],
+ pnfdName=[], pnfdInvariantId=[],
+ pnfdOnboardingState=[], pnfdUsageState=[],
+ links=json.dumps(self.links)).save()
+ response = self.client.get('/api/nsd/v1/'
+ 'subscriptions/' + self.subscription_id,
+ format='json')
+ self.assertEqual(status.HTTP_200_OK, response.status_code)
+ self.assertEqual(self.test_subscription, response.data)
+
+ def test_nsdm_get_subscription_failure(self):
+ expected_data = {
+ "status": 404,
+ "detail": "Subscription(" + self.subscription_id + ") "
+ "doesn't exists"
+ }
+ response = self.client.get('/api/nsd/v1/'
+ 'subscriptions/' + self.subscription_id,
+ format='json')
+ self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code)
+ self.assertEqual(expected_data, response.data)
+
+ def test_nsdm_get_subscription_failure_bad_request(self):
+ response = self.client.get("/api/nsd/v1/subscriptions/123",
+ format='json')
+ self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code)
+
+ @mock.patch.object(NsdmSubscription, 'query_single_subscription')
+ def test_nsdmsubscription_getsingle_when_catch_exception(
+ self, mock_create):
+ mock_create.side_effect = TypeError("Unicode type")
+ response = self.client.get('/api/nsd/v1/'
+ 'subscriptions/' + self.subscription_id,
+ format='json')
+ self.assertEqual(response.status_code,
+ status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    def test_nsdm_delete_subscription(self):
+ NsdmSubscriptionModel(subscriptionid=self.subscription_id,
+ callback_uri="http://callbackuri.com",
+ auth_info={},
+ notificationTypes=json.dumps(
+ ["NsdOnBoardingNotification"]),
+ nsdId=[], nsdVersion=[],
+ nsdInfoId=[], nsdDesigner=[],
+ nsdName=[], nsdInvariantId=[],
+ vnfPkgIds=[], pnfdInfoIds=[],
+ nestedNsdInfoIds=[], nsdOnboardingState=[],
+ nsdOperationalState=[], nsdUsageState=[],
+ pnfdId=[], pnfdVersion=[], pnfdProvider=[],
+ pnfdName=[], pnfdInvariantId=[],
+ pnfdOnboardingState=[], pnfdUsageState=[],
+ links=json.dumps(self.links)).save()
+ response = self.client.delete('/api/nsd/v1/'
+ 'subscriptions/' + self.subscription_id,
+ format='json')
+ self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code)
+
+    def test_nsdm_delete_subscription_failure(self):
+ response = self.client.delete('/api/nsd/v1/'
+ 'subscriptions/' + self.subscription_id,
+ format='json')
+ self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code)
+
+ def test_nsdm_delete_subscription_failure_bad_request(self):
+ response = self.client.delete("/api/nsd/v1/subscriptions/123",
+ format='json')
+ self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code)
+
+ @mock.patch.object(NsdmSubscription, 'delete_single_subscription')
+ def test_nsdmsubscription_delete_when_catch_exception(self, mock_create):
+ mock_create.side_effect = TypeError("Unicode type")
+ response = self.client.delete('/api/nsd/v1/'
+ 'subscriptions/' + self.subscription_id,
+ format='json')
+ self.assertEqual(response.status_code,
+ status.HTTP_500_INTERNAL_SERVER_ERROR)
diff --git a/catalog/packages/tests/test_nspackage.py b/catalog/packages/tests/test_nspackage.py
new file mode 100644
index 0000000..91f3503
--- /dev/null
+++ b/catalog/packages/tests/test_nspackage.py
@@ -0,0 +1,246 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import mock
+from rest_framework import status
+from django.test import TestCase
+from django.test import Client
+
+from catalog.pub.utils import restcall, toscaparser
+from catalog.pub.database.models import NSPackageModel, VnfPackageModel, PnfPackageModel
+from catalog.pub.msapi import sdc
+from .const import nsd_data
+
+
+class TestNsPackage(TestCase):
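+    """Tests for NS package distribution, query and delete (/api/catalog/v1/nspackages) and NSD parsing (/api/catalog/v1/parsernsd)."""
+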
+ def setUp(self):
+ self.client = Client()
+ NSPackageModel.objects.filter().delete()
+ VnfPackageModel.objects.filter().delete()
+ self.nsd_data = nsd_data
+
+ def tearDown(self):
+ pass
+
+ def test_ns_pkg_distribute_when_ns_exists(self):
+ NSPackageModel(nsPackageId="1", nsdId="2").save()
+ resp = self.client.post(
+ "/api/catalog/v1/nspackages", {"csarId": "1"}, format='json')
+ self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+ self.assertEqual("failed", resp.data["status"])
+ self.assertEqual(
+ "NS CSAR(1) already exists.",
+ resp.data["statusDescription"])
+
+ @mock.patch.object(restcall, 'call_req')
+ def test_ns_pkg_distribute_when_csar_not_exist(self, mock_call_req):
+ mock_call_req.return_value = [0, "[]", '200']
+ resp = self.client.post(
+ "/api/catalog/v1/nspackages", {"csarId": "1"}, format='json')
+ self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+ self.assertEqual("failed", resp.data["status"])
+ self.assertEqual(
+ "Failed to query artifact(services,1) from sdc.",
+ resp.data["statusDescription"])
+
+ @mock.patch.object(restcall, 'call_req')
+ @mock.patch.object(sdc, 'download_artifacts')
+ @mock.patch.object(toscaparser, 'parse_nsd')
+ def test_ns_pkg_distribute_when_nsd_already_exists(
+ self, mock_parse_nsd, mock_download_artifacts, mock_call_req):
+ mock_parse_nsd.return_value = json.JSONEncoder().encode(self.nsd_data)
+ mock_download_artifacts.return_value = "/home/vcpe.csar"
+ mock_call_req.return_value = [0, json.JSONEncoder().encode([{
+ "uuid": "1",
+ "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/vcpe.csar",
+ "distributionStatus": "DISTRIBUTED"
+ }]), '200']
+ NSPackageModel(nsPackageId="2", nsdId="VCPE_NS").save()
+ resp = self.client.post(
+ "/api/catalog/v1/nspackages", {"csarId": "1"}, format='json')
+ self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+ self.assertEqual("failed", resp.data["status"])
+ self.assertEqual(
+ "NSD(VCPE_NS) already exists.",
+ resp.data["statusDescription"])
+
+ @mock.patch.object(restcall, 'call_req')
+ @mock.patch.object(sdc, 'download_artifacts')
+ @mock.patch.object(toscaparser, 'parse_nsd')
+ def test_ns_pkg_distribute_when_nf_not_distributed(
+ self, mock_parse_nsd, mock_download_artifacts, mock_call_req):
+ mock_parse_nsd.return_value = json.JSONEncoder().encode(self.nsd_data)
+ mock_download_artifacts.return_value = "/home/vcpe.csar"
+ mock_call_req.return_value = [0, json.JSONEncoder().encode([{
+ "uuid": "1",
+ "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/vcpe.csar",
+ "distributionStatus": "DISTRIBUTED",
+ }]), '200']
+ resp = self.client.post(
+ "/api/catalog/v1/nspackages", {"csarId": "1"}, format='json')
+ self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+ self.assertEqual("failed", resp.data["status"])
+ self.assertEqual(
+ "VNF package(vcpe_vfw_zte_1_0) is not distributed.",
+ resp.data["statusDescription"])
+
+ @mock.patch.object(restcall, 'call_req')
+ @mock.patch.object(sdc, 'download_artifacts')
+ @mock.patch.object(toscaparser, 'parse_nsd')
+ def test_ns_pkg_distribute_when_successfully(
+ self, mock_parse_nsd, mock_download_artifacts, mock_call_req):
+ mock_parse_nsd.return_value = json.JSONEncoder().encode(self.nsd_data)
+ mock_download_artifacts.return_value = "/home/vcpe.csar"
+ mock_call_req.return_value = [0, json.JSONEncoder().encode([{
+ "uuid": "1",
+ "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/vcpe.csar",
+ "distributionStatus": "DISTRIBUTED"
+ }]), '200']
+ VnfPackageModel(vnfPackageId="1", vnfdId="vcpe_vfw_zte_1_0").save()
+ PnfPackageModel(pnfPackageId="1", pnfdId="m6000_s").save()
+ resp = self.client.post(
+ "/api/catalog/v1/nspackages", {"csarId": "1"}, format='json')
+ self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+ self.assertEqual("success", resp.data["status"])
+ self.assertEqual(
+ "CSAR(1) distributed successfully.",
+ resp.data["statusDescription"])
+
+ @mock.patch.object(sdc, 'get_artifacts')
+ def test_ns_when_not_distributed_by_sdc(self, mock_get_artifacts):
+ mock_get_artifacts.return_value = [{
+ "uuid": "1",
+ "invariantUUID": "63eaec39-ffbe-411c-a838-448f2c73f7eb",
+ "name": "underlayvpn",
+ "version": "2.0",
+ "toscaModelURL": "/sdc/v1/catalog/resources/c94490a0-f7ef-48be-b3f8-8d8662a37236/toscaModel",
+ "category": "Volte",
+ "subCategory": "VolteVNF",
+ "resourceType": "VF",
+ "lifecycleState": "CERTIFIED",
+ "distributionStatus": "DISTRIBUTION_APPROVED",
+ "lastUpdaterUserId": "jh0003"
+ }]
+ resp = self.client.post(
+ "/api/catalog/v1/nspackages", {"csarId": "1"}, format='json')
+ self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+ self.assertEqual("failed", resp.data["status"])
+ self.assertEqual(
+ "The artifact (services,1) is not distributed from sdc.",
+ resp.data["statusDescription"])
+
+ ##########################################################################
+
+ def test_ns_pkg_normal_delete(self):
+ NSPackageModel(nsPackageId="8", nsdId="2").save()
+ resp = self.client.delete("/api/catalog/v1/nspackages/8")
+ self.assertEqual(resp.status_code, status.HTTP_200_OK)
+ self.assertEqual("success", resp.data["status"])
+ self.assertEqual(
+ "Delete CSAR(8) successfully.",
+ resp.data["statusDescription"])
+
+ def test_ns_pkg_get_all(self):
+ NSPackageModel(
+ nsPackageId="13",
+ nsdId="2",
+ nsdDesginer="2",
+ nsdVersion="2",
+ nsPackageUri="13.csar",
+ nsdModel="").save()
+ NSPackageModel(
+ nsPackageId="14",
+ nsdId="3",
+ nsdDesginer="3",
+ nsdVersion="3",
+ nsPackageUri="14.csar",
+ nsdModel="").save()
+ resp = self.client.get("/api/catalog/v1/nspackages")
+ self.assertEqual(resp.status_code, status.HTTP_200_OK)
+ expect_data = [{"csarId": "13",
+ "packageInfo": {"csarName": "13.csar",
+ "nsdProvider": "2",
+ "nsdId": "2",
+ "nsPackageId": "13",
+ "downloadUrl": "http://127.0.0.1:8806/static/catalog/13/13.csar",
+ "nsdModel": "",
+ "nsdVersion": "2",
+ "nsdInvariantId": None
+ }},
+ {"csarId": "14",
+ "packageInfo": {"csarName": "14.csar",
+ "nsdProvider": "3",
+ "nsdId": "3",
+ "nsPackageId": "14",
+ "downloadUrl": "http://127.0.0.1:8806/static/catalog/14/14.csar",
+ "nsdModel": "",
+ "nsdVersion": "3",
+ "nsdInvariantId": None}}]
+ self.assertEqual(expect_data, resp.data)
+
+ def test_ns_pkg_get_one(self):
+ NSPackageModel(
+ nsPackageId="14",
+ nsdId="2",
+ nsdDesginer="3",
+ nsdVersion="4",
+ nsPackageUri="14.csar",
+ nsdModel="").save()
+ resp = self.client.get("/api/catalog/v1/nspackages/14")
+ self.assertEqual(resp.status_code, status.HTTP_200_OK)
+ expect_data = {
+ "csarId": "14",
+ "packageInfo": {
+ "nsdId": "2",
+ "nsPackageId": "14",
+ "nsdProvider": "3",
+ "nsdVersion": "4",
+ "csarName": "14.csar",
+ "nsdModel": "",
+ "downloadUrl": "http://127.0.0.1:8806/static/catalog/14/14.csar",
+ "nsdInvariantId": None}}
+ self.assertEqual(expect_data, resp.data)
+
+ def test_ns_pkg_get_one_not_found(self):
+ resp = self.client.get("/api/catalog/v1/nspackages/22")
+ self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+ self.assertEqual(
+ {"error": "Ns package[22] not Found."},
+ resp.data)
+
+ ##########################################################################
+
+ @mock.patch.object(toscaparser, 'parse_nsd')
+ def test_nsd_parse_normal(self, mock_parse_nsd):
+ NSPackageModel(nsPackageId="18", nsdId="12").save()
+ mock_parse_nsd.return_value = json.JSONEncoder().encode({"a": "b"})
+ req_data = {"csarId": "18", "inputs": []}
+ resp = self.client.post(
+ "/api/catalog/v1/parsernsd",
+ req_data,
+ format='json')
+ self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+ self.assertEqual({"model": '{"a": "b"}'}, resp.data)
+
+ def test_nsd_parse_when_csar_not_exist(self):
+ req_data = {"csarId": "1", "inputs": []}
+ resp = self.client.post(
+ "/api/catalog/v1/parsernsd",
+ req_data,
+ format='json')
+ self.assertEqual(
+ resp.status_code,
+ status.HTTP_500_INTERNAL_SERVER_ERROR)
+ self.assertEqual(resp.data, {"error": "NS CSAR(1) does not exist."})
diff --git a/catalog/packages/tests/test_pnf_descriptor.py b/catalog/packages/tests/test_pnf_descriptor.py
new file mode 100644
index 0000000..8af8614
--- /dev/null
+++ b/catalog/packages/tests/test_pnf_descriptor.py
@@ -0,0 +1,286 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import copy
+import json
+import mock
+import os
+import shutil
+
+
+from django.test import TestCase
+from rest_framework import status
+from rest_framework.test import APIClient
+from catalog.packages.biz.pnf_descriptor import PnfDescriptor
+from catalog.packages.const import PKG_STATUS
+from catalog.packages.tests.const import pnfd_data
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import PnfPackageModel, NSPackageModel
+from catalog.pub.utils import toscaparser
+
+
+class TestPnfDescriptor(TestCase):
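+    """Tests for PNF descriptor management (/api/nsd/v1/pnf_descriptors) and PNFD parsing (/api/catalog/v1/parserpnfd)."""
+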
+ def setUp(self):
+ self.client = APIClient()
+ self.user_defined_data = {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ }
+ self.expected_pnfd_info = {
+ 'id': None,
+ 'pnfdId': None,
+ 'pnfdName': None,
+ 'pnfdVersion': None,
+ 'pnfdProvider': None,
+ 'pnfdInvariantId': None,
+ 'pnfdOnboardingState': 'CREATED',
+ 'onboardingFailureDetails': None,
+ 'pnfdUsageState': 'NOT_IN_USE',
+ 'userDefinedData': self.user_defined_data,
+ '_links': None
+ }
+ self.nsdModel = {
+ "pnfs": [{"properties": {"id": "m6000_s"}}]
+ }
+
+ def tearDown(self):
+ file_path = os.path.join(CATALOG_ROOT_PATH, "22")
+ if os.path.exists(file_path):
+ shutil.rmtree(file_path)
+
+ def test_pnfd_create_normal(self):
+ request_data = {'userDefinedData': self.user_defined_data}
+        expected_response_data = {
+ 'pnfdOnboardingState': 'CREATED',
+ 'pnfdUsageState': 'NOT_IN_USE',
+ 'userDefinedData': self.user_defined_data,
+ '_links': None
+ }
+
+ response = self.client.post(
+ '/api/nsd/v1/pnf_descriptors',
+ data=request_data,
+ format='json'
+ )
+ response.data.pop('id')
+ self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+        self.assertEqual(expected_response_data, response.data)
+
+ def test_query_multiple_pnfds_normal(self):
+        expected_response_data = [
+ copy.deepcopy(self.expected_pnfd_info),
+ copy.deepcopy(self.expected_pnfd_info)
+ ]
+        expected_response_data[0]['id'] = '0'
+        expected_response_data[1]['id'] = '1'
+
+ user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
+ for i in range(2):
+ PnfPackageModel(
+ pnfPackageId=str(i),
+ onboardingState='CREATED',
+ usageState='NOT_IN_USE',
+ userDefinedData=user_defined_data
+ ).save()
+ response = self.client.get('/api/nsd/v1/pnf_descriptors', format='json')
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(expected_response_data, response.data)
+
+ def test_query_single_pnfd_normal(self):
+        expected_response_data = copy.deepcopy(self.expected_pnfd_info)
+        expected_response_data['id'] = '22'
+
+ user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
+ PnfPackageModel(
+ pnfPackageId='22',
+ onboardingState='CREATED',
+ usageState='NOT_IN_USE',
+ userDefinedData=user_defined_data
+ ).save()
+
+ response = self.client.get('/api/nsd/v1/pnf_descriptors/22', format='json')
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(expected_response_data, response.data)
+
+ def test_query_single_pnfd_failed(self):
+ response = self.client.get('/api/nsd/v1/pnf_descriptors/22', format='json')
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ def test_delete_single_pnfd_normal(self):
+ user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
+ PnfPackageModel(
+ pnfPackageId='22',
+ usageState=PKG_STATUS.NOT_IN_USE,
+ userDefinedData=user_defined_data,
+ pnfdModel='test'
+ ).save()
+ NSPackageModel.objects.create(
+ nsPackageId="111",
+ nsdModel=json.JSONEncoder().encode(self.nsdModel)
+ )
+ resp = self.client.delete("/api/nsd/v1/pnf_descriptors/22", format='json')
+ self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
+ self.assertEqual(None, resp.data)
+
+ def test_delete_single_pnfd_when_not_exist(self):
+ resp = self.client.delete("/api/nsd/v1/pnf_descriptors/22", format='json')
+ self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
+ self.assertEqual(None, resp.data)
+
+ @mock.patch.object(toscaparser, "parse_pnfd")
+ def test_pnfd_content_upload_normal(self, mock_parse_pnfd):
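+        # The uploaded file content is a dummy; the descriptor fields asserted below
+        # come from the mocked toscaparser output (pnfd_data).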
+ user_defined_data_json = json.JSONEncoder().encode(self.user_defined_data)
+ PnfPackageModel(
+ pnfPackageId='22',
+ usageState=PKG_STATUS.NOT_IN_USE,
+ userDefinedData=user_defined_data_json,
+ ).save()
+ mock_parse_pnfd.return_value = json.JSONEncoder().encode(pnfd_data)
+ with open('pnfd_content.txt', 'wt') as fp:
+ fp.write('test')
+
+ with open('pnfd_content.txt', 'rt') as fp:
+ resp = self.client.put(
+ "/api/nsd/v1/pnf_descriptors/22/pnfd_content",
+ {'file': fp},
+ )
+ pnf_pkg = PnfPackageModel.objects.filter(pnfPackageId="22")
+ self.assertEqual(pnf_pkg[0].pnfdId, "zte-1.0")
+ self.assertEqual(pnf_pkg[0].onboardingState, PKG_STATUS.ONBOARDED)
+ self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
+ self.assertEqual(None, resp.data)
+ os.remove('pnfd_content.txt')
+
+ def test_pnfd_content_upload_when_pnf_not_exist(self):
+ with open('pnfd_content.txt', 'wt') as fp:
+ fp.write('test')
+
+ with open('pnfd_content.txt', 'rt') as fp:
+ resp = self.client.put(
+ "/api/nsd/v1/pnf_descriptors/22/pnfd_content",
+ {'file': fp},
+ )
+ self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(toscaparser, "parse_pnfd")
+ def test_pnfd_content_upload_when_pnfd_exist(self, mock_parse_pnfd):
+ with open('pnfd_content.txt', 'wt') as fp:
+ fp.write('test')
+ PnfPackageModel(
+ pnfPackageId='22',
+ usageState=PKG_STATUS.NOT_IN_USE,
+ pnfdId="zte-1.1"
+ ).save()
+ PnfPackageModel(
+ pnfPackageId='23',
+ usageState=PKG_STATUS.NOT_IN_USE,
+ pnfdId="zte-1.0"
+ ).save()
+ mock_parse_pnfd.return_value = json.JSONEncoder().encode(pnfd_data)
+ with open('pnfd_content.txt', 'rt') as fp:
+ resp = self.client.put(
+ "/api/nsd/v1/pnf_descriptors/22/pnfd_content",
+ {'file': fp},
+ )
+ self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ def test_pnfd_download_normal(self):
+ with open('pnfd_content.txt', 'wt') as fp:
+ fp.writelines('test1')
+ fp.writelines('test2')
+ user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
+ PnfPackageModel(
+ pnfPackageId='22',
+ usageState=PKG_STATUS.NOT_IN_USE,
+ onboardingState=PKG_STATUS.ONBOARDED,
+ userDefinedData=user_defined_data,
+ localFilePath="pnfd_content.txt",
+ pnfdModel='test'
+ ).save()
+ resp = self.client.get("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
+ file_content = ""
+ for data in resp.streaming_content:
+ file_content = '%s%s' % (file_content, data.decode())
+ self.assertEqual(resp.status_code, status.HTTP_200_OK)
+ self.assertEqual("test1test2", file_content)
+ os.remove('pnfd_content.txt')
+
+ def test_pnfd_download_failed(self):
+ response = self.client.get("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ def test_pnfd_download_when_not_on_boarded(self):
+ with open('pnfd_content.txt', 'wt') as fp:
+ fp.writelines('test1')
+ fp.writelines('test2')
+ user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
+ PnfPackageModel(
+ pnfPackageId='22',
+ usageState=PKG_STATUS.NOT_IN_USE,
+ onboardingState=PKG_STATUS.CREATED,
+ userDefinedData=user_defined_data,
+ localFilePath="pnfd_content.txt",
+ pnfdModel='test'
+ ).save()
+ response = self.client.get("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+ os.remove('pnfd_content.txt')
+
+ @mock.patch.object(PnfDescriptor, "create")
+ def test_pnfd_create_when_catch_exception(self, mock_create):
+ request_data = {'userDefinedData': self.user_defined_data}
+ mock_create.side_effect = TypeError('integer type')
+ response = self.client.post('/api/nsd/v1/pnf_descriptors', data=request_data, format='json')
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(PnfDescriptor, "delete_single")
+ def test_delete_single_when_catch_exception(self, mock_delete_single):
+ mock_delete_single.side_effect = TypeError("integer type")
+ response = self.client.delete("/api/nsd/v1/pnf_descriptors/22", format='json')
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(PnfDescriptor, "query_single")
+ def test_query_single_when_catch_exception(self, mock_query_single):
+ mock_query_single.side_effect = TypeError("integer type")
+ response = self.client.get('/api/nsd/v1/pnf_descriptors/22', format='json')
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(PnfDescriptor, "query_multiple")
+    def test_query_multiple_when_catch_exception(self, mock_query_multiple):
+        mock_query_multiple.side_effect = TypeError("integer type")
+ response = self.client.get('/api/nsd/v1/pnf_descriptors', format='json')
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(PnfDescriptor, "upload")
+ def test_upload_when_catch_exception(self, mock_upload):
+ mock_upload.side_effect = TypeError("integer type")
+ response = self.client.put("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(PnfDescriptor, "download")
+ def test_download_when_catch_exception(self, mock_download):
+ mock_download.side_effect = TypeError("integer type")
+ response = self.client.get("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(toscaparser, 'parse_pnfd')
+ def test_pnfd_parse_normal(self, mock_parse_pnfd):
+ PnfPackageModel(pnfPackageId="8", pnfdId="10").save()
+ mock_parse_pnfd.return_value = json.JSONEncoder().encode({"c": "d"})
+ req_data = {"csarId": "8", "inputs": []}
+ resp = self.client.post("/api/catalog/v1/parserpnfd", req_data, format='json')
+ self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+ self.assertEqual({"model": '{"c": "d"}'}, resp.data)
diff --git a/catalog/packages/tests/test_service_descriptor.py b/catalog/packages/tests/test_service_descriptor.py
new file mode 100644
index 0000000..08a6f03
--- /dev/null
+++ b/catalog/packages/tests/test_service_descriptor.py
@@ -0,0 +1,95 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+import logging
+
+from django.test import TestCase
+from mock import mock
+
+from catalog.packages.biz.service_descriptor import ServiceDescriptor
+from catalog.packages.const import PKG_STATUS
+from catalog.pub.database.models import ServicePackageModel, VnfPackageModel, PnfPackageModel
+from catalog.pub.exceptions import PackageNotFoundException
+from catalog.pub.utils import toscaparser
+from .const import sd_data
+
+logger = logging.getLogger(__name__)
+
+
+class TestServiceDescription(TestCase):
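+    """Tests for the ServiceDescriptor business logic: create, parse and delete."""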
+
+ def setUp(self):
+ self.user_defined_data = {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ }
+ self.data = {
+ 'userDefinedData': self.user_defined_data,
+ }
+ self.sd_data = sd_data
+ ServicePackageModel.objects.filter().delete()
+
+ def tearDown(self):
+ pass
+
+ def test_create(self):
+ result_data = ServiceDescriptor().create(self.data)
+ self.assertIsNotNone(result_data['id'])
+ service_package = ServicePackageModel.objects.filter(servicePackageId=result_data['id'])[0]
+ self.assertIsNotNone(service_package)
+ self.assertEqual(PKG_STATUS.DISABLED, service_package.operationalState)
+ self.assertEqual(PKG_STATUS.CREATED, service_package.onboardingState)
+ self.assertEqual(PKG_STATUS.NOT_IN_USE, service_package.usageState)
+
+ def test_create_with_csarid(self):
+ csar_id = '0b667470-e6b3-4ee8-8f08-186317a04dc2'
+ result_data = ServiceDescriptor().create(self.data, csar_id)
+ self.assertEqual(csar_id, result_data['id'])
+ service_package = ServicePackageModel.objects.filter(servicePackageId=csar_id)[0]
+ self.assertIsNotNone(service_package)
+ self.assertEqual(PKG_STATUS.DISABLED, service_package.operationalState)
+ self.assertEqual(PKG_STATUS.CREATED, service_package.onboardingState)
+ self.assertEqual(PKG_STATUS.NOT_IN_USE, service_package.usageState)
+
+ @mock.patch.object(toscaparser, 'parse_sd')
+ def test_parse_serviced_and_save(self, mock_parse_sd):
+ mock_parse_sd.return_value = json.JSONEncoder().encode(self.sd_data)
+        service_desc = ServiceDescriptor()
+        csar_id = '0b667470-e6b3-4ee8-8f08-186317a04dc2'
+        service_desc.create(self.data, csar_id)
+        VnfPackageModel(vnfPackageId="1", vnfdId="cd557883-ac4b-462d-aa01-421b5fa606b1").save()
+        PnfPackageModel(pnfPackageId="1", pnfdId="m6000_s").save()
+        local_file_name = "/test.csar"
+        service_desc.parse_serviced_and_save(csar_id, local_file_name)
+
+ service_package = ServicePackageModel.objects.filter(servicePackageId=csar_id)[0]
+ self.assertIsNotNone(service_package)
+
+ def test_delete_single(self):
+        service_desc = ServiceDescriptor()
+        csar_id = '0b667470-e6b3-4ee8-8f08-186317a04dc2'
+        service_desc.create(self.data, csar_id)
+
+        service_desc.delete_single(csar_id)
+ self.assertTrue(len(ServicePackageModel.objects.filter(servicePackageId=csar_id)) == 0)
+ self.assertFalse(ServicePackageModel.objects.filter(servicePackageId=csar_id).exists())
+
+ def test_delete_single_not_exists(self):
+ csar_id = "8000"
+ try:
+ ServiceDescriptor().delete_single(csar_id)
+ except Exception as e:
+ self.assertTrue(isinstance(e, PackageNotFoundException))
+ self.assertEqual("Service package[8000] not Found.", e.args[0])
diff --git a/catalog/packages/tests/test_servicepackage.py b/catalog/packages/tests/test_servicepackage.py
new file mode 100644
index 0000000..241d80d
--- /dev/null
+++ b/catalog/packages/tests/test_servicepackage.py
@@ -0,0 +1,481 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+
+from django.test import TestCase, Client
+from mock import mock
+from rest_framework import status
+
+from catalog.packages.biz.sdc_service_package import ServicePackage
+from catalog.packages.const import PKG_STATUS
+from catalog.pub.database.models import ServicePackageModel, VnfPackageModel, PnfPackageModel
+from catalog.pub.exceptions import PackageNotFoundException, PackageHasExistsException, CatalogException
+from catalog.pub.msapi import sdc
+from catalog.pub.utils import toscaparser
+
+PARSER_BASE_URL = "/api/parser/v1"
+
+
+class TestServicePackage(TestCase):
+ """ Test case for Service Package operations"""
+
+ def setUp(self):
+ self.client = Client()
+ ServicePackageModel.objects.filter().delete()
+ self.sd_data = {
+ "inputs": {
+ "sdwanvpnresource_list": [
+ {
+ "sdwanvpn_topology": "",
+ "required": True,
+ "type": "string"
+ },
+ {
+ "sdwansitelan_list": [
+ {
+ "deviceName": "",
+ "required": True,
+ "type": "string",
+ "description": "The device name in the site"
+ }
+ ]
+ }
+ ],
+ "sdwansiteresource_list": [
+ {
+ "sdwansite_controlPoint": "",
+ "required": False,
+ "type": "string",
+ "description": "The control point of the site,only for sd-wan-edge"
+ },
+ {
+ "sdwandevice_list": [
+ {
+ "systemIp": "",
+ "required": False,
+ "type": "string",
+ "description": "The system ip of the device"
+ }
+ ]
+ }
+ ]
+ },
+ "pnfs": [
+ {
+ "pnf_id": "m6000_s",
+ "cps": [],
+ "description": "",
+ "properties": {
+ "vendor": "zte",
+ "request_reclassification": False,
+ "pnf_type": "m6000s",
+ "version": "1.0",
+ "management_address": "111111",
+ "id": "m6000_s",
+ "nsh_aware": False
+ }
+ }
+ ],
+ "description": "",
+ "graph": {
+ "sdwansiteresource": [
+ "sdwanvpnresource"
+ ],
+ "sdwanvpnresource": []
+ },
+ "basepath": "c:\\users\\cmcc\\appdata\\local\\temp\\tmpn79jwc\\Definitions",
+ "vnfs": [
+ {
+ "vnf_id": "sdwansiteresource",
+ "description": "",
+ "properties": {
+ "sdwandevice_type": "",
+ "sdwandevice_class": "PNF",
+ "multi_stage_design": "False",
+ "min_instances": "1",
+ "sdwansite_controlPoint": "",
+ "id": "cd557883-ac4b-462d-aa01-421b5fa606b1",
+ "sdwansite_longitude": "",
+ "sdwansite_latitude": "",
+ "sdwansite_postcode": "",
+ "sdwansite_type": "",
+ "nf_naming": {
+ "ecomp_generated_naming": True
+ },
+ "sdwansite_emails": "",
+ "sdwansite_role": "",
+ "vnfm_info": "",
+ "sdwansite_address": "",
+ "sdwansite_description": "",
+ "availability_zone_max_count": "1",
+ "sdwansite_name": ""
+ },
+ "dependencies": [],
+ "networks": [],
+ "metadata": {
+ "category": "Configuration",
+ "subcategory": "Configuration",
+ "UUID": "cd557883-ac4b-462d-aa01-421b5fa606b1",
+ "invariantUUID": "c83b621e-e267-4910-a75a-a2a5957296e4",
+ "name": "sdwansiteresource",
+ "customizationUUID": "673dd6b3-3a06-4ef0-8ad0-8c26224b08f7",
+ "resourceVendorRelease": "1.0",
+ "version": "1.0",
+ "resourceVendor": "onap",
+ "resourceVendorModelNumber": "",
+ "type": "VF",
+ "description": "sdwansiteresource"
+ }
+ }
+ ],
+ "vls": [],
+ "service": {
+ "type": "org.openecomp.service.EnhanceService",
+ "requirements": {
+ "sdwanvpnresource.sdwanvpn.dependency": [
+ "sdwanvpnresource",
+ "sdwanvpn.dependency"
+ ],
+ "sdwansiteresource.sdwansitewan.dependency": [
+ "sdwansiteresource",
+ "sdwansitewan.dependency"
+ ],
+ "sdwansiteresource.sdwandevice.dependency": [
+ "sdwansiteresource",
+ "sdwandevice.dependency"
+ ],
+ "sdwanvpnresource.sdwansitelan.dependency": [
+ "sdwanvpnresource",
+ "sdwansitelan.dependency"
+ ],
+ "sdwanvpnresource.sdwanvpn.device": [
+ "sdwanvpnresource",
+ "sdwanvpn.device"
+ ],
+ "sdwansiteresource.sdwansite.device": [
+ "sdwansiteresource",
+ "sdwansite.device"
+ ],
+ "sdwansiteresource.sdwansite.dependency": [
+ "sdwansiteresource",
+ "sdwansite.dependency"
+ ],
+ "sdwanvpnresource.sdwansitelan.device": [
+ "sdwanvpnresource",
+ "sdwansitelan.device"
+ ],
+ "sdwansiteresource.sdwansitewan.device": [
+ "sdwansiteresource",
+ "sdwansitewan.device"
+ ],
+ "sdwansiteresource.sdwandevice.device": [
+ "sdwansiteresource",
+ "sdwandevice.device"
+ ]
+ },
+ "properties": {
+ "descriptor_id": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
+ "designer": "",
+ "invariant_id": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
+ "name": "Enhance_Service",
+ "verison": ""
+ },
+ "capabilities": {
+ "sdwansiteresource.sdwandevice.feature": [
+ "sdwansiteresource",
+ "sdwandevice.feature"
+ ],
+ "sdwanvpnresource.sdwanvpn.feature": [
+ "sdwanvpnresource",
+ "sdwanvpn.feature"
+ ],
+ "sdwanvpnresource.sdwanvpn.link": [
+ "sdwanvpnresource",
+ "sdwanvpn.link"
+ ],
+ "sdwansiteresource.sdwansite.feature": [
+ "sdwansiteresource",
+ "sdwansite.feature"
+ ],
+ "sdwansiteresource.sdwansitewan.feature": [
+ "sdwansiteresource",
+ "sdwansitewan.feature"
+ ],
+ "sdwanvpnresource.sdwansitelan.feature": [
+ "sdwanvpnresource",
+ "sdwansitelan.feature"
+ ]
+ },
+ "metadata": {
+ "category": "E2E Service",
+ "serviceType": "",
+ "description": "Enhance_Service",
+ "instantiationType": "A-la-carte",
+ "type": "Service",
+ "environmentContext": "General_Revenue-Bearing",
+ "serviceEcompNaming": True,
+ "UUID": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
+ "ecompGeneratedNaming": True,
+ "serviceRole": "",
+ "invariantUUID": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
+ "namingPolicy": "",
+ "name": "Enhance_Service"
+ }
+ },
+ "metadata": {
+ "category": "E2E Service",
+ "serviceType": "",
+ "description": "Enhance_Service",
+ "instantiationType": "A-la-carte",
+ "type": "Service",
+ "environmentContext": "General_Revenue-Bearing",
+ "serviceEcompNaming": True,
+ "UUID": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
+ "ecompGeneratedNaming": True,
+ "serviceRole": "",
+ "invariantUUID": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
+ "namingPolicy": "",
+ "name": "Enhance_Service"
+ }
+ }
+ self.asset_data = {
+ "uuid": "1",
+ "invariantUUID": "63eaec39-ffbe-411c-a838-448f2c73f7eb",
+ "name": "underlayvpn",
+ "version": "2.0",
+ "toscaModelURL": "/sdc/v1/catalog/resources/c94490a0-f7ef-48be-b3f8-8d8662a37236/toscaModel",
+ "category": "Volte",
+ "subCategory": "VolteVNF",
+ "resourceType": "VF",
+ "lifecycleState": "CERTIFIED",
+ "distributionStatus": "DISTRIBUTION_APPROVED",
+ "lastUpdaterUserId": "jh0003",
+ "resources": [
+ {
+ "resourceInstanceName": "contrailV2VLANSubInterface 0",
+ "resourceName": "contrailV2VLANSubInterface",
+ "resourceInvariantUUID": "4d31b775-af63-491d-89f1-254e218e7140",
+ "resourceVersion": "1.0",
+ "resoucreType": "CP",
+ "resourceUUID": "cd557883-ac4b-462d-aa01-421b5fa606b1"
+ },
+ {
+ "resourceInstanceName": "Network 0",
+ "resourceName": "Network",
+ "resourceInvariantUUID": "f90f567e-7d7d-4216-af38-6bca0637c59f",
+ "resourceVersion": "1.0",
+ "resoucreType": "VL",
+ "resourceUUID": "m6000_s"
+ }
+ ]
+ }
+
+ def tearDown(self):
+ pass
+
+ ###############################################################
+
+ def test_service_pkg_distribute_when_pkg_exists(self):
+ ServicePackageModel(servicePackageId="1", servicedId="2").save()
+ csar_id = "1"
+ try:
+ ServicePackage().on_distribute(csar_id)
+ except PackageHasExistsException as e:
+ self.assertEqual("Service CSAR(1) already exists.", e.args[0])
+
+ @mock.patch.object(sdc, 'get_asset')
+ def test_service_pkg_distribute_when_fail_get_artifacts(self, mock_get_asset):
+ mock_get_asset.side_effect = CatalogException("Failed to query artifact(services,1) from sdc.")
+ csar_id = "1"
+ try:
+ ServicePackage().on_distribute(csar_id)
+ except Exception as e:
+ self.assertTrue(isinstance(e, CatalogException))
+ self.assertEqual("Failed to query artifact(services,1) from sdc.", e.args[0])
+
+ @mock.patch.object(sdc, 'get_asset')
+ def test_service_pkg_distribute_when_resource_not_distribute(self, mock_get_asset):
+ mock_get_asset.return_value = self.asset_data
+ csar_id = "1"
+ try:
+ ServicePackage().on_distribute(csar_id)
+ except Exception as e:
+ self.assertTrue(isinstance(e, CatalogException))
+ self.assertEqual("Resource (cd557883-ac4b-462d-aa01-421b5fa606b1) is not distributed.", e.args[0])
+
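+    # mock.patch decorators are applied bottom-up, so the decorator closest to the
+    # function supplies the first mock argument (download_artifacts here).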
+ @mock.patch.object(sdc, 'get_asset')
+ @mock.patch.object(sdc, 'download_artifacts')
+    def test_service_pkg_distribute_when_fail_download_artifacts(self, mock_download_artifacts, mock_get_asset):
+ mock_get_asset.return_value = self.asset_data
+ mock_download_artifacts.side_effect = CatalogException("Failed to download 1 from sdc.")
+ csar_id = "1"
+ VnfPackageModel(vnfPackageId="cd557883-ac4b-462d-aa01-421b5fa606b1",
+ vnfdId="cd557883-ac4b-462d-aa01-421b5fa606b1").save()
+ PnfPackageModel(pnfPackageId="m6000_s", pnfdId="m6000_s").save()
+
+ try:
+ ServicePackage().on_distribute(csar_id)
+ except Exception as e:
+ self.assertTrue(isinstance(e, CatalogException))
+ self.assertEqual("Failed to download 1 from sdc.", e.args[0])
+
+ @mock.patch.object(sdc, 'get_asset')
+ @mock.patch.object(sdc, 'download_artifacts')
+ @mock.patch.object(toscaparser, 'parse_sd')
+ def test_service_pkg_distribute(self, mock_parse_sd, mock_download_artifacts, mock_get_asset):
+ mock_parse_sd.return_value = json.JSONEncoder().encode(self.sd_data)
+ mock_download_artifacts.return_value = "/test.csar"
+ mock_get_asset.return_value = self.asset_data
+ VnfPackageModel(vnfPackageId="cd557883-ac4b-462d-aa01-421b5fa606b1",
+ vnfdId="cd557883-ac4b-462d-aa01-421b5fa606b1").save()
+ PnfPackageModel(pnfPackageId="m6000_s", pnfdId="m6000_s").save()
+ ServicePackage().on_distribute(csar_id="1")
+
+ service_package = ServicePackageModel.objects.filter(servicePackageId="1").first()
+ self.assertEqual("5de07996-7ff0-4ec1-b93c-e3a00bb3f207", service_package.invariantId)
+ self.assertEqual("Enhance_Service", service_package.servicedName)
+ self.assertEqual(PKG_STATUS.ONBOARDED, service_package.onboardingState)
+ self.assertEqual(PKG_STATUS.ENABLED, service_package.operationalState)
+ self.assertEqual(PKG_STATUS.NOT_IN_USE, service_package.usageState)
+
+ def test_api_service_pkg_distribute_when_pkg_exists(self):
+ ServicePackageModel(servicePackageId="1", servicedId="2").save()
+ resp = self.client.post(
+ PARSER_BASE_URL + "/service_packages", {"csarId": "1"}, format='json')
+ self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
+ self.assertEqual("Service CSAR(1) already exists.", resp.data["errorMessage"])
+
+ ###############################################################
+
+ def test_service_pkg_get_all(self):
+ ServicePackageModel(
+ servicePackageId="13",
+ servicedId="2",
+ servicedDesigner="2",
+ servicedVersion="2",
+ servicePackageUri="13.csar",
+ servicedModel="").save()
+ ServicePackageModel(
+ servicePackageId="14",
+ servicedId="3",
+ servicedDesigner="3",
+ servicedVersion="3",
+ servicePackageUri="14.csar",
+ servicedModel="").save()
+ csars = ServicePackage().get_csars()
+ self.assertEqual(2, len(csars))
+
+ def test_api_service_pkg_get_all(self):
+ ServicePackageModel(
+ servicePackageId="13",
+ servicedId="2",
+ servicedDesigner="2",
+ servicedVersion="2",
+ servicePackageUri="13.csar",
+ servicedModel="").save()
+ ServicePackageModel(
+ servicePackageId="14",
+ servicedId="3",
+ servicedDesigner="3",
+ servicedVersion="3",
+ servicePackageUri="14.csar",
+ servicedModel="").save()
+ resp = self.client.get(PARSER_BASE_URL + "/service_packages")
+ self.assertEqual(resp.status_code, status.HTTP_200_OK)
+
+ ###############################################################
+
+ def test_service_pkg_get_one(self):
+ ServicePackageModel(
+ servicePackageId="14",
+ servicedId="2",
+ servicedDesigner="3",
+ servicedVersion="4",
+ servicePackageUri="14.csar",
+ servicedModel="").save()
+ csar = ServicePackage().get_csar(14)
+ self.assertEqual(14, csar['csarId'])
+
+ def test_service_pkg_get_one_not_found(self):
+ try:
+ ServicePackage().get_csar(1000)
+ except PackageNotFoundException as e:
+ self.assertEqual("Service package[1000] not Found.", e.args[0])
+
+ def test_api_service_pkg_get_one(self):
+ ServicePackageModel(
+ servicePackageId="14",
+ servicedId="2",
+ servicedDesigner="3",
+ servicedVersion="4",
+ servicePackageUri="14.csar",
+ servicedModel="").save()
+ resp = self.client.get(PARSER_BASE_URL + "/service_packages/14")
+ self.assertEqual(resp.status_code, status.HTTP_200_OK)
+
+ def test_api_service_pkg_get_one_not_found(self):
+ resp = self.client.get(PARSER_BASE_URL + "/service_packages/22")
+ self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
+ self.assertEqual(
+ {"errorMessage": "Service package[22] not Found.", 'error': 404},
+ resp.data)
+
+ ###############################################################
+
+ def test_service_pkg_normal_delete(self):
+ ServicePackageModel(servicePackageId="8", servicedId="2").save()
+ sp = ServicePackageModel.objects.filter(servicePackageId=8)
+ self.assertEqual(1, len(sp))
+ ServicePackage().delete_csar("8")
+ sp = ServicePackageModel.objects.filter(servicePackageId=8)
+ self.assertEqual(0, len(sp))
+
+ def test_service_pkg_normal_delete_not_found(self):
+ try:
+ ServicePackage().delete_csar("8000")
+ except PackageNotFoundException as e:
+ self.assertEqual("Service package[8000] not Found.", e.args[0])
+
+ def test_api_service_pkg_normal_delete(self):
+ ServicePackageModel(servicePackageId="8", servicedId="2").save()
+ resp = self.client.delete(PARSER_BASE_URL + "/service_packages/8")
+ self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
+
+ ###############################################################
+
+ @mock.patch.object(toscaparser, 'parse_sd')
+ def test_service_pkg_parser(self, mock_parse_sd):
+ ServicePackageModel(servicePackageId="8", servicedId="2").save()
+ mock_parse_sd.return_value = json.JSONEncoder().encode({"a": "b"})
+
+ inputs = []
+ ret = ServicePackage().parse_serviced("8", inputs)
+        self.assertEqual({"model": '{"a": "b"}'}, ret)
+
+ def test_service_pkg_parser_not_found(self):
+ try:
+ csar_id = "8000"
+ inputs = []
+ ServicePackage().parse_serviced(csar_id, inputs)
+ except PackageNotFoundException as e:
+ self.assertEqual("Service CSAR(8000) does not exist.", e.args[0])
+
+ def test_api_service_pkg_parser_not_found(self):
+ query_data = {
+ "csarId": "1",
+ "packageType": "Service",
+ "inputs": "string"
+ }
+ resp = self.client.post(PARSER_BASE_URL + "/parser", query_data, format='json')
+ self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
diff --git a/catalog/packages/tests/test_vnf_package.py b/catalog/packages/tests/test_vnf_package.py
new file mode 100644
index 0000000..b83268a
--- /dev/null
+++ b/catalog/packages/tests/test_vnf_package.py
@@ -0,0 +1,382 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+import urllib
+import mock
+import shutil
+
+from django.test import TestCase
+from rest_framework import status
+from rest_framework.test import APIClient
+
+from catalog.packages.biz.vnf_package import VnfPackage, VnfPkgUploadThread
+from catalog.packages.const import PKG_STATUS
+from catalog.packages.tests.const import vnfd_data
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import VnfPackageModel
+from catalog.pub.utils import toscaparser
+
+
+class MockReq():
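+    """Minimal stand-in for the response object returned by urllib.request.urlopen."""
+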
+ def read(self):
+ return "1"
+
+ def close(self):
+ pass
+
+
+class TestVnfPackage(TestCase):
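+    """Tests for VNF package management under /api/vnfpkgm/v1/vnf_packages."""
+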
+ def setUp(self):
+ self.client = APIClient()
+
+ def tearDown(self):
+ file_path = os.path.join(CATALOG_ROOT_PATH, "222")
+ if os.path.exists(file_path):
+ shutil.rmtree(file_path)
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ def test_upload_vnf_pkg(self, mock_parse_vnfd):
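+        # "empty.txt" must already exist under CATALOG_ROOT_PATH; its content does not
+        # matter because toscaparser.parse_vnfd is mocked.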
+ data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "empty.txt"), "rt")}
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED"
+ )
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+ response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+ vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId="222")
+ self.assertEqual("zte-hss-1.0", vnf_pkg[0].vnfdId)
+ self.assertEqual(PKG_STATUS.ONBOARDED, vnf_pkg[0].onboardingState)
+ self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
+
+ def test_upload_vnf_pkg_failed(self):
+ data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "empty.txt"), "rb")}
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ )
+ response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ @mock.patch.object(urllib.request, 'urlopen')
+ def test_upload_nf_pkg_from_uri(self, mock_urlopen, mock_parse_vnfd):
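+        # urlopen is replaced with MockReq, so no network access happens; the upload
+        # thread reads the dummy content and the mocked parser supplies the VNFD.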
+ vnf_pkg = VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED"
+ )
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+ req_data = {"addressInformation": "https://127.0.0.1:1234/sdc/v1/hss.csar"}
+ mock_urlopen.return_value = MockReq()
+ vnf_pkg_id = vnf_pkg.vnfPackageId
+ VnfPkgUploadThread(req_data, vnf_pkg_id).run()
+ vnf_pkg1 = VnfPackageModel.objects.filter(vnfPackageId="222")
+ self.assertEqual("zte-hss-1.0", vnf_pkg1[0].vnfdId)
+
+ def test_upload_from_uri_failed(self):
+ req_data = {"username": "123"}
+ response = self.client.post("/api/vnfpkgm/v1/vnf_packages/111/package_content/upload_from_uri", data=req_data)
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ def test_create_vnf_pkg(self):
+ req_data = {
+ "userDefinedData": {"a": "A"}
+ }
+ response = self.client.post("/api/vnfpkgm/v1/vnf_packages", data=req_data, format="json")
+ resp_data = json.loads(response.content)
+ expect_resp_data = {
+ "id": resp_data.get("id"),
+ "onboardingState": "CREATED",
+ "operationalState": "DISABLED",
+ "usageState": "NOT_IN_USE",
+ "userDefinedData": {"a": "A"},
+ "_links": None # TODO
+ }
+ self.assertEqual(expect_resp_data, resp_data)
+ self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+
+ def test_query_single_vnf(self):
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ vnfdId="zte-hss-1.0",
+ vnfVendor="zte",
+ vnfdProductName="hss",
+ vnfSoftwareVersion="1.0.0",
+ vnfdVersion="1.0.0",
+ checksum='{"algorithm":"111", "hash": "11"}',
+ onboardingState="CREATED",
+ operationalState="DISABLED",
+ usageState="NOT_IN_USE",
+ userDefinedData='{"a": "A"}'
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222")
+ expect_data = {
+ "id": "222",
+ "vnfdId": "zte-hss-1.0",
+ "vnfProductName": "hss",
+ "vnfSoftwareVersion": "1.0.0",
+ "vnfdVersion": "1.0.0",
+ "checksum": {"algorithm": "111", "hash": "11"},
+ "softwareImages": None,
+ "additionalArtifacts": None,
+ "onboardingState": "CREATED",
+ "operationalState": "DISABLED",
+ "usageState": "NOT_IN_USE",
+ "userDefinedData": {"a": "A"},
+ "_links": None
+ }
+ self.assertEqual(response.data, expect_data)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ def test_query_single_vnf_failed(self):
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222")
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ def test_query_multiple_vnf(self):
+ VnfPackageModel.objects.create(
+ vnfPackageId="111",
+ vnfdId="zte-hss-1.0",
+ vnfVendor="zte",
+ vnfdProductName="hss",
+ vnfSoftwareVersion="1.0.0",
+ vnfdVersion="1.0.0",
+ checksum='{"algorithm":"111", "hash": "11"}',
+ onboardingState="CREATED",
+ operationalState="DISABLED",
+ usageState="NOT_IN_USE",
+ userDefinedData='{"a": "A"}'
+ )
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ vnfdId="zte-hss-1.0",
+ vnfVendor="zte",
+ vnfdProductName="hss",
+ vnfSoftwareVersion="1.0.0",
+ vnfdVersion="1.0.0",
+ checksum='{"algorithm":"111", "hash": "11"}',
+ onboardingState="CREATED",
+ operationalState="DISABLED",
+ usageState="NOT_IN_USE",
+ userDefinedData='{"a": "A"}'
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages")
+ expect_data = [
+ {
+ "id": "111",
+ "vnfdId": "zte-hss-1.0",
+ "vnfProductName": "hss",
+ "vnfSoftwareVersion": "1.0.0",
+ "vnfdVersion": "1.0.0",
+ "checksum": {"algorithm": "111", "hash": "11"},
+ "softwareImages": None,
+ "additionalArtifacts": None,
+ "onboardingState": "CREATED",
+ "operationalState": "DISABLED",
+ "usageState": "NOT_IN_USE",
+ "userDefinedData": {"a": "A"},
+ "_links": None
+ },
+ {
+ "id": "222",
+ "vnfdId": "zte-hss-1.0",
+ "vnfProductName": "hss",
+ "vnfSoftwareVersion": "1.0.0",
+ "vnfdVersion": "1.0.0",
+ "checksum": {"algorithm": "111", "hash": "11"},
+ "softwareImages": None,
+ "additionalArtifacts": None,
+ "onboardingState": "CREATED",
+ "operationalState": "DISABLED",
+ "usageState": "NOT_IN_USE",
+ "userDefinedData": {"a": "A"},
+ "_links": None
+ }
+ ]
+ self.assertEqual(response.data, expect_data)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ def test_delete_single_vnf_pkg(self):
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ vnfdId="zte-hss-1.0",
+ vnfVendor="zte",
+ vnfdProductName="hss",
+ vnfSoftwareVersion="1.0.0",
+ vnfdVersion="1.0.0",
+ checksum='{"algorithm":"111", "hash": "11"}',
+ onboardingState="CREATED",
+ operationalState="DISABLED",
+ usageState="NOT_IN_USE",
+ userDefinedData='{"a": "A"}'
+ )
+ response = self.client.delete("/api/vnfpkgm/v1/vnf_packages/222")
+ self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+ self.assertEqual(response.data, None)
+
+ def test_delete_when_vnf_pkg_not_exist(self):
+ response = self.client.delete("/api/vnfpkgm/v1/vnf_packages/222")
+ self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+ self.assertEqual(response.data, None)
+
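+ # The fetch tests write a small CSAR to disk and stream it back through the API,
+ # covering full downloads as well as HTTP Range requests.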
+ def test_fetch_vnf_pkg(self):
+ with open("vnfPackage.csar", "wt") as fp:
+ fp.writelines("AAAABBBBCCCCDDDD")
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="ONBOARDED",
+ localFilePath="vnfPackage.csar"
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content")
+ file_content = ''
+ for data in response.streaming_content:
+ file_content = file_content + data.decode()
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual('AAAABBBBCCCCDDDD', file_content)
+ os.remove("vnfPackage.csar")
+
+ def test_fetch_partial_vnf_pkg(self):
+ with open("vnfPackage.csar", "wt") as fp:
+ fp.writelines("AAAABBBBCCCCDDDD")
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="ONBOARDED",
+ localFilePath="vnfPackage.csar"
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content", HTTP_RANGE="4-7")
+ partial_file_content = ''
+ for data in response.streaming_content:
+ partial_file_content = partial_file_content + data.decode()
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual('BBB', partial_file_content)
+ os.remove("vnfPackage.csar")
+
+ def test_fetch_last_partial_vnf_pkg(self):
+ with open("vnfPackage.csar", "wt") as fp:
+ fp.writelines("AAAABBBBCCCCDDDD")
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="ONBOARDED",
+ localFilePath="vnfPackage.csar"
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content", HTTP_RANGE=" 4-")
+ partial_file_content = ''
+ for data in response.streaming_content:
+ partial_file_content = partial_file_content + data.decode()
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual('BBBBCCCCDDDD', partial_file_content)
+ os.remove("vnfPackage.csar")
+
+ def test_fetch_vnf_pkg_when_pkg_not_exist(self):
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content")
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ def test_fetch_vnf_pkg_when_catch_catalog_exception(self):
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED",
+ localFilePath="vnfPackage.csar"
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(VnfPackage, "create_vnf_pkg")
+ def test_create_vnf_pkg_when_catch_exception(self, mock_create_vnf_pkg):
+ mock_create_vnf_pkg.side_effect = TypeError('integer type')
+ req_data = {
+ "userDefinedData": {"a": "A"}
+ }
+ response = self.client.post("/api/vnfpkgm/v1/vnf_packages", data=req_data, format="json")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(VnfPackage, "delete_vnf_pkg")
+ def test_delete_single_when_catch_exception(self, mock_delete_vnf_pkg):
+ mock_delete_vnf_pkg.side_effect = TypeError("integer type")
+ response = self.client.delete("/api/vnfpkgm/v1/vnf_packages/222")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(VnfPackage, "query_single")
+ def test_query_single_when_catch_exception(self, mock_query_single):
+ mock_query_single.side_effect = TypeError("integer type")
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(VnfPackage, "query_multiple")
+ def test_query_multiple_when_catch_exception(self, mock_query_multiple):
+ mock_query_multiple.side_effect = TypeError("integer type")
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ def test_upload_when_catch_exception(self, mock_parse_vnfd):
+ data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "empty.txt"), "rb")}
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED"
+ )
+ mock_parse_vnfd.side_effect = TypeError("integer type")
+ response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(VnfPkgUploadThread, 'start')
+ def test_upload_from_uri_when_catch_exception(self, mock_start):
+ req_data = {"addressInformation": "https://127.0.0.1:1234/sdc/v1/hss.csar"}
+ mock_start.side_effect = TypeError("integer type")
+ response = self.client.post("/api/vnfpkgm/v1/vnf_packages/111/package_content/upload_from_uri", data=req_data)
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(VnfPackage, 'download')
+ def test_fetch_vnf_pkg_when_catch_exception(self, mock_download):
+ mock_download.side_effect = TypeError("integer type")
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ def test_fetch_vnf_artifact(self, mock_parse_vnfd):
+ data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "resource_test.csar"), "rb")}
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED"
+ )
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+ response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+ self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/artifacts/image")
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(response.getvalue(), b"ubuntu_16.04\n")
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ def test_fetch_vnf_artifact_not_exists(self, mock_parse_vnfd):
+ data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "resource_test.csar"), "rb")}
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED"
+ )
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+ response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+ self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/1451/artifacts/image")
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ def test_fetch_vnf_artifact_vnf_not_exists(self, mock_parse_vnfd):
+ data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "resource_test.csar"), "rb")}
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED"
+ )
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+ response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+ self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/artifacts/image1")
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
diff --git a/catalog/packages/tests/test_vnf_pkg_subscription.py b/catalog/packages/tests/test_vnf_pkg_subscription.py
new file mode 100644
index 0000000..635b137
--- /dev/null
+++ b/catalog/packages/tests/test_vnf_pkg_subscription.py
@@ -0,0 +1,183 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import uuid
+import mock
+
+from rest_framework.test import APIClient
+from django.test import TestCase
+
+from catalog.pub.database.models import VnfPkgSubscriptionModel
+from .const import vnf_subscription_data
+
+
+class TestNfPackageSubscription(TestCase):
+ def setUp(self):
+ self.client = APIClient()
+ VnfPkgSubscriptionModel.objects.filter().delete()
+ self.vnf_subscription_data = vnf_subscription_data
+
+ def tearDown(self):
+ pass
+
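+ # requests.get is mocked so the test request to the callback URI succeeds (204),
+ # and uuid.uuid4 is mocked so the generated subscription id is deterministic.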
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_create_vnf_subscription(self, mock_uuid4, mock_requests):
+ temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ response = self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ self.assertEqual(201, response.status_code)
+ self.assertEqual(
+ self.vnf_subscription_data["callbackUri"],
+ response.data["callbackUri"]
+ )
+ self.assertEqual(temp_uuid, response.data["id"])
+
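+ # Creating the same subscription twice should be rejected with 303 See Other.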
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_duplicate_subscriptions(self, mock_uuid4, mock_requests):
+ temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+ temp1_uuid = "00342b18-a5c7-11e8-998c-bf1755941f12"
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.side_effect = [temp_uuid, temp1_uuid]
+ response = self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ self.assertEqual(201, response.status_code)
+ self.assertEqual(
+ self.vnf_subscription_data["callbackUri"],
+ response.data["callbackUri"]
+ )
+ self.assertEqual(temp_uuid, response.data["id"])
+ temp_uuid = "00442b18-a5c7-11e8-998c-bf1755941f12"
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ response = self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ self.assertEqual(303, response.status_code)
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_get_subscriptions(self, mock_uuid4, mock_requests):
+ temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ response = self.client.get(
+ "/api/vnfpkgm/v1/subscriptions?usageState=IN_USE",
+ format='json'
+ )
+ self.assertEqual(200, response.status_code)
+ self.assertEqual(1, len(response.data))
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_get_subscriptions_with_invalid_params(self, mock_uuid4, mock_requests):
+ temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ response = self.client.get(
+ "/api/vnfpkgm/v1/subscriptions?dummy=dummy",
+ format='json'
+ )
+ self.assertEqual(400, response.status_code)
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_get_subscription_with_id(self, mock_uuid4, mock_requests):
+ temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ response = self.client.get(
+ "/api/vnfpkgm/v1/subscriptions/%s" % temp_uuid,
+ format='json'
+ )
+ self.assertEqual(200, response.status_code)
+ self.assertEqual(temp_uuid, response.data["id"])
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_get_subscription_with_id_not_exists(self, mock_uuid4, mock_requests):
+ temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+ dummy_uuid = str(uuid.uuid4())
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ response = self.client.get(
+ "/api/vnfpkgm/v1/subscriptions/%s" % dummy_uuid,
+ format='json'
+ )
+ self.assertEqual(404, response.status_code)
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_delete_subscription_with_id(self, mock_uuid4, mock_requests):
+ temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+ dummy_uuid = str(uuid.uuid4())
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ self.client.get(
+ "/api/vnfpkgm/v1/subscriptions/%s" % dummy_uuid,
+ format='json'
+ )
+ response = self.client.delete("/api/vnfpkgm/v1/subscriptions/%s" % temp_uuid)
+ self.assertEqual(204, response.status_code)
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_delete_subscription_with_id_not_exists(self, mock_uuid4, mock_requests):
+ dummy_uuid = str(uuid.uuid4())
+ response = self.client.delete("/api/vnfpkgm/v1/subscriptions/%s" % dummy_uuid)
+ self.assertEqual(404, response.status_code)
diff --git a/catalog/packages/tests/test_vnfpackage.py b/catalog/packages/tests/test_vnfpackage.py
new file mode 100644
index 0000000..0d8cbad
--- /dev/null
+++ b/catalog/packages/tests/test_vnfpackage.py
@@ -0,0 +1,258 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import mock
+from rest_framework.test import APIClient
+from django.test import TestCase
+from rest_framework import status
+from catalog.packages.biz.sdc_vnf_package import NfDistributeThread, NfPkgDeleteThread
+from catalog.pub.database.models import JobStatusModel, JobModel
+from catalog.pub.database.models import VnfPackageModel
+from catalog.pub.msapi import sdc
+from catalog.pub.utils import restcall, toscaparser
+from .const import vnfd_data
+
+
+class TestNfPackage(TestCase):
+ def setUp(self):
+ self.client = APIClient()
+ VnfPackageModel.objects.filter().delete()
+ JobModel.objects.filter().delete()
+ JobStatusModel.objects.filter().delete()
+ self.vnfd_data = vnfd_data
+
+ def tearDown(self):
+ pass
+
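+ # Helper: NfDistributeThread/NfPkgDeleteThread record progress in JobStatusModel,
+ # so the tests assert on the expected (progress, description) entry for a job id.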
+ def assert_job_result(self, job_id, job_progress, job_detail):
+ jobs = JobStatusModel.objects.filter(
+ jobid=job_id,
+ progress=job_progress,
+ descp=job_detail)
+ self.assertEqual(1, len(jobs))
+
+ @mock.patch.object(NfDistributeThread, 'run')
+ def test_nf_pkg_distribute_normal(self, mock_run):
+ resp = self.client.post(
+ "/api/catalog/v1/vnfpackages",
+ {
+ "csarId": "1",
+ "vimIds": ["1"]
+ },
+ format='json'
+ )
+ self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+
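+ # The distribute/delete threads are invoked synchronously via run() so the
+ # resulting job status records can be asserted immediately.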
+ def test_nf_pkg_distribute_when_csar_already_exist(self):
+ VnfPackageModel(
+ vnfPackageId="1",
+ vnfdId="vcpe_vfw_zte_1_0"
+ ).save()
+ NfDistributeThread(
+ csar_id="1",
+ vim_ids=["1"],
+ lab_vim_id="",
+ job_id="2"
+ ).run()
+ self.assert_job_result("2", 255, "NF CSAR(1) already exists.")
+
+ @mock.patch.object(restcall, 'call_req')
+ @mock.patch.object(sdc, 'download_artifacts')
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ def test_nf_pkg_distribute_when_vnfd_already_exist(self,
+ mock_parse_vnfd,
+ mock_download_artifacts,
+ mock_call_req):
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode(self.vnfd_data)
+ mock_download_artifacts.return_value = "/home/hss.csar"
+ mock_call_req.return_value = [0, json.JSONEncoder().encode([{
+ "uuid": "1",
+ "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/hss.csar"
+ }]), '200']
+ VnfPackageModel(vnfPackageId="2", vnfdId="zte-hss-1.0").save()
+ NfDistributeThread(
+ csar_id="1",
+ vim_ids=["1"],
+ lab_vim_id="",
+ job_id="2"
+ ).run()
+ self.assert_job_result("2", 255, "VNF package(zte-hss-1.0) already exists.")
+
+ @mock.patch.object(restcall, 'call_req')
+ @mock.patch.object(sdc, 'download_artifacts')
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ def test_nf_pkg_distribute_successfully(self,
+ mock_parse_vnfd,
+ mock_download_artifacts,
+ mock_call_req):
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode(self.vnfd_data)
+ mock_download_artifacts.return_value = "/home/hss.csar"
+ mock_call_req.return_value = [0, json.JSONEncoder().encode([{
+ "uuid": "1",
+ "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/hss.csar"
+ }]), '200']
+ NfDistributeThread(
+ csar_id="1",
+ vim_ids=["1"],
+ lab_vim_id="",
+ job_id="4"
+ ).run()
+ self.assert_job_result("4", 100, "CSAR(1) distribute successfully.")
+
+ ###############################################################################################################
+
+ @mock.patch.object(NfPkgDeleteThread, 'run')
+ def test_nf_pkg_delete_normal(self, mock_run):
+ resp = self.client.delete("/api/catalog/v1/vnfpackages/1")
+ self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+
+ def test_nf_pkg_normal_delete(self):
+ VnfPackageModel(
+ vnfPackageId="2",
+ vnfdId="vcpe_vfw_zte_1_0"
+ ).save()
+ NfPkgDeleteThread(
+ csar_id="2",
+ job_id="2"
+ ).run()
+ self.assert_job_result("2", 100, "Delete CSAR(2) successfully.")
+
+ def test_nf_pkg_get_all(self):
+ VnfPackageModel(
+ vnfPackageId="3",
+ vnfdId="3",
+ vnfVendor='3',
+ vnfdVersion='3',
+ vnfSoftwareVersion='',
+ vnfPackageUri='',
+ vnfdModel=''
+ ).save()
+ VnfPackageModel(
+ vnfPackageId="4",
+ vnfdId="4",
+ vnfVendor='4',
+ vnfdVersion='4',
+ vnfSoftwareVersion='',
+ vnfPackageUri='',
+ vnfdModel=''
+ ).save()
+ resp = self.client.get("/api/catalog/v1/vnfpackages")
+ self.assertEqual(resp.status_code, status.HTTP_200_OK)
+ expect_data = [
+ {
+ "imageInfo": [],
+ "csarId": "3",
+ "packageInfo": {
+ "csarName": "",
+ "vnfdModel": "",
+ "vnfdProvider": "3",
+ "vnfdId": "3",
+ "downloadUrl": "http://127.0.0.1:8806/static/catalog/3/",
+ "vnfVersion": "",
+ "vnfdVersion": "3",
+ "vnfPackageId": "3"
+ }
+ },
+ {
+ "imageInfo": [],
+ "csarId": "4",
+ "packageInfo": {
+ "csarName": "",
+ "vnfdModel": "",
+ "vnfdProvider": "4",
+ "vnfdId": "4",
+ "downloadUrl": "http://127.0.0.1:8806/static/catalog/4/",
+ "vnfVersion": "",
+ "vnfdVersion": "4",
+ "vnfPackageId": "4"
+ }
+ }
+ ]
+ self.assertEqual(expect_data, resp.data)
+
+ def test_nf_pkg_get_one(self):
+ VnfPackageModel(
+ vnfPackageId="4",
+ vnfdId="4",
+ vnfVendor='4',
+ vnfdVersion='4',
+ vnfSoftwareVersion='',
+ vnfPackageUri='',
+ vnfdModel=''
+ ).save()
+
+ resp = self.client.get("/api/catalog/v1/vnfpackages/4")
+ self.assertEqual(resp.status_code, status.HTTP_200_OK)
+ expect_data = {
+ "imageInfo": [],
+ "csarId": "4",
+ "packageInfo": {
+ "csarName": "",
+ "vnfdModel": "",
+ "vnfdProvider": "4",
+ "vnfdId": "4",
+ "downloadUrl": "http://127.0.0.1:8806/static/catalog/4/",
+ "vnfVersion": "",
+ "vnfdVersion": "4",
+ "vnfPackageId": "4"
+ }
+ }
+ self.assertEqual(expect_data, resp.data)
+
+ def test_nf_pkg_get_one_failed(self):
+ VnfPackageModel(
+ vnfPackageId="4",
+ vnfdId="4",
+ vnfVendor='4',
+ vnfdVersion='4',
+ vnfSoftwareVersion='',
+ vnfPackageUri='',
+ vnfdModel=''
+ ).save()
+
+ resp = self.client.get("/api/catalog/v1/vnfpackages/2")
+ self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+ self.assertEqual({'error': 'Vnf package[2] not Found.'}, resp.data)
+
+ ###############################################################################################################
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ def test_vnfd_parse_normal(self, mock_parse_vnfd):
+ VnfPackageModel(
+ vnfPackageId="8",
+ vnfdId="10"
+ ).save()
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode({"c": "d"})
+ req_data = {
+ "csarId": "8",
+ "inputs": []
+ }
+ resp = self.client.post(
+ "/api/catalog/v1/parservnfd",
+ req_data,
+ format='json'
+ )
+ self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+ self.assertEqual({"model": '{"c": "d"}'}, resp.data)
+
+ def test_vnfd_parse_when_csar_not_exist(self):
+ req_data = {"csarId": "1", "inputs": []}
+ resp = self.client.post(
+ "/api/catalog/v1/parservnfd",
+ req_data,
+ format='json'
+ )
+ self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+ self.assertEqual(resp.data, {"error": "VNF CSAR(1) does not exist."})
diff --git a/catalog/packages/urls.py b/catalog/packages/urls.py
new file mode 100644
index 0000000..776e940
--- /dev/null
+++ b/catalog/packages/urls.py
@@ -0,0 +1,76 @@
+# Copyright 2017-2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.conf.urls import url
+
+from catalog.packages.views import vnf_package_views
+from catalog.packages.views.vnf_package_subscription_views import CreateQuerySubscriptionView,\
+ QueryTerminateSubscriptionView
+from catalog.packages.views.vnf_package_artifact_views import FetchVnfPkgmArtifactsView
+from catalog.packages.views import catalog_views, ns_descriptor_views, pnf_descriptor_views, nsdm_subscription_views
+from catalog.packages.views.health_check_views import HealthCheckView
+
+
+urlpatterns = [
+
+ # Sync package from SDC
+ url(r'^api/catalog/v1/nspackages$', catalog_views.nspackages_rc, name='nspackages_rc'),
+ url(r'^api/catalog/v1/nspackages/(?P<csarId>[0-9a-zA-Z\-\_]+)$', catalog_views.ns_rd_csar, name='nspackage_rd'),
+ url(r'^api/catalog/v1/vnfpackages$', catalog_views.nfpackages_rc, name='nfpackages_rc'),
+ url(r'^api/catalog/v1/vnfpackages/(?P<csarId>[0-9a-zA-Z\-\_]+)$', catalog_views.nf_rd_csar, name='nfpackage_rd'),
+ url(r'^api/parser/v1/service_packages$', catalog_views.servicepackages_rc, name='servicepackages_rc'),
+ url(r'^api/parser/v1/service_packages/(?P<csarId>[0-9a-zA-Z\-\_]+)$', catalog_views.service_rd_csar, name='servicepackage_rd'),
+
+ # NFV Model Parser
+ url(r'^api/parser/v1/parser$', catalog_views.model_parser, name='modelparser_rc'),
+ url(r'^api/parser/v1/parsernsd$', catalog_views.ns_model_parser, name='nsmodelparser_rc'),
+ url(r'^api/parser/v1/parservnfd$', catalog_views.vnf_model_parser, name='vnfmodelparser_rc'),
+ url(r'^api/parser/v1/parserpnfd$', pnf_descriptor_views.pnf_model_parser, name='pnfmodelparser_rc'),
+ url(r'^api/catalog/v1/parsernsd$', catalog_views.ns_model_parser, name='nsmodelparser_rc'),
+ url(r'^api/catalog/v1/parservnfd$', catalog_views.vnf_model_parser, name='vnfmodelparser_rc'),
+ url(r'^api/catalog/v1/parserpnfd$', pnf_descriptor_views.pnf_model_parser, name='pnfmodelparser_rc'),
+
+ # ETSI SOL005 NSD API
+ url(r'^api/nsd/v1/ns_descriptors$', ns_descriptor_views.ns_descriptors_rc, name='ns_descriptors_rc'),
+ url(r'^api/nsd/v1/ns_descriptors/(?P<nsdInfoId>[0-9a-zA-Z\-\_]+)$', ns_descriptor_views.ns_info_rd, name='ns_info_rd'),
+ url(r'^api/nsd/v1/ns_descriptors/(?P<nsdInfoId>[0-9a-zA-Z\-\_]+)/nsd_content$', ns_descriptor_views.nsd_content_ru, name='nsd_content_ru'),
+ url(r'^api/nsd/v1/subscriptions$', nsdm_subscription_views.nsd_subscription_rc, name='nsd_subscription_rc'),
+ url(r'^api/nsd/v1/subscriptions/(?P<subscriptionId>[0-9a-zA-Z\-\_]+)$', nsdm_subscription_views.nsd_subscription_rd, name='nsd_subscription_rd'),
+
+ # ETSI SOL005 PNFD
+ url(r'^api/nsd/v1/pnf_descriptors$', pnf_descriptor_views.pnf_descriptors_rc, name='pnf_descriptors_rc'),
+ url(r'^api/nsd/v1/pnf_descriptors/(?P<pnfdInfoId>[0-9a-zA-Z\-\_]+)$', pnf_descriptor_views.pnfd_info_rd, name='pnfd_info_rd'),
+ url(r'^api/nsd/v1/pnf_descriptors/(?P<pnfdInfoId>[0-9a-zA-Z\-\_]+)/pnfd_content$', pnf_descriptor_views.pnfd_content_ru, name='pnfd_content_ru'),
+
+ # ETSI SOL005&SOL003 VNF Package
+ url(r'^api/vnfpkgm/v1/vnf_packages$', vnf_package_views.vnf_packages_rc, name='vnf_packages_rc'),
+ url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)$', vnf_package_views.vnf_package_rd, name='vnf_package_rd'),
+ url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/package_content$', vnf_package_views.package_content_ru, name='package_content_ru'),
+ url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/package_content/upload_from_uri$', vnf_package_views.upload_from_uri_c, name='upload_from_uri_c'),
+
+ # ETSI SOL 005 VNF Package Management Subscription APIs
+ url(r'^api/vnfpkgm/v1/subscriptions$', CreateQuerySubscriptionView.as_view(), name='subscriptions_create_query'),
+ url(r'^api/vnfpkgm/v1/subscriptions/(?P<subscriptionId>[0-9a-zA-Z\-\_]+)$', QueryTerminateSubscriptionView.as_view(), name='subscriptions_query_terminate'),
+ url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/artifacts/(?P<artifactPath>[0-9a-zA-Z\-\_]+)$', FetchVnfPkgmArtifactsView.as_view(), name="fetch_vnf_artifacts"),
+ # url(r'^api/vnfpkgm/v1/subscriptions/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)$', vnf_package_subscription_views.vnf_package_subscriptions_rc, name='subscriptions_rc'),
+ # url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/vnfd$', vnfd.as_view(), name='vnfd_r'),
+ # url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/artifacts/artifactPath$', artifacts.as_view(), name='artifacts_r'),
+
+ # url(r'^api/vnfpkgm/v1/subscriptions/(?P<subscriptionId>[0-9a-zA-Z\-\_]+)$', vnfpkg_subscription.as_view(), name='subscription_rd'),
+
+ # health check
+ url(r'^api/vnfpkgm/v1/health_check$', HealthCheckView.as_view()),
+ url(r'^api/nsd/v1/health_check$', HealthCheckView.as_view()),
+ url(r'^api/catalog/v1/health_check$', HealthCheckView.as_view()),
+ url(r'^api/parser/v1/health_check$', HealthCheckView.as_view()),
+]
diff --git a/catalog/packages/views/__init__.py b/catalog/packages/views/__init__.py
new file mode 100644
index 0000000..342c2a8
--- /dev/null
+++ b/catalog/packages/views/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/packages/views/catalog_views.py b/catalog/packages/views/catalog_views.py
new file mode 100644
index 0000000..6ed9fb9
--- /dev/null
+++ b/catalog/packages/views/catalog_views.py
@@ -0,0 +1,535 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import uuid
+
+from drf_yasg import openapi
+from drf_yasg.utils import no_body, swagger_auto_schema
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+from catalog.packages.biz import sdc_vnf_package, sdc_ns_package
+from catalog.packages.biz.pnf_descriptor import PnfDescriptor
+from catalog.packages.biz.sdc_service_package import ServicePackage
+from catalog.packages.serializers.catalog_serializers import InternalErrorRequestSerializer, \
+ ServicePackageDistributeRequestSerializer, ServicePackagesSerializer, ServicePackageSerializer
+from catalog.packages.serializers.catalog_serializers import NfPackageDistributeRequestSerializer
+from catalog.packages.serializers.catalog_serializers import NfPackageSerializer
+from catalog.packages.serializers.catalog_serializers import NfPackagesSerializer
+from catalog.packages.serializers.catalog_serializers import NsPackageDistributeRequestSerializer
+from catalog.packages.serializers.catalog_serializers import NsPackageDistributeResponseSerializer
+from catalog.packages.serializers.catalog_serializers import NsPackageSerializer
+from catalog.packages.serializers.catalog_serializers import NsPackagesSerializer
+from catalog.packages.serializers.catalog_serializers import ParseModelRequestSerializer
+from catalog.packages.serializers.catalog_serializers import ParseModelResponseSerializer
+from catalog.packages.serializers.catalog_serializers import PostJobResponseSerializer
+from catalog.packages.views.common import fmt_error_rsp
+from catalog.pub.exceptions import PackageNotFoundException, PackageHasExistsException
+from catalog.pub.utils.syscomm import fun_name
+from catalog.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
+@swagger_auto_schema(
+ method='POST',
+ operation_description="On distribute NS package",
+ request_body=NsPackageDistributeRequestSerializer,
+ responses={
+ status.HTTP_202_ACCEPTED: NsPackageDistributeResponseSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query NS packages",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: NsPackagesSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
+@api_view(http_method_names=['POST', 'GET'])
+def nspackages_rc(request, *args, **kwargs):
+ logger.debug("Enter %s, method is %s", fun_name(), request.method)
+ ret, normal_status, response_serializer, validation_error = None, None, None, None
+
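+ # The sdc_ns_package helpers return [code, payload]; code 0 means success, any
+ # other code is surfaced to the client as a 500 error.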
+ if request.method == 'GET':
+ # Gets ns package list
+ ret = sdc_ns_package.ns_get_csars()
+ normal_status = status.HTTP_200_OK
+
+ if ret[0] == 0:
+ response_serializer = NsPackagesSerializer(data=ret[1])
+ validation_error = handleValidatonError(
+ response_serializer, False)
+ if validation_error:
+ return validation_error
+ elif request.method == 'POST':
+ # Distributes the package according to the given csarId
+ request_serializer = NsPackageDistributeRequestSerializer(data=request.data)
+ validation_error = handleValidatonError(request_serializer, True)
+ if validation_error:
+ return validation_error
+
+ csar_id = ignore_case_get(request.data, "csarId")
+ logger.debug("csar_id is %s", csar_id)
+ ret = sdc_ns_package.ns_on_distribute(csar_id)
+ normal_status = status.HTTP_202_ACCEPTED
+
+ logger.debug("Leave %s, Return value is %s", fun_name(), ret)
+ if ret[0] != 0:
+ return Response(
+ data={
+ 'error': ret[1]},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ return Response(data=ret[1], status=normal_status)
+
+
+@swagger_auto_schema(
+ method='POST',
+ operation_description="On distribute Nf package",
+ request_body=NfPackageDistributeRequestSerializer(),
+ responses={
+ status.HTTP_202_ACCEPTED: PostJobResponseSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query Nf packages",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: NfPackagesSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
+@api_view(http_method_names=['POST', 'GET'])
+def nfpackages_rc(request, *args, **kwargs):
+ logger.debug(
+ "Enter %s%s, method is %s",
+ fun_name(),
+ request.data,
+ request.method)
+ ret, normal_status, response_serializer, validation_error = None, None, None, None
+ if request.method == 'GET':
+ ret = sdc_vnf_package.nf_get_csars()
+ normal_status = status.HTTP_200_OK
+ response_serializer = NfPackagesSerializer(data=ret[1])
+ elif request.method == 'POST':
+ request_serializer = NfPackageDistributeRequestSerializer(
+ data=request.data)
+ validation_error = handleValidatonError(
+ request_serializer, True)
+ if validation_error:
+ return validation_error
+
+ csar_id = ignore_case_get(request_serializer.data, "csarId")
+ vim_ids = ignore_case_get(request_serializer.data, "vimIds")
+ lab_vim_id = ignore_case_get(request_serializer.data, "labVimId")
+ job_id = str(uuid.uuid4())
+ sdc_vnf_package.NfDistributeThread(
+ csar_id, vim_ids, lab_vim_id, job_id).start()
+ ret = [0, {"jobId": job_id}]
+ normal_status = status.HTTP_202_ACCEPTED
+
+ response_serializer = PostJobResponseSerializer(data=ret[1])
+ logger.debug("Leave %s, Return value is %s", fun_name(), ret)
+
+ if ret[0] != 0:
+ return Response(
+ data={
+ 'error': ret[1]},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ validation_error = handleValidatonError(
+ response_serializer, False)
+ if validation_error:
+ return validation_error
+
+ return Response(data=response_serializer.data, status=normal_status)
+
+
+@swagger_auto_schema(
+ method='DELETE',
+ operation_description="Delete one NS package",
+ request_body=no_body,
+ manual_parameters=[
+ openapi.Parameter(
+ 'csarId',
+ openapi.IN_QUERY,
+ "csarId",
+ type=openapi.TYPE_STRING)],
+ responses={
+ status.HTTP_200_OK: NsPackageDistributeResponseSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: openapi.Response(
+ 'error message',
+ openapi.Schema(
+ type=openapi.TYPE_STRING))})
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query one NS package",
+ request_body=no_body,
+ manual_parameters=[
+ openapi.Parameter(
+ 'csarId',
+ openapi.IN_QUERY,
+ "csarId",
+ type=openapi.TYPE_STRING)],
+ responses={
+ status.HTTP_200_OK: NsPackageSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: openapi.Response(
+ 'error message',
+ openapi.Schema(
+ type=openapi.TYPE_STRING))})
+@api_view(http_method_names=['DELETE', 'GET'])
+def ns_rd_csar(request, *args, **kwargs):
+ csar_id = ignore_case_get(kwargs, "csarId")
+ logger.info("Enter %s, method is %s, csar_id is %s",
+ fun_name(), request.method, csar_id)
+ ret, normal_status, response_serializer, validation_error = None, None, None, None
+ if request.method == 'GET':
+ ret = sdc_ns_package.ns_get_csar(csar_id)
+ normal_status = status.HTTP_200_OK
+ if ret[0] == 0:
+ response_serializer = NsPackageSerializer(data=ret[1])
+ validation_error = handleValidatonError(response_serializer, False)
+ if validation_error:
+ return validation_error
+ elif request.method == 'DELETE':
+ ret = sdc_ns_package.ns_delete_csar(csar_id)
+ normal_status = status.HTTP_200_OK
+ logger.info("Leave %s, Return value is %s", fun_name(), ret)
+ if ret[0] != 0:
+ return Response(
+ data={
+ 'error': ret[1]},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+ return Response(data=ret[1], status=normal_status)
+
+
+@swagger_auto_schema(
+ method='POST',
+ operation_description="On distribute Service package",
+ request_body=ServicePackageDistributeRequestSerializer,
+ responses={
+ status.HTTP_202_ACCEPTED: "",
+ status.HTTP_400_BAD_REQUEST: InternalErrorRequestSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query Service packages",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: ServicePackagesSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
+@api_view(http_method_names=['POST', 'GET'])
+def servicepackages_rc(request, *args, **kwargs):
+ logger.debug("Enter %s, method is %s", fun_name(), request.method)
+
+ if request.method == 'GET':
+ # Gets service package list
+ try:
+ csar_list = ServicePackage().get_csars()
+ response_serializer = ServicePackagesSerializer(data=csar_list)
+ validation_error = handleValidatonError(response_serializer, False)
+ if validation_error:
+ return validation_error
+ return Response(data=csar_list, status=status.HTTP_200_OK)
+ except Exception as e:
+ error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
+ elif request.method == 'POST':
+ # Distributes the package according to the given csarId
+ request_serializer = ServicePackageDistributeRequestSerializer(data=request.data)
+ validation_error = handleValidatonError(request_serializer, True)
+ if validation_error:
+ return validation_error
+
+ csar_id = ignore_case_get(request.data, "csarId")
+ logger.debug("csar_id is %s", csar_id)
+ try:
+ ServicePackage().on_distribute(csar_id)
+ return Response(status=status.HTTP_202_ACCEPTED)
+ except PackageHasExistsException as e:
+ error_status = status.HTTP_400_BAD_REQUEST
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
+ except Exception as e:
+ error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
+
+
+@swagger_auto_schema(
+ method='DELETE',
+ operation_description="Delete one Service package",
+ request_body=no_body,
+ manual_parameters=[
+ openapi.Parameter(
+ 'csarId',
+ openapi.IN_QUERY,
+ "csarId",
+ type=openapi.TYPE_STRING)],
+ responses={
+ status.HTTP_204_NO_CONTENT: "",
+ status.HTTP_404_NOT_FOUND: InternalErrorRequestSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query one Service package",
+ request_body=no_body,
+ manual_parameters=[
+ openapi.Parameter(
+ 'csarId',
+ openapi.IN_QUERY,
+ "csarId",
+ type=openapi.TYPE_STRING)],
+ responses={
+ status.HTTP_200_OK: ServicePackageSerializer,
+ status.HTTP_404_NOT_FOUND: InternalErrorRequestSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
+@api_view(http_method_names=['DELETE', 'GET'])
+def service_rd_csar(request, *args, **kwargs):
+ csar_id = ignore_case_get(kwargs, "csarId")
+ logger.info("Enter %s, method is %s, csar_id is %s", fun_name(), request.method, csar_id)
+
+ if request.method == 'GET':
+ try:
+ ret = ServicePackage().get_csar(csar_id)
+ response_serializer = ServicePackageSerializer(data=ret)
+ validation_error = handleValidatonError(response_serializer, False)
+ if validation_error:
+ return validation_error
+ return Response(data=ret, status=status.HTTP_200_OK)
+ except PackageNotFoundException as e:
+ error_status = status.HTTP_404_NOT_FOUND
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
+ except Exception as e:
+ error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
+
+ elif request.method == 'DELETE':
+ try:
+ ServicePackage().delete_csar(csar_id)
+ return Response(status=status.HTTP_204_NO_CONTENT)
+ except PackageNotFoundException as e:
+ error_status = status.HTTP_404_NOT_FOUND
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
+ except Exception as e:
+ error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
+
+
+@swagger_auto_schema(
+ method='DELETE',
+ operation_description="Delete one Nf package",
+ request_body=no_body,
+ manual_parameters=[
+ openapi.Parameter(
+ 'csarId',
+ openapi.IN_QUERY,
+ "csarId",
+ type=openapi.TYPE_STRING)],
+ responses={
+ status.HTTP_202_ACCEPTED: PostJobResponseSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: openapi.Response(
+ 'error message',
+ openapi.Schema(
+ type=openapi.TYPE_STRING))})
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query one Nf package",
+ request_body=no_body,
+ manual_parameters=[
+ openapi.Parameter(
+ 'csarId',
+ openapi.IN_QUERY,
+ "csarId",
+ type=openapi.TYPE_STRING)],
+ responses={
+ status.HTTP_200_OK: NfPackageSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: openapi.Response(
+ 'error message',
+ openapi.Schema(
+ type=openapi.TYPE_STRING))})
+@api_view(http_method_names=['DELETE', 'GET'])
+def nf_rd_csar(request, *args, **kwargs):
+ csar_id = ignore_case_get(kwargs, "csarId")
+ logger.info("Enter %s, method is %s, csar_id is %s",
+ fun_name(), request.method, csar_id)
+ ret, normal_status, response_serializer, validation_error = None, None, None, None
+
+ if request.method == 'GET':
+ ret = sdc_vnf_package.nf_get_csar(csar_id)
+ normal_status = status.HTTP_200_OK
+ response_serializer = NfPackageSerializer(data=ret[1])
+
+ elif request.method == 'DELETE':
+ job_id = str(uuid.uuid4())
+ sdc_vnf_package.NfPkgDeleteThread(csar_id, job_id).start()
+ ret = [0, {"jobId": job_id}]
+ normal_status = status.HTTP_202_ACCEPTED
+ response_serializer = PostJobResponseSerializer(data=ret[1])
+
+ logger.info("Leave %s, Return value is %s", fun_name(), ret)
+ if ret[0] != 0:
+ return Response(
+ data={
+ 'error': ret[1]},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ validation_error = handleValidatonError(
+ response_serializer, False)
+ if validation_error:
+ return validation_error
+
+ return Response(data=response_serializer.data, status=normal_status)
+
+
+@swagger_auto_schema(
+ method='POST',
+ operation_description="Parse model (NS, Service, VNF, PNF)",
+ request_body=ParseModelRequestSerializer,
+ responses={
+ status.HTTP_202_ACCEPTED: ParseModelResponseSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
+@api_view(http_method_names=['POST'])
+def model_parser(request, *args, **kwargs):
+ csar_id = ignore_case_get(request.data, "csarId")
+ package_type = ignore_case_get(request.data, "packageType")
+ inputs = ignore_case_get(request.data, "inputs")
+ logger.debug(
+ "Enter %s, csar_id=%s, package_type=%s, inputs=%s",
+ fun_name(),
+ csar_id,
+ package_type,
+ inputs)
+
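+ # Dispatch to the parser matching the declared package type; unknown types are
+ # rejected with 400 Bad Request.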
+ if package_type.lower() == "service":
+ try:
+ ret = ServicePackage().parse_serviced(csar_id, inputs)
+ response_serializer = ParseModelResponseSerializer(data=ret)
+ validation_error = handleValidatonError(
+ response_serializer, False)
+ if validation_error:
+ return validation_error
+ return Response(data=response_serializer.data, status=status.HTTP_202_ACCEPTED)
+ except PackageNotFoundException as e:
+ error_status = status.HTTP_404_NOT_FOUND
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
+ except Exception as e:
+ error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
+ elif package_type.lower() == "ns":
+ ret = sdc_ns_package.parse_nsd(csar_id, inputs)
+ elif package_type.lower() == "vnf":
+ ret = sdc_vnf_package.parse_vnfd(csar_id, inputs)
+ elif package_type.lower() == "pnf":
+ ret = PnfDescriptor().parse_pnfd(csar_id, inputs)
+ else:
+ error_status = status.HTTP_400_BAD_REQUEST
+ error_message = "Invalid package type, it should be one of [VNF, PNF, NS, Service]"
+ return Response(data=fmt_error_rsp(error_message, error_status), status=error_status)
+
+ if ret[0] != 0:
+ return Response(
+ data={
+ 'error': ret[1]},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ response_serializer = ParseModelResponseSerializer(data=ret[1])
+ validation_error = handleValidatonError(
+ response_serializer, False)
+ if validation_error:
+ return validation_error
+
+ return Response(data=response_serializer.data, status=status.HTTP_202_ACCEPTED)
+
+
+@swagger_auto_schema(
+ method='POST',
+ operation_description="Parse NS model",
+ request_body=ParseModelRequestSerializer,
+ responses={
+ status.HTTP_202_ACCEPTED: ParseModelResponseSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
+@api_view(http_method_names=['POST'])
+def ns_model_parser(request, *args, **kwargs):
+ csar_id = ignore_case_get(request.data, "csarId")
+ inputs = ignore_case_get(request.data, "inputs")
+ logger.debug(
+ "Enter %s, csar_id=%s, inputs=%s",
+ fun_name(),
+ csar_id,
+ inputs)
+ ret = sdc_ns_package.parse_nsd(csar_id, inputs)
+ logger.info("Leave %s, Return value is %s", fun_name(), ret)
+ if ret[0] != 0:
+ return Response(
+ data={
+ 'error': ret[1]},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ response_serializer = ParseModelResponseSerializer(data=ret[1])
+ validation_error = handleValidatonError(
+ response_serializer, False)
+ if validation_error:
+ return validation_error
+
+ return Response(data=response_serializer.data, status=status.HTTP_202_ACCEPTED)
+
+
+@swagger_auto_schema(
+ method='POST',
+ operation_description="Parse NF model",
+ request_body=ParseModelRequestSerializer,
+ responses={
+ status.HTTP_202_ACCEPTED: ParseModelResponseSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
+@api_view(http_method_names=['POST'])
+def vnf_model_parser(request, *args, **kwargs):
+ csar_id = ignore_case_get(request.data, "csarId")
+ inputs = ignore_case_get(request.data, "inputs")
+ logger.debug(
+ "Enter %s, csar_id=%s, inputs=%s",
+ fun_name(),
+ csar_id,
+ inputs)
+ ret = sdc_vnf_package.parse_vnfd(csar_id, inputs)
+ logger.info("Leave %s, Return value is %s", fun_name(), ret)
+ if ret[0] != 0:
+ return Response(
+ data={
+ 'error': ret[1]},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ response_serializer = ParseModelResponseSerializer(data=ret[1])
+ validation_error = handleValidatonError(
+ response_serializer, False)
+ if validation_error:
+ return validation_error
+
+ return Response(data=response_serializer.data, status=status.HTTP_202_ACCEPTED)
+
+
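+# Validates the given serializer and, on failure, returns a 500 Response carrying
+# the serializer errors; returns None when the data is valid.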
+def handleValidatonError(base_serializer, is_request):
+ response = None
+
+ if not base_serializer.is_valid():
+ errormessage = base_serializer.errors
+ logger.error(errormessage)
+
+ if is_request:
+ message = 'Invalid request'
+ else:
+ message = 'Invalid response'
+ logger.error(message)
+ response = Response(
+ data={'error': errormessage},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ return response
diff --git a/catalog/packages/views/common.py b/catalog/packages/views/common.py
new file mode 100644
index 0000000..6285cb9
--- /dev/null
+++ b/catalog/packages/views/common.py
@@ -0,0 +1,123 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import traceback
+import logging
+
+from rest_framework import status
+from rest_framework.response import Response
+
+from catalog.pub.exceptions import CatalogException
+from catalog.pub.exceptions import BadRequestException
+from catalog.pub.exceptions import NsdmBadRequestException
+from catalog.pub.exceptions import PackageNotFoundException
+from catalog.pub.exceptions import ResourceNotFoundException
+from catalog.pub.exceptions import ArtifactNotFoundException
+from catalog.pub.exceptions import NsdmDuplicateSubscriptionException
+from catalog.pub.exceptions import VnfPkgDuplicateSubscriptionException
+from catalog.pub.exceptions import VnfPkgSubscriptionException
+
+logger = logging.getLogger(__name__)
+
+
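+# Runs the given serializer over data and raises CatalogException on failure so
+# that view_safe_call_with_log can translate it into an error response.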
+def validate_data(data, serializer):
+ serialized_data = serializer(data=data)
+ if not serialized_data.is_valid():
+ logger.error('Data validation failed.')
+ raise CatalogException(serialized_data.errors)
+ return serialized_data
+
+
+def fmt_error_rsp(error_message, status):
+ return {"errorMessage": error_message, "error": status}
+
+
+def make_error_resp(status, detail):
+ return Response(
+ data={
+ 'status': status,
+ 'detail': detail
+ },
+ status=status
+ )
+
+
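+# Decorator factory: wraps a view so domain exceptions are logged and mapped to
+# HTTP status codes (303 for duplicate subscriptions, 404 for missing resources,
+# 400 for bad requests, 500 for everything else).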
+def view_safe_call_with_log(logger):
+ def view_safe_call(func):
+ def wrapper(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except NsdmDuplicateSubscriptionException as e:
+ logger.error(e.args[0])
+ return make_error_resp(
+ detail=e.args[0],
+ status=status.HTTP_303_SEE_OTHER
+ )
+ except VnfPkgDuplicateSubscriptionException as e:
+ logger.error(e.args[0])
+ return make_error_resp(
+ detail=e.args[0],
+ status=status.HTTP_303_SEE_OTHER
+ )
+ except PackageNotFoundException as e:
+ logger.error(e.args[0])
+ return make_error_resp(
+ detail=e.args[0],
+ status=status.HTTP_404_NOT_FOUND
+ )
+ except ResourceNotFoundException as e:
+ logger.error(e.args[0])
+ return make_error_resp(
+ detail=e.args[0],
+ status=status.HTTP_404_NOT_FOUND
+ )
+ except ArtifactNotFoundException as e:
+ logger.error(e.args[0])
+ return make_error_resp(
+ detail=e.args[0],
+ status=status.HTTP_404_NOT_FOUND
+ )
+ except BadRequestException as e:
+ logger.error(e.args[0])
+ return make_error_resp(
+ detail=e.args[0],
+ status=status.HTTP_400_BAD_REQUEST
+ )
+ except NsdmBadRequestException as e:
+ logger.error(e.args[0])
+ return make_error_resp(
+ detail=e.args[0],
+ status=status.HTTP_400_BAD_REQUEST
+ )
+ except VnfPkgSubscriptionException as e:
+ logger.error(e.args[0])
+ return make_error_resp(
+ detail=e.args[0],
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR
+ )
+ except CatalogException as e:
+ logger.error(e.args[0])
+ return make_error_resp(
+ detail=e.args[0],
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR
+ )
+ except Exception as e:
+ logger.error(e)
+ logger.error(traceback.format_exc())
+ return make_error_resp(
+ detail='Unexpected exception',
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR
+ )
+ return wrapper
+ return view_safe_call
diff --git a/catalog/packages/views/health_check_views.py b/catalog/packages/views/health_check_views.py
new file mode 100644
index 0000000..cc1a379
--- /dev/null
+++ b/catalog/packages/views/health_check_views.py
@@ -0,0 +1,31 @@
+# Copyright (c) 2019, CMCC Technologies Co., Ltd.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from drf_yasg.utils import swagger_auto_schema
+from rest_framework import status
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+logger = logging.getLogger(__name__)
+
+
+class HealthCheckView(APIView):
+ @swagger_auto_schema(
+ responses={
+ status.HTTP_200_OK: 'Active'})
+ def get(self, request, format=None):
+ logger.debug("Health check.")
+ return Response({"status": "active"})
diff --git a/catalog/packages/views/ns_descriptor_views.py b/catalog/packages/views/ns_descriptor_views.py
new file mode 100644
index 0000000..3b8c1f9
--- /dev/null
+++ b/catalog/packages/views/ns_descriptor_views.py
@@ -0,0 +1,139 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.http import StreamingHttpResponse
+from drf_yasg.utils import no_body, swagger_auto_schema
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from catalog.packages.biz.ns_descriptor import NsDescriptor
+from catalog.packages.serializers.create_nsd_info_request import CreateNsdInfoRequestSerializer
+from catalog.packages.serializers.nsd_info import NsdInfoSerializer
+from catalog.packages.serializers.nsd_infos import NsdInfosSerializer
+from catalog.packages.views.common import validate_data
+from catalog.pub.exceptions import CatalogException
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
+
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query an NSD",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: NsdInfoSerializer(),
+ status.HTTP_404_NOT_FOUND: 'NSDs do not exist',
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@swagger_auto_schema(
+ method='DELETE',
+ operation_description="Delete a NSD",
+ request_body=no_body,
+ responses={
+ status.HTTP_204_NO_CONTENT: "No content",
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@api_view(http_method_names=['GET', 'DELETE'])
+@view_safe_call_with_log(logger=logger)
+def ns_info_rd(request, **kwargs):
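+    """Handle an individual NSD resource: GET returns its NsdInfo, DELETE removes it."""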
+ nsd_info_id = kwargs.get("nsdInfoId")
+ if request.method == 'GET':
+ data = NsDescriptor().query_single(nsd_info_id)
+ nsd_info = validate_data(data, NsdInfoSerializer)
+ return Response(data=nsd_info.data, status=status.HTTP_200_OK)
+ if request.method == 'DELETE':
+ NsDescriptor().delete_single(nsd_info_id)
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+@swagger_auto_schema(
+ method='POST',
+ operation_description="Create a NSD",
+ request_body=CreateNsdInfoRequestSerializer(),
+ responses={
+ status.HTTP_201_CREATED: NsdInfoSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query multiple NSDs",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: NsdInfosSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@api_view(http_method_names=['POST', 'GET'])
+@view_safe_call_with_log(logger=logger)
+def ns_descriptors_rc(request):
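+    """Handle the NSD collection.
+
+    POST creates a new NSD resource; GET returns existing ones, optionally
+    filtered by the nsdId query parameter.
+    """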
+ if request.method == 'POST':
+ create_nsd_info_request = validate_data(request.data, CreateNsdInfoRequestSerializer)
+ data = NsDescriptor().create(create_nsd_info_request.data)
+ validate_data(data, NsdInfoSerializer)
+ return Response(data=data, status=status.HTTP_201_CREATED)
+
+ if request.method == 'GET':
+ nsdId = request.query_params.get("nsdId", None)
+ data = NsDescriptor().query_multiple(nsdId)
+ validate_data(data, NsdInfosSerializer)
+ return Response(data=data, status=status.HTTP_200_OK)
+
+
+@swagger_auto_schema(
+ method='PUT',
+ operation_description="Upload NSD content",
+ request_body=no_body,
+ responses={
+        status.HTTP_204_NO_CONTENT: "No content",
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Download NSD content",
+ request_body=no_body,
+ responses={
+ status.HTTP_204_NO_CONTENT: "No content",
+ status.HTTP_404_NOT_FOUND: 'NSD does not exist.',
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@api_view(http_method_names=['PUT', 'GET'])
+@view_safe_call_with_log(logger=logger)
+def nsd_content_ru(request, **kwargs):
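+    """Upload (PUT) or download (GET) NSD content.
+
+    A failed upload is rolled back via NsDescriptor().handle_upload_failed
+    before the exception is re-raised; GET streams the content, passing the
+    optional HTTP Range header through to the download.
+    """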
+ nsd_info_id = kwargs.get("nsdInfoId")
+ if request.method == 'PUT':
+ files = request.FILES.getlist('file')
+ try:
+ local_file_name = NsDescriptor().upload(nsd_info_id, files[0])
+ NsDescriptor().parse_nsd_and_save(nsd_info_id, local_file_name)
+ return Response(data=None, status=status.HTTP_204_NO_CONTENT)
+ except CatalogException as e:
+ NsDescriptor().handle_upload_failed(nsd_info_id)
+ raise e
+ except Exception as e:
+ NsDescriptor().handle_upload_failed(nsd_info_id)
+ raise e
+
+ if request.method == 'GET':
+ file_range = request.META.get('HTTP_RANGE')
+ file_iterator = NsDescriptor().download(nsd_info_id, file_range)
+ return StreamingHttpResponse(file_iterator, status=status.HTTP_200_OK)
diff --git a/catalog/packages/views/nsdm_subscription_views.py b/catalog/packages/views/nsdm_subscription_views.py
new file mode 100644
index 0000000..5e6394e
--- /dev/null
+++ b/catalog/packages/views/nsdm_subscription_views.py
@@ -0,0 +1,127 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from drf_yasg.utils import swagger_auto_schema, no_body
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from catalog.packages.serializers.nsdm_filter_data import NsdmNotificationsFilter
+from catalog.packages.serializers.nsdm_subscription import NsdmSubscriptionsSerializer
+from catalog.packages.serializers.nsdm_subscription import NsdmSubscriptionIdSerializer
+from catalog.packages.serializers.nsdm_subscription import NsdmSubscriptionSerializer
+from catalog.packages.serializers.nsdm_subscription import NsdmSubscriptionRequestSerializer
+from catalog.packages.serializers.response import ProblemDetailsSerializer
+
+from catalog.pub.exceptions import NsdmBadRequestException
+from catalog.packages.biz.nsdm_subscription import NsdmSubscription
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
+
+def validate_data(data, serializer):
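+    """Validate data with the given serializer; raise NsdmBadRequestException on failure."""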
+ serialized_data = serializer(data=data)
+ if not serialized_data.is_valid():
+ logger.error('Data validation failed.')
+ raise NsdmBadRequestException(serialized_data.errors)
+ return serialized_data
+
+
+@swagger_auto_schema(
+ method='POST',
+ operation_description="Create Subscription for NSD Management",
+ request_body=NsdmSubscriptionRequestSerializer(),
+ responses={
+ status.HTTP_201_CREATED: NsdmSubscriptionSerializer,
+ status.HTTP_303_SEE_OTHER: ProblemDetailsSerializer(),
+ status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+ }
+)
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query subscriptions for Nsd Management",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: NsdmSubscriptionsSerializer(),
+ status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
+ status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer(),
+ }
+)
+@api_view(http_method_names=['POST', 'GET'])
+@view_safe_call_with_log(logger=logger)
+def nsd_subscription_rc(request):
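+    """Create (POST) an NSD management subscription or query (GET) existing ones,
+    optionally filtered by validated query parameters."""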
+ if request.method == 'POST':
+ logger.debug("SubscribeNotification--post::> %s" % request.data)
+ nsdm_subscription_request = \
+ validate_data(request.data,
+ NsdmSubscriptionRequestSerializer)
+ subscription = NsdmSubscription().create(
+ nsdm_subscription_request.data)
+ validate_data(subscription, NsdmSubscriptionSerializer)
+ return Response(data=subscription, status=status.HTTP_201_CREATED)
+
+ if request.method == 'GET':
+ logger.debug("Subscription Notification GET %s" % request.query_params)
+ request_query_params = {}
+ if request.query_params:
+ request_query_params = \
+ validate_data(request.query_params,
+ NsdmNotificationsFilter).data
+ subscription_data = \
+ NsdmSubscription().query_multi_subscriptions(
+ request_query_params)
+ subscriptions = validate_data(subscription_data,
+ NsdmSubscriptionsSerializer)
+ return Response(data=subscriptions.data, status=status.HTTP_200_OK)
+
+
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query subscriptions for Nsd Management",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: NsdmSubscriptionSerializer(),
+ status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
+ status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+ }
+)
+@swagger_auto_schema(
+ method='DELETE',
+ operation_description="Delete subscription for Nsd Management",
+ request_body=no_body,
+ responses={
+        status.HTTP_204_NO_CONTENT: 'No content',
+ status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
+ status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+ }
+)
+@api_view(http_method_names=['GET', 'DELETE'])
+@view_safe_call_with_log(logger=logger)
+def nsd_subscription_rd(request, **kwargs):
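+    """Read (GET) or delete (DELETE) an individual NSD management subscription."""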
+ subscription_id = kwargs.get("subscriptionId")
+ validate_data({'subscription_id': subscription_id}, NsdmSubscriptionIdSerializer)
+ if request.method == 'GET':
+ subscription_data = NsdmSubscription().query_single_subscription(subscription_id)
+ subscription = validate_data(subscription_data, NsdmSubscriptionSerializer)
+ return Response(data=subscription.data, status=status.HTTP_200_OK)
+ elif request.method == 'DELETE':
+        NsdmSubscription().delete_single_subscription(subscription_id)
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/catalog/packages/views/pnf_descriptor_views.py b/catalog/packages/views/pnf_descriptor_views.py
new file mode 100644
index 0000000..9120980
--- /dev/null
+++ b/catalog/packages/views/pnf_descriptor_views.py
@@ -0,0 +1,166 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.http import StreamingHttpResponse
+from drf_yasg.utils import no_body, swagger_auto_schema
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from catalog.packages.biz.pnf_descriptor import PnfDescriptor
+from catalog.packages.serializers.create_pnfd_info_request import CreatePnfdInfoRequestSerializer
+from catalog.packages.serializers.pnfd_info import PnfdInfoSerializer
+from catalog.packages.serializers.pnfd_infos import PnfdInfosSerializer
+from catalog.packages.views.common import validate_data
+from catalog.packages.serializers.catalog_serializers import ParseModelRequestSerializer
+from catalog.packages.serializers.catalog_serializers import ParseModelResponseSerializer
+from catalog.packages.serializers.catalog_serializers import InternalErrorRequestSerializer
+from catalog.packages.serializers.response import ProblemDetailsSerializer
+from catalog.pub.utils.syscomm import fun_name
+from catalog.pub.utils.values import ignore_case_get
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
+
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query a PNFD",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: PnfdInfoSerializer(),
+ status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+ }
+)
+@swagger_auto_schema(
+ method='DELETE',
+ operation_description="Delete a PNFD",
+ request_body=no_body,
+ responses={
+ status.HTTP_204_NO_CONTENT: "No content",
+ status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+ }
+)
+@api_view(http_method_names=['GET', 'DELETE'])
+@view_safe_call_with_log(logger=logger)
+def pnfd_info_rd(request, **kwargs): # TODO
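+    """Handle an individual PNFD resource: GET returns its PnfdInfo, DELETE removes it."""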
+ pnfd_info_id = kwargs.get('pnfdInfoId')
+ if request.method == 'GET':
+ logger.debug("Query an individual PNF descriptor> %s" % request.data)
+ data = PnfDescriptor().query_single(pnfd_info_id)
+ pnfd_info = validate_data(data, PnfdInfoSerializer)
+ return Response(data=pnfd_info.data, status=status.HTTP_200_OK)
+
+ if request.method == 'DELETE':
+ logger.debug("Delete an individual PNFD resource> %s" % request.data)
+ PnfDescriptor().delete_single(pnfd_info_id)
+ return Response(data=None, status=status.HTTP_204_NO_CONTENT)
+
+
+@swagger_auto_schema(
+ method='POST',
+ operation_description="Create a PNFD",
+ request_body=CreatePnfdInfoRequestSerializer(),
+ responses={
+ status.HTTP_201_CREATED: PnfdInfoSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+ }
+)
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query multiple PNFDs",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: PnfdInfosSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+ }
+)
+@api_view(http_method_names=['POST', 'GET'])
+@view_safe_call_with_log(logger=logger)
+def pnf_descriptors_rc(request):
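+    """Create a new PNFD resource (POST) or query existing PNFD resources (GET)."""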
+ if request.method == 'POST':
+ create_pnfd_info_request = validate_data(request.data, CreatePnfdInfoRequestSerializer)
+ data = PnfDescriptor().create(create_pnfd_info_request.data)
+ validate_data(data, PnfdInfoSerializer)
+ return Response(data=data, status=status.HTTP_201_CREATED)
+
+ if request.method == 'GET':
+ data = PnfDescriptor().query_multiple(request)
+ validate_data(data, PnfdInfosSerializer)
+ return Response(data=data, status=status.HTTP_200_OK)
+
+
+@swagger_auto_schema(
+ method='PUT',
+ operation_description="Upload PNFD content",
+ request_body=no_body,
+ responses={
+ status.HTTP_204_NO_CONTENT: "No content",
+ status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+ }
+)
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Fetch PNFD content",
+ request_body=no_body,
+ responses={
+        status.HTTP_200_OK: 'PNFD file',
+ status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+ }
+)
+@api_view(http_method_names=['PUT', 'GET'])
+@view_safe_call_with_log(logger=logger)
+def pnfd_content_ru(request, **kwargs):
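+    """Upload (PUT) or download (GET) PNFD content.
+
+    A failed upload is rolled back via PnfDescriptor().handle_upload_failed
+    before the exception is re-raised.
+    """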
+ pnfd_info_id = kwargs.get("pnfdInfoId")
+ if request.method == 'PUT':
+ files = request.FILES.getlist('file')
+ try:
+ local_file_name = PnfDescriptor().upload(files[0], pnfd_info_id)
+ PnfDescriptor().parse_pnfd_and_save(pnfd_info_id, local_file_name)
+ return Response(data=None, status=status.HTTP_204_NO_CONTENT)
+ except Exception as e:
+ PnfDescriptor().handle_upload_failed(pnfd_info_id)
+ raise e
+
+ if request.method == 'GET':
+ file_iterator = PnfDescriptor().download(pnfd_info_id)
+ return StreamingHttpResponse(file_iterator, status=status.HTTP_200_OK)
+
+
+@swagger_auto_schema(
+ method='POST',
+ operation_description="Parse PNF model",
+ request_body=ParseModelRequestSerializer,
+ responses={
+ status.HTTP_202_ACCEPTED: ParseModelResponseSerializer,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
+@api_view(http_method_names=['POST'])
+def pnf_model_parser(request, *args, **kwargs):
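+    """Parse the PNF model identified by csarId with the supplied inputs and return the result."""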
+ csar_id = ignore_case_get(request.data, "csarId")
+ inputs = ignore_case_get(request.data, "inputs")
+ logger.debug(
+ "Enter %s, csar_id=%s, inputs=%s",
+ fun_name(),
+ csar_id,
+ inputs)
+ ret = PnfDescriptor().parse_pnfd(csar_id, inputs)
+ logger.info("Leave %s, Return value is %s", fun_name(), ret)
+ if ret[0] != 0:
+ return Response(data={'error': ret[1]}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+ response = validate_data(ret[1], ParseModelResponseSerializer)
+ return Response(data=response.data, status=status.HTTP_202_ACCEPTED)
diff --git a/catalog/packages/views/vnf_package_artifact_views.py b/catalog/packages/views/vnf_package_artifact_views.py
new file mode 100644
index 0000000..0de9682
--- /dev/null
+++ b/catalog/packages/views/vnf_package_artifact_views.py
@@ -0,0 +1,54 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from drf_yasg.utils import swagger_auto_schema
+from rest_framework import status
+from rest_framework.views import APIView
+from django.http import FileResponse
+
+from catalog.packages.serializers.response import ProblemDetailsSerializer
+from catalog.packages.biz.vnf_pkg_artifacts import FetchVnfPkgArtifact
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
+VALID_FILTERS = [
+ "callbackUri",
+ "notificationTypes",
+ "vnfdId",
+ "vnfPkgId",
+ "operationalState",
+ "usageState"
+]
+
+
+class FetchVnfPkgmArtifactsView(APIView):
+
+ @swagger_auto_schema(
+ responses={
+ status.HTTP_200_OK: "HTTP_200_OK",
+ status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+ }
+ )
+ @view_safe_call_with_log(logger=logger)
+ def get(self, request, vnfPkgId, artifactPath):
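+        """Fetch the artifact at artifactPath from VNF package vnfPkgId and return it as a file response."""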
+ logger.debug("FetchVnfPkgmArtifactsView--get::> ")
+
+ resp_data = FetchVnfPkgArtifact().fetch(vnfPkgId, artifactPath)
+ response = FileResponse(resp_data)
+
+ return response
diff --git a/catalog/packages/views/vnf_package_subscription_views.py b/catalog/packages/views/vnf_package_subscription_views.py
new file mode 100644
index 0000000..32904e3
--- /dev/null
+++ b/catalog/packages/views/vnf_package_subscription_views.py
@@ -0,0 +1,120 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from drf_yasg.utils import swagger_auto_schema
+from rest_framework import status
+from rest_framework.views import APIView
+from rest_framework.response import Response
+
+from catalog.packages.serializers.vnf_pkg_subscription import PkgmSubscriptionRequestSerializer
+from catalog.packages.serializers.vnf_pkg_subscription import PkgmSubscriptionSerializer
+from catalog.packages.serializers.vnf_pkg_subscription import PkgmSubscriptionsSerializer
+from catalog.packages.serializers.response import ProblemDetailsSerializer
+from catalog.packages.biz.vnf_pkg_subscription import CreateSubscription
+from catalog.packages.biz.vnf_pkg_subscription import QuerySubscription
+from catalog.packages.biz.vnf_pkg_subscription import TerminateSubscription
+from catalog.packages.views.common import validate_data
+from catalog.pub.exceptions import VnfPkgSubscriptionException
+from catalog.pub.exceptions import BadRequestException
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
+VALID_FILTERS = [
+ "callbackUri",
+ "notificationTypes",
+ "vnfdId",
+ "vnfPkgId",
+ "operationalState",
+ "usageState"
+]
+
+
+class CreateQuerySubscriptionView(APIView):
+
+ @swagger_auto_schema(
+ request_body=PkgmSubscriptionRequestSerializer,
+ responses={
+ status.HTTP_201_CREATED: PkgmSubscriptionSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+ )
+ @view_safe_call_with_log(logger=logger)
+ def post(self, request):
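+        """Create a VNF package management subscription from the validated request body."""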
+ logger.debug("Create VNF package Subscription> %s" % request.data)
+
+ vnf_pkg_subscription_request = validate_data(request.data, PkgmSubscriptionRequestSerializer)
+ data = CreateSubscription(vnf_pkg_subscription_request.data).do_biz()
+ subscription_info = validate_data(data, PkgmSubscriptionSerializer)
+ return Response(data=subscription_info.data, status=status.HTTP_201_CREATED)
+
+ @swagger_auto_schema(
+ responses={
+ status.HTTP_200_OK: PkgmSubscriptionSerializer(),
+ status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+ }
+ )
+ @view_safe_call_with_log(logger=logger)
+ def get(self, request):
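+        """Query VNF package subscriptions; query parameters must be a subset of VALID_FILTERS."""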
+ logger.debug("SubscribeNotification--get::> %s" % request.query_params)
+
+ if request.query_params and not set(request.query_params).issubset(set(VALID_FILTERS)):
+ raise BadRequestException("Not a valid filter")
+
+ resp_data = QuerySubscription().query_multi_subscriptions(request.query_params)
+
+ subscriptions_serializer = PkgmSubscriptionsSerializer(data=resp_data)
+ if not subscriptions_serializer.is_valid():
+ raise VnfPkgSubscriptionException(subscriptions_serializer.errors)
+
+ return Response(data=subscriptions_serializer.data, status=status.HTTP_200_OK)
+
+
+class QueryTerminateSubscriptionView(APIView):
+
+ @swagger_auto_schema(
+ responses={
+ status.HTTP_200_OK: PkgmSubscriptionSerializer(),
+ status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+ }
+ )
+ @view_safe_call_with_log(logger=logger)
+ def get(self, request, subscriptionId):
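+        """Return the individual VNF package subscription identified by subscriptionId."""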
+ logger.debug("SubscribeNotification--get::> %s" % subscriptionId)
+
+ resp_data = QuerySubscription().query_single_subscription(subscriptionId)
+
+ subscription_serializer = PkgmSubscriptionSerializer(data=resp_data)
+ if not subscription_serializer.is_valid():
+ raise VnfPkgSubscriptionException(subscription_serializer.errors)
+
+ return Response(data=subscription_serializer.data, status=status.HTTP_200_OK)
+
+ @swagger_auto_schema(
+ responses={
+ status.HTTP_204_NO_CONTENT: "",
+ status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+ }
+ )
+ @view_safe_call_with_log(logger=logger)
+ def delete(self, request, subscriptionId):
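+        """Terminate the VNF package subscription identified by subscriptionId."""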
+ logger.debug("SubscribeNotification--get::> %s" % subscriptionId)
+
+ TerminateSubscription().terminate(subscriptionId)
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/catalog/packages/views/vnf_package_views.py b/catalog/packages/views/vnf_package_views.py
new file mode 100644
index 0000000..9fc143b
--- /dev/null
+++ b/catalog/packages/views/vnf_package_views.py
@@ -0,0 +1,168 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.http import StreamingHttpResponse
+from drf_yasg.utils import swagger_auto_schema, no_body
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from catalog.packages.serializers.upload_vnf_pkg_from_uri_req import UploadVnfPackageFromUriRequestSerializer
+from catalog.packages.serializers.create_vnf_pkg_info_req import CreateVnfPkgInfoRequestSerializer
+from catalog.packages.serializers.vnf_pkg_info import VnfPkgInfoSerializer
+from catalog.packages.serializers.vnf_pkg_infos import VnfPkgInfosSerializer
+from catalog.packages.biz.vnf_package import VnfPackage
+from catalog.packages.biz.vnf_package import VnfPkgUploadThread
+from catalog.packages.biz.vnf_package import parse_vnfd_and_save
+from catalog.packages.biz.vnf_package import handle_upload_failed
+from .common import validate_data
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
+
+@swagger_auto_schema(
+ method="GET",
+ operation_description="Query multiple VNF package resource",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: VnfPkgInfosSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@swagger_auto_schema(
+ method="POST",
+ operation_description="Create an individual VNF package resource",
+ request_body=CreateVnfPkgInfoRequestSerializer,
+ responses={
+ status.HTTP_201_CREATED: VnfPkgInfoSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@api_view(http_method_names=["GET", "POST"])
+@view_safe_call_with_log(logger=logger)
+def vnf_packages_rc(request):
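+    """Handle the VNF package collection: GET returns all packages, POST creates a new one."""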
+ if request.method == 'GET':
+ logger.debug("Query VNF packages> %s" % request.data)
+ data = VnfPackage().query_multiple()
+ validate_data(data, VnfPkgInfosSerializer)
+ return Response(data=data, status=status.HTTP_200_OK)
+
+ if request.method == 'POST':
+ logger.debug("Create VNF package> %s" % request.data)
+ create_vnf_pkg_info_request = validate_data(request.data,
+ CreateVnfPkgInfoRequestSerializer)
+ data = VnfPackage().create_vnf_pkg(create_vnf_pkg_info_request.data)
+ validate_data(data, VnfPkgInfoSerializer)
+ return Response(data=data, status=status.HTTP_201_CREATED)
+
+
+@swagger_auto_schema(
+ method='PUT',
+ operation_description="Upload VNF package content",
+ request_body=no_body,
+ responses={
+ status.HTTP_202_ACCEPTED: "Successfully",
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@swagger_auto_schema(
+ method="GET",
+ operation_description="Fetch VNF package content",
+ request_body=no_body,
+ responses={
+        status.HTTP_200_OK: "VNF package file",
+ status.HTTP_404_NOT_FOUND: "VNF package does not exist",
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@api_view(http_method_names=["PUT", "GET"])
+@view_safe_call_with_log(logger=logger)
+def package_content_ru(request, **kwargs):
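+    """Upload (PUT) or download (GET) VNF package content.
+
+    A failed upload is rolled back via handle_upload_failed before the
+    exception is re-raised; GET streams the package, passing the optional
+    HTTP Range header through to the download.
+    """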
+ vnf_pkg_id = kwargs.get("vnfPkgId")
+ if request.method == "PUT":
+ logger.debug("Upload VNF package %s" % vnf_pkg_id)
+ files = request.FILES.getlist('file')
+ try:
+ local_file_name = VnfPackage().upload(vnf_pkg_id, files[0])
+ parse_vnfd_and_save(vnf_pkg_id, local_file_name)
+ return Response(None, status=status.HTTP_202_ACCEPTED)
+ except Exception as e:
+ handle_upload_failed(vnf_pkg_id)
+ raise e
+
+ if request.method == "GET":
+ file_range = request.META.get('HTTP_RANGE')
+ file_iterator = VnfPackage().download(vnf_pkg_id, file_range)
+ return StreamingHttpResponse(file_iterator, status=status.HTTP_200_OK)
+
+
+@swagger_auto_schema(
+ method='POST',
+ operation_description="Upload VNF package content from uri",
+ request_body=UploadVnfPackageFromUriRequestSerializer,
+ responses={
+ status.HTTP_202_ACCEPTED: "Successfully",
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@api_view(http_method_names=['POST'])
+@view_safe_call_with_log(logger=logger)
+def upload_from_uri_c(request, **kwargs):
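+    """Start a background VnfPkgUploadThread that fetches package content from the given URI,
+    returning 202 immediately; a failed start is rolled back via handle_upload_failed."""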
+ vnf_pkg_id = kwargs.get("vnfPkgId")
+ try:
+ upload_vnf_from_uri_request = validate_data(request.data,
+ UploadVnfPackageFromUriRequestSerializer)
+ VnfPkgUploadThread(upload_vnf_from_uri_request.data, vnf_pkg_id).start()
+ return Response(None, status=status.HTTP_202_ACCEPTED)
+ except Exception as e:
+ handle_upload_failed(vnf_pkg_id)
+ raise e
+
+
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query an individual VNF package resource",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: VnfPkgInfoSerializer(),
+ status.HTTP_404_NOT_FOUND: "VNF package does not exist",
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@swagger_auto_schema(
+ method='DELETE',
+ operation_description="Delete an individual VNF package resource",
+ request_body=no_body,
+ responses={
+ status.HTTP_204_NO_CONTENT: "No content",
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@api_view(http_method_names=['GET', 'DELETE'])
+@view_safe_call_with_log(logger=logger)
+def vnf_package_rd(request, **kwargs):
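+    """Handle an individual VNF package resource: GET returns its VnfPkgInfo, DELETE removes it."""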
+ vnf_pkg_id = kwargs.get("vnfPkgId")
+ if request.method == 'GET':
+ logger.debug("Query an individual VNF package> %s" % request.data)
+ data = VnfPackage().query_single(vnf_pkg_id)
+ validate_data(data, VnfPkgInfoSerializer)
+ return Response(data=data, status=status.HTTP_200_OK)
+
+ if request.method == 'DELETE':
+ logger.debug("Delete an individual VNF package> %s" % request.data)
+ VnfPackage().delete_vnf_pkg(vnf_pkg_id)
+ return Response(data=None, status=status.HTTP_204_NO_CONTENT)