about summary refs log tree commit diff stats
path: root/genericparser/packages/biz
diff options
context:
space:
mode:
Diffstat (limited to 'genericparser/packages/biz')
-rw-r--r--  genericparser/packages/biz/__init__.py              13
-rw-r--r--  genericparser/packages/biz/common.py                51
-rw-r--r--  genericparser/packages/biz/ns_descriptor.py        239
-rw-r--r--  genericparser/packages/biz/nsdm_subscription.py    219
-rw-r--r--  genericparser/packages/biz/pnf_descriptor.py       227
-rw-r--r--  genericparser/packages/biz/sdc_ns_package.py       172
-rw-r--r--  genericparser/packages/biz/sdc_service_package.py  114
-rw-r--r--  genericparser/packages/biz/sdc_vnf_package.py      254
-rw-r--r--  genericparser/packages/biz/service_descriptor.py   130
-rw-r--r--  genericparser/packages/biz/vnf_package.py          228
-rw-r--r--  genericparser/packages/biz/vnf_pkg_artifacts.py     40
-rw-r--r--  genericparser/packages/biz/vnf_pkg_subscription.py 183
12 files changed, 1870 insertions, 0 deletions
diff --git a/genericparser/packages/biz/__init__.py b/genericparser/packages/biz/__init__.py
new file mode 100644
index 0000000..342c2a8
--- /dev/null
+++ b/genericparser/packages/biz/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/packages/biz/common.py b/genericparser/packages/biz/common.py
new file mode 100644
index 0000000..be5627e
--- /dev/null
+++ b/genericparser/packages/biz/common.py
@@ -0,0 +1,51 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH
+from genericparser.pub.utils import fileutil
+
+CHUNK_SIZE = 1024 * 8
+
+
+def save(remote_file, vnf_pkg_id):
+ local_file_name = remote_file.name
+ local_file_dir = os.path.join(GENERICPARSER_ROOT_PATH, vnf_pkg_id)
+ local_file_name = os.path.join(local_file_dir, local_file_name)
+ if not os.path.exists(local_file_dir):
+ fileutil.make_dirs(local_file_dir)
+ with open(local_file_name, 'wb') as local_file:
+ for chunk in remote_file.chunks(chunk_size=CHUNK_SIZE):
+ local_file.write(chunk)
+ return local_file_name
+
+
+def read(file_path, start, end):
+ fp = open(file_path, 'rb')
+ fp.seek(start)
+ pos = start
+ while pos + CHUNK_SIZE < end:
+ yield fp.read(CHUNK_SIZE)
+ pos = fp.tell()
+ yield fp.read(end - pos)
+
+
+def parse_file_range(file_path, file_range):
+ start, end = 0, os.path.getsize(file_path)
+ if file_range:
+ [start, end] = file_range.split('-')
+ start, end = start.strip(), end.strip()
+ start, end = int(start), int(end)
+ return start, end
diff --git a/genericparser/packages/biz/ns_descriptor.py b/genericparser/packages/biz/ns_descriptor.py
new file mode 100644
index 0000000..60d4a26
--- /dev/null
+++ b/genericparser/packages/biz/ns_descriptor.py
@@ -0,0 +1,239 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import json
+import logging
+import os
+import uuid
+
+from genericparser.packages.biz.common import parse_file_range, read, save
+from genericparser.packages.const import PKG_STATUS
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH
+from genericparser.pub.database.models import NSPackageModel, PnfPackageModel, VnfPackageModel
+from genericparser.pub.exceptions import GenericparserException, ResourceNotFoundException
+from genericparser.pub.utils import fileutil, toscaparsers
+from genericparser.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+METADATA = "metadata"
+
+
+class NsDescriptor(object):
+
+ def __init__(self):
+ pass
+
+ def create(self, data, id=None):
+ logger.info('Start to create a NSD...')
+ user_defined_data = ignore_case_get(data, 'userDefinedData', {})
+ data = {
+ 'id': id if id else str(uuid.uuid4()),
+ 'nsdOnboardingState': PKG_STATUS.CREATED,
+ 'nsdOperationalState': PKG_STATUS.DISABLED,
+ 'nsdUsageState': PKG_STATUS.NOT_IN_USE,
+ 'userDefinedData': user_defined_data,
+ '_links': None # TODO
+ }
+ NSPackageModel.objects.create(
+ nsPackageId=data['id'],
+ onboardingState=data['nsdOnboardingState'],
+ operationalState=data['nsdOperationalState'],
+ usageState=data['nsdUsageState'],
+ userDefinedData=json.dumps(user_defined_data)
+ )
+ logger.info('A NSD(%s) has been created.' % data['id'])
+ return data
+
+ def query_multiple(self, nsdId=None):
+ if nsdId:
+ ns_pkgs = NSPackageModel.objects.filter(nsdId=nsdId)
+ else:
+ ns_pkgs = NSPackageModel.objects.all()
+ response_data = []
+ for ns_pkg in ns_pkgs:
+ data = self.fill_resp_data(ns_pkg)
+ response_data.append(data)
+ return response_data
+
+ def query_single(self, nsd_info_id):
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ logger.error('NSD(%s) does not exist.' % nsd_info_id)
+ raise ResourceNotFoundException('NSD(%s) does not exist.' % nsd_info_id)
+ return self.fill_resp_data(ns_pkgs[0])
+
+ def delete_single(self, nsd_info_id):
+ logger.info('Start to delete NSD(%s)...' % nsd_info_id)
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ logger.info('NSD(%s) has been deleted.' % nsd_info_id)
+ return
+ '''
+ if ns_pkgs[0].operationalState != PKG_STATUS.DISABLED:
+ logger.error('NSD(%s) shall be DISABLED.' % nsd_info_id)
+ raise GenericparserException('NSD(%s) shall be DISABLED.' % nsd_info_id)
+ if ns_pkgs[0].usageState != PKG_STATUS.NOT_IN_USE:
+ logger.error('NSD(%s) shall be NOT_IN_USE.' % nsd_info_id)
+ raise GenericparserException('NSD(%s) shall be NOT_IN_USE.' % nsd_info_id)
+ '''
+ ns_pkgs.delete()
+ ns_pkg_path = os.path.join(GENERICPARSER_ROOT_PATH, nsd_info_id)
+ fileutil.delete_dirs(ns_pkg_path)
+ logger.info('NSD(%s) has been deleted.' % nsd_info_id)
+
+ def upload(self, nsd_info_id, remote_file):
+ logger.info('Start to upload NSD(%s)...' % nsd_info_id)
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ logger.error('NSD(%s) does not exist.' % nsd_info_id)
+ raise GenericparserException('NSD(%s) does not exist.' % nsd_info_id)
+ ns_pkgs.update(onboardingState=PKG_STATUS.UPLOADING)
+
+ local_file_name = save(remote_file, nsd_info_id)
+ logger.info('NSD(%s) content has been uploaded.' % nsd_info_id)
+ return local_file_name
+
+ def download(self, nsd_info_id, file_range):
+ logger.info('Start to download NSD(%s)...' % nsd_info_id)
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ logger.error('NSD(%s) does not exist.' % nsd_info_id)
+ raise ResourceNotFoundException('NSD(%s) does not exist.' % nsd_info_id)
+ if ns_pkgs[0].onboardingState != PKG_STATUS.ONBOARDED:
+ logger.error('NSD(%s) is not ONBOARDED.' % nsd_info_id)
+ raise GenericparserException('NSD(%s) is not ONBOARDED.' % nsd_info_id)
+
+ local_file_path = ns_pkgs[0].localFilePath
+ start, end = parse_file_range(local_file_path, file_range)
+ logger.info('NSD(%s) has been downloaded.' % nsd_info_id)
+ return read(local_file_path, start, end)
+
+ def parse_nsd_and_save(self, nsd_info_id, local_file_name):
+ logger.info('Start to process NSD(%s)...' % nsd_info_id)
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ ns_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)
+
+ nsd_json = toscaparsers.parse_nsd(local_file_name)
+ logger.debug("%s", nsd_json)
+ nsd = json.JSONDecoder().decode(nsd_json)
+
+ nsd_id = nsd.get("ns", {}).get("properties", {}).get("descriptor_id", "")
+ nsd_name = nsd.get("ns", {}).get("properties", {}).get("name", "")
+ nsd_version = nsd.get("ns", {}).get("properties", {}).get("version", "")
+ nsd_designer = nsd.get("ns", {}).get("properties", {}).get("designer", "")
+ invariant_id = nsd.get("ns", {}).get("properties", {}).get("invariant_id", "")
+ if nsd_id == "":
+ raise GenericparserException("nsd_id(%s) does not exist in metadata." % nsd_id)
+ other_nspkg = NSPackageModel.objects.filter(nsdId=nsd_id)
+ if other_nspkg and other_nspkg[0].nsPackageId != nsd_info_id:
+ logger.warn("NSD(%s,%s) already exists.", nsd_id, other_nspkg[0].nsPackageId)
+ raise GenericparserException("NSD(%s) already exists." % nsd_id)
+
+ for vnf in nsd["vnfs"]:
+ vnfd_id = vnf["properties"].get("descriptor_id", "undefined")
+ if vnfd_id == "undefined":
+ vnfd_id = vnf["properties"].get("id", "undefined")
+ pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+ if not pkg:
+ pkg = VnfPackageModel.objects.filter(vnfPackageId=vnfd_id)
+ if not pkg:
+ vnfd_name = vnf.get("vnf_id", "undefined")
+ logger.error("[%s] is not distributed.", vnfd_name)
+ raise GenericparserException("VNF package(%s) is not distributed." % vnfd_id)
+
+ for pnf in nsd["pnfs"]:
+ pnfd_id = pnf["properties"].get("descriptor_id", "undefined")
+ if pnfd_id == "undefined":
+ pnfd_id = pnf["properties"].get("id", "undefined")
+ pkg = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
+ if not pkg:
+ pkg = PnfPackageModel.objects.filter(pnfPackageId=pnfd_id)
+ if not pkg:
+ pnfd_name = pnf.get("pnf_id", "undefined")
+ logger.error("[%s] is not distributed.", pnfd_name)
+ raise GenericparserException("PNF package(%s) is not distributed." % pnfd_name)
+
+ ns_pkgs.update(
+ nsdId=nsd_id,
+ nsdName=nsd_name,
+ nsdDesginer=nsd_designer,
+ nsdDescription=nsd.get("description", ""),
+ nsdVersion=nsd_version,
+ invariantId=invariant_id,
+ onboardingState=PKG_STATUS.ONBOARDED,
+ operationalState=PKG_STATUS.ENABLED,
+ usageState=PKG_STATUS.NOT_IN_USE,
+ nsPackageUri=local_file_name,
+ sdcCsarId=nsd_info_id,
+ localFilePath=local_file_name,
+ nsdModel=nsd_json
+ )
+ logger.info('NSD(%s) has been processed.' % nsd_info_id)
+
+ def fill_resp_data(self, ns_pkg):
+ data = {
+ 'id': ns_pkg.nsPackageId,
+ 'nsdId': ns_pkg.nsdId,
+ 'nsdName': ns_pkg.nsdName,
+ 'nsdVersion': ns_pkg.nsdVersion,
+ 'nsdDesigner': ns_pkg.nsdDesginer,
+ 'nsdInvariantId': ns_pkg.invariantId,
+ 'vnfPkgIds': [],
+ 'pnfdInfoIds': [], # TODO
+ 'nestedNsdInfoIds': [], # TODO
+ 'nsdOnboardingState': ns_pkg.onboardingState,
+ 'onboardingFailureDetails': None, # TODO
+ 'nsdOperationalState': ns_pkg.operationalState,
+ 'nsdUsageState': ns_pkg.usageState,
+ 'userDefinedData': {},
+ '_links': None # TODO
+ }
+
+ if ns_pkg.nsdModel:
+ nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
+ vnf_pkg_ids = []
+ for vnf in nsd_model['vnfs']:
+ vnfd_id = vnf["properties"].get("descriptor_id", "undefined")
+ if vnfd_id == "undefined":
+ vnfd_id = vnf["properties"].get("id", "undefined")
+ pkgs = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+ if not pkgs:
+ pkgs = VnfPackageModel.objects.filter(vnfPackageId=vnfd_id)
+ for pkg in pkgs:
+ vnf_pkg_ids.append(pkg.vnfPackageId)
+ data['vnfPkgIds'] = vnf_pkg_ids
+
+ pnf_info_ids = []
+ for pnf in nsd_model['pnfs']:
+ pnfd_id = pnf["properties"].get("descriptor_id", "undefined")
+ if pnfd_id == "undefined":
+ pnfd_id = pnf["properties"].get("id", "undefined")
+ pkgs = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
+ if not pkgs:
+ pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_id)
+ for pkg in pkgs:
+ pnf_info_ids.append(pkg.pnfPackageId)
+ data['pnfInfoIds'] = pnf_info_ids # TODO: need reconfirming
+
+ if ns_pkg.userDefinedData:
+ user_defined_data = json.JSONDecoder().decode(ns_pkg.userDefinedData)
+ data['userDefinedData'] = user_defined_data
+
+ return data
+
+ def handle_upload_failed(self, nsd_info_id):
+ ns_pkg = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ ns_pkg.update(onboardingState=PKG_STATUS.CREATED)
diff --git a/genericparser/packages/biz/nsdm_subscription.py b/genericparser/packages/biz/nsdm_subscription.py
new file mode 100644
index 0000000..ba74c70
--- /dev/null
+++ b/genericparser/packages/biz/nsdm_subscription.py
@@ -0,0 +1,219 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ast
+import json
+import logging
+import requests
+import uuid
+
+from collections import Counter
+
+from rest_framework import status
+
+from genericparser.packages import const
+from genericparser.pub.database.models import NsdmSubscriptionModel
+from genericparser.pub.exceptions import GenericparserException, \
+ ResourceNotFoundException, \
+ NsdmBadRequestException, NsdmDuplicateSubscriptionException
+from genericparser.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+PARAMSBASICKEYS = ["userName", "password"]
+
+PARAMSOAUTH2CLIENTCREDENTIALSKEYS = ["clientId", "clientPassword",
+ "tokenEndpoint"]
+
+
+def is_filter_type_equal(new_filter, existing_filter):
+ return Counter(list(set(new_filter))) == Counter(existing_filter)
+
+
+class NsdmSubscription:
+
+ def __init__(self):
+ pass
+
+ def query_single_subscription(self, subscription_id):
+ logger.debug("Start Query Subscription... ")
+ subscription = \
+ NsdmSubscriptionModel.objects.filter(
+ subscriptionid=subscription_id)
+ if not subscription.exists():
+ raise ResourceNotFoundException(
+ "Subscription(%s) doesn't exists" % subscription_id)
+ logger.debug("Subscription found... ")
+ return self.fill_resp_data(subscription[0])
+
+ def delete_single_subscription(self, subscription_id):
+ logger.debug("Start Delete Subscription... ")
+ subscription = \
+ NsdmSubscriptionModel.objects.filter(
+ subscriptionid=subscription_id)
+ if not subscription.exists():
+ raise ResourceNotFoundException(
+ "Subscription(%s) doesn't exists" % subscription_id)
+ subscription.delete()
+ logger.debug("Deleted Subscription... ")
+
+ def query_multi_subscriptions(self, query_params):
+ self.params = query_params
+ query_data = {}
+ logger.debug("Start QueryMultiSubscriptions get --> "
+ "Check for filters in query params" % self.params)
+ for query, value in self.params.iteritems():
+ if query in const.NSDM_NOTIFICATION_FILTERS and value:
+ query_data[query + '__icontains'] = json.dumps(list(set(value)))
+ # Query the database with filters if the request
+ # has fields in request params, else fetch all records
+ if query_data:
+ subscriptions = NsdmSubscriptionModel.objects.filter(**query_data)
+ else:
+ subscriptions = NsdmSubscriptionModel.objects.all()
+ if not subscriptions.exists():
+ raise ResourceNotFoundException("Subscriptions doesn't exist")
+ return [self.fill_resp_data(subscription)
+ for subscription in subscriptions]
+
+ def check_callbackuri_connection(self):
+ logger.debug("Create Subscription --> Test Callback URI --"
+ "Sending GET request to %s" % self.callback_uri)
+ try:
+ response = requests.get(self.callback_uri, timeout=2)
+ if response.status_code != status.HTTP_204_NO_CONTENT:
+ raise GenericparserException("callbackUri %s returns %s status "
+ "code." % (self.callback_uri,
+ response.status_code))
+ except Exception:
+ raise GenericparserException("callbackUri %s didn't return 204 status"
+ "code." % self.callback_uri)
+
+ def fill_resp_data(self, subscription):
+ subscription_filter = dict()
+ for filter_type in const.NSDM_NOTIFICATION_FILTERS:
+ subscription_filter[filter_type] = \
+ ast.literal_eval(subscription.__dict__[filter_type])
+ resp_data = {
+ 'id': subscription.subscriptionid,
+ 'callbackUri': subscription.callback_uri,
+ 'filter': subscription_filter,
+ '_links': json.loads(subscription.links)
+ }
+ return resp_data
+
+ def create(self, data):
+ logger.debug("Start Create Subscription... ")
+ self.filter = ignore_case_get(data, "filter", {})
+ self.callback_uri = ignore_case_get(data, "callbackUri")
+ self.authentication = ignore_case_get(data, "authentication", {})
+ self.subscription_id = str(uuid.uuid4())
+ self.check_callbackuri_connection()
+ self.check_valid_auth_info()
+ self.check_filter_types()
+ self.check_valid()
+ self.save_db()
+ subscription = \
+ NsdmSubscriptionModel.objects.get(
+ subscriptionid=self.subscription_id)
+ return self.fill_resp_data(subscription)
+
+ def check_filter_types(self):
+ # Check if both nsdId and nsdInfoId
+ # or pnfdId and pnfdInfoId are present
+ logger.debug("Create Subscription --> Validating Filters... ")
+ if self.filter and \
+ self.filter.get("nsdId", "") and \
+ self.filter.get("nsdInfoId", ""):
+ raise NsdmBadRequestException("Notification Filter should contain"
+ " either nsdId or nsdInfoId")
+ if self.filter and \
+ self.filter.get("pnfdId", "") and \
+ self.filter.get("pnfdInfoIds", ""):
+ raise NsdmBadRequestException("Notification Filter should contain"
+ " either pnfdId or pnfdInfoIds")
+
+ def check_valid_auth_info(self):
+ logger.debug("Create Subscription --> Validating Auth "
+ "details if provided... ")
+ if self.authentication.get("paramsBasic", {}) and \
+ const.BASIC not in self.authentication.get("authType", ''):
+ raise NsdmBadRequestException('Auth type should be ' + const.BASIC)
+ if self.authentication.get("paramsOauth2ClientCredentials", {}) and \
+ const.OAUTH2_CLIENT_CREDENTIALS not in \
+ self.authentication.get("authType", ''):
+ raise NsdmBadRequestException('Auth type should '
+ 'be ' + const.OAUTH2_CLIENT_CREDENTIALS)
+ if const.BASIC in self.authentication.get("authType", '') and \
+ "paramsBasic" in self.authentication.keys() and \
+ not is_filter_type_equal(PARAMSBASICKEYS,
+ self.authentication.
+ get("paramsBasic").keys()):
+ raise NsdmBadRequestException('userName and password needed '
+ 'for ' + const.BASIC)
+ if const.OAUTH2_CLIENT_CREDENTIALS in \
+ self.authentication.get("authType", '') and \
+ "paramsOauth2ClientCredentials" in \
+ self.authentication.keys() and \
+ not is_filter_type_equal(PARAMSOAUTH2CLIENTCREDENTIALSKEYS,
+ self.authentication.
+ get("paramsOauth2ClientCredentials")
+ .keys()):
+ raise NsdmBadRequestException('clientId, clientPassword and '
+ 'tokenEndpoint required '
+ 'for ' + const.OAUTH2_CLIENT_CREDENTIALS)
+
+ def check_filter_exists(self, subscription):
+ for filter_type in const.NSDM_NOTIFICATION_FILTERS:
+ if not is_filter_type_equal(self.filter.get(filter_type, []),
+ ast.literal_eval(
+ getattr(subscription,
+ filter_type))):
+ return False
+ return True
+
+ def check_valid(self):
+ logger.debug("Create Subscription --> Checking DB if "
+ "same subscription exists already exists... ")
+ subscriptions = \
+ NsdmSubscriptionModel.objects.filter(
+ callback_uri=self.callback_uri)
+ if not subscriptions.exists():
+ return
+ for subscription in subscriptions:
+ if self.check_filter_exists(subscription):
+ raise NsdmDuplicateSubscriptionException(
+ "Already Subscription exists with the "
+ "same callbackUri and filter")
+
+ def save_db(self):
+ logger.debug("Create Subscription --> Saving the subscription "
+ "%s to the database" % self.subscription_id)
+ links = {
+ "self": {
+ "href":
+ const.NSDM_SUBSCRIPTION_ROOT_URI + self.subscription_id
+ }
+ }
+ subscription_save_db = {
+ "subscriptionid": self.subscription_id,
+ "callback_uri": self.callback_uri,
+ "auth_info": self.authentication,
+ "links": json.dumps(links)
+ }
+ for filter_type in const.NSDM_NOTIFICATION_FILTERS:
+ subscription_save_db[filter_type] = json.dumps(
+ list(set(self.filter.get(filter_type, []))))
+ NsdmSubscriptionModel.objects.create(**subscription_save_db)
+ logger.debug('Create Subscription[%s] success', self.subscription_id)
diff --git a/genericparser/packages/biz/pnf_descriptor.py b/genericparser/packages/biz/pnf_descriptor.py
new file mode 100644
index 0000000..ae635ff
--- /dev/null
+++ b/genericparser/packages/biz/pnf_descriptor.py
@@ -0,0 +1,227 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import json
+import logging
+import os
+import uuid
+
+from genericparser.packages.biz.common import read, save
+from genericparser.packages.const import PKG_STATUS
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH
+from genericparser.pub.database.models import NSPackageModel, PnfPackageModel
+from genericparser.pub.exceptions import GenericparserException, ResourceNotFoundException
+from genericparser.pub.utils import fileutil, toscaparsers
+from genericparser.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
+class PnfDescriptor(object):
+
+ def __init__(self):
+ pass
+
+ def create(self, data):
+ logger.info('Start to create a PNFD...')
+ user_defined_data = ignore_case_get(data, 'userDefinedData', {})
+ data = {
+ 'id': str(uuid.uuid4()),
+ 'pnfdOnboardingState': PKG_STATUS.CREATED,
+ 'pnfdUsageState': PKG_STATUS.NOT_IN_USE,
+ 'userDefinedData': user_defined_data,
+ '_links': None # TODO
+ }
+ PnfPackageModel.objects.create(
+ pnfPackageId=data['id'],
+ onboardingState=data['pnfdOnboardingState'],
+ usageState=data['pnfdUsageState'],
+ userDefinedData=json.dumps(user_defined_data)
+ )
+ logger.info('A PNFD(%s) has been created.' % data['id'])
+ return data
+
+ def query_multiple(self, pnfdId=None):
+ if pnfdId:
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfdId=pnfdId)
+ else:
+ pnf_pkgs = PnfPackageModel.objects.all()
+ response_data = []
+ for pnf_pkg in pnf_pkgs:
+ data = self.fill_response_data(pnf_pkg)
+ response_data.append(data)
+ return response_data
+
+ def query_single(self, pnfd_info_id):
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ logger.error('PNFD(%s) does not exist.' % pnfd_info_id)
+ raise ResourceNotFoundException('PNFD(%s) does not exist.' % pnfd_info_id)
+ return self.fill_response_data(pnf_pkgs[0])
+
+ def upload(self, remote_file, pnfd_info_id):
+ logger.info('Start to upload PNFD(%s)...' % pnfd_info_id)
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ logger.info('PNFD(%s) does not exist.' % pnfd_info_id)
+ raise GenericparserException('PNFD (%s) does not exist.' % pnfd_info_id)
+ pnf_pkgs.update(onboardingState=PKG_STATUS.UPLOADING)
+
+ local_file_name = save(remote_file, pnfd_info_id)
+ logger.info('PNFD(%s) content has been uploaded.' % pnfd_info_id)
+ return local_file_name
+
+ def delete_single(self, pnfd_info_id):
+ logger.info('Start to delete PNFD(%s)...' % pnfd_info_id)
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ logger.info('PNFD(%s) has been deleted.' % pnfd_info_id)
+ return
+ '''
+ if pnf_pkgs[0].usageState != PKG_STATUS.NOT_IN_USE:
+ logger.info('PNFD(%s) shall be NOT_IN_USE.' % pnfd_info_id)
+ raise GenericparserException('PNFD(%s) shall be NOT_IN_USE.' % pnfd_info_id)
+ '''
+ del_pnfd_id = pnf_pkgs[0].pnfdId
+ ns_pkgs = NSPackageModel.objects.all()
+ for ns_pkg in ns_pkgs:
+ nsd_model = None
+ if ns_pkg.nsdModel:
+ nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
+ if not nsd_model:
+ continue
+ for pnf in nsd_model['pnfs']:
+ if del_pnfd_id == pnf["properties"]["id"]:
+ logger.warn("PNFD(%s) is referenced in NSD", del_pnfd_id)
+ raise GenericparserException('PNFD(%s) is referenced.' % pnfd_info_id)
+ pnf_pkgs.delete()
+ pnf_pkg_path = os.path.join(GENERICPARSER_ROOT_PATH, pnfd_info_id)
+ fileutil.delete_dirs(pnf_pkg_path)
+ logger.debug('PNFD(%s) has been deleted.' % pnfd_info_id)
+
+ def download(self, pnfd_info_id):
+ logger.info('Start to download PNFD(%s)...' % pnfd_info_id)
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ logger.error('PNFD(%s) does not exist.' % pnfd_info_id)
+ raise ResourceNotFoundException('PNFD(%s) does not exist.' % pnfd_info_id)
+ if pnf_pkgs[0].onboardingState != PKG_STATUS.ONBOARDED:
+ logger.error('PNFD(%s) is not ONBOARDED.' % pnfd_info_id)
+ raise GenericparserException('PNFD(%s) is not ONBOARDED.' % pnfd_info_id)
+
+ local_file_path = pnf_pkgs[0].localFilePath
+ start, end = 0, os.path.getsize(local_file_path)
+ logger.info('PNFD(%s) has been downloaded.' % pnfd_info_id)
+ return read(local_file_path, start, end)
+
+ def parse_pnfd_and_save(self, pnfd_info_id, local_file_name):
+ logger.info('Start to process PNFD(%s)...' % pnfd_info_id)
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ pnf_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)
+ pnfd_json = toscaparsers.parse_pnfd(local_file_name)
+ pnfd = json.JSONDecoder().decode(pnfd_json)
+
+ logger.debug("pnfd_json is %s" % pnfd_json)
+ pnfd_id = ""
+ pnfdVersion = ""
+ pnfdProvider = ""
+ pnfdName = ""
+ if pnfd.get("pnf", "") != "":
+ if pnfd["pnf"].get("properties", "") != "":
+ pnfd_id = pnfd["pnf"].get("properties", "").get("descriptor_id", "")
+ pnfdVersion = pnfd["pnf"].get("properties", "").get("version", "")
+ pnfdProvider = pnfd["pnf"].get("properties", "").get("provider", "")
+ pnfdName = pnfd["pnf"].get("properties", "").get("name", "")
+ if pnfd_id == "":
+ pnfd_id = pnfd["metadata"].get("descriptor_id", "")
+ if pnfd_id == "":
+ pnfd_id = pnfd["metadata"].get("id", "")
+ if pnfd_id == "":
+ pnfd_id = pnfd["metadata"].get("UUID", "")
+ if pnfd_id == "":
+ raise GenericparserException('pnfd_id is Null.')
+
+ if pnfdVersion == "":
+ pnfdVersion = pnfd["metadata"].get("template_version", "")
+ if pnfdVersion == "":
+ pnfdVersion = pnfd["metadata"].get("version", "")
+
+ if pnfdProvider == "":
+ pnfdProvider = pnfd["metadata"].get("template_author", "")
+ if pnfdVersion == "":
+ pnfdVersion = pnfd["metadata"].get("provider", "")
+
+ if pnfdName == "":
+ pnfdName = pnfd["metadata"].get("template_name", "")
+ if pnfdVersion == "":
+ pnfdName = pnfd["metadata"].get("name", "")
+
+ if pnfd_id:
+ other_pnf = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
+ if other_pnf and other_pnf[0].pnfPackageId != pnfd_info_id:
+ logger.info('PNFD(%s) already exists.' % pnfd_id)
+ raise GenericparserException("PNFD(%s) already exists." % pnfd_id)
+
+ pnf_pkgs.update(
+ pnfdId=pnfd_id,
+ pnfdName=pnfdName,
+ pnfdVersion=pnfdVersion,
+ pnfVendor=pnfdProvider,
+ pnfPackageUri=local_file_name,
+ onboardingState=PKG_STATUS.ONBOARDED,
+ usageState=PKG_STATUS.NOT_IN_USE,
+ localFilePath=local_file_name,
+ pnfdModel=pnfd_json
+ )
+ logger.info('PNFD(%s) has been processed.' % pnfd_info_id)
+
+ def fill_response_data(self, pnf_pkg):
+ data = {
+ 'id': pnf_pkg.pnfPackageId,
+ 'pnfdId': pnf_pkg.pnfdId,
+ 'pnfdName': pnf_pkg.pnfdName,
+ 'pnfdVersion': pnf_pkg.pnfdVersion,
+ 'pnfdProvider': pnf_pkg.pnfVendor,
+ 'pnfdInvariantId': None, # TODO
+ 'pnfdOnboardingState': pnf_pkg.onboardingState,
+ 'onboardingFailureDetails': None, # TODO
+ 'pnfdUsageState': pnf_pkg.usageState,
+ 'userDefinedData': {},
+ '_links': None # TODO
+ }
+ if pnf_pkg.userDefinedData:
+ user_defined_data = json.JSONDecoder().decode(pnf_pkg.userDefinedData)
+ data['userDefinedData'] = user_defined_data
+
+ return data
+
+ def handle_upload_failed(self, pnf_pkg_id):
+ pnf_pkg = PnfPackageModel.objects.filter(pnfPackageId=pnf_pkg_id)
+ pnf_pkg.update(onboardingState=PKG_STATUS.CREATED)
+
+ def parse_pnfd(self, csar_id, inputs):
+ ret = None
+ try:
+ pnf_pkg = PnfPackageModel.objects.filter(pnfPackageId=csar_id)
+ if not pnf_pkg:
+ raise GenericparserException("PNF CSAR(%s) does not exist." % csar_id)
+ csar_path = pnf_pkg[0].localFilePath
+ ret = {"model": toscaparsers.parse_pnfd(csar_path, inputs)}
+ except GenericparserException as e:
+ return [1, e.message]
+ except Exception as e:
+ logger.error(e.message)
+ return [1, e.message]
+ return [0, ret]
diff --git a/genericparser/packages/biz/sdc_ns_package.py b/genericparser/packages/biz/sdc_ns_package.py
new file mode 100644
index 0000000..42346ff
--- /dev/null
+++ b/genericparser/packages/biz/sdc_ns_package.py
@@ -0,0 +1,172 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import sys
+import traceback
+
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH, GENERICPARSER_URL_PATH
+from genericparser.pub.config.config import REG_TO_MSB_REG_PARAM
+from genericparser.pub.database.models import NSPackageModel
+from genericparser.pub.exceptions import GenericparserException
+from genericparser.pub.msapi import sdc
+from genericparser.pub.utils import toscaparsers
+from genericparser.packages.biz.ns_descriptor import NsDescriptor
+from genericparser.pub.utils import fileutil
+
+logger = logging.getLogger(__name__)
+
+STATUS_SUCCESS, STATUS_FAILED = "success", "failed"
+
+METADATA = "metadata"
+
+
def fmt_ns_pkg_rsp(status, desc, error_code="500"):
    """Build the standard [retcode, payload] response for NS package ops.

    The return code is always 0; the payload carries the status string,
    its human-readable description and an error code (default "500").
    """
    payload = dict(status=status, statusDescription=desc, errorCode=error_code)
    return [0, payload]
+
+
def ns_on_distribute(csar_id):
    """Distribute an NS CSAR from SDC and register it in the catalog.

    Any failure rolls back by deleting the (possibly half-created) CSAR.
    Returns the standard response built by fmt_ns_pkg_rsp.
    """
    ret = None
    try:
        ret = NsPackage().on_distribute(csar_id)
    except GenericparserException as e:
        NsPackage().delete_csar(csar_id)
        # str(e): Exception.message does not exist on Python 3.
        return fmt_ns_pkg_rsp(STATUS_FAILED, str(e))
    except Exception:
        # Was a bare "except:": narrowed so SystemExit/KeyboardInterrupt
        # still propagate instead of being swallowed.
        logger.error(traceback.format_exc())
        NsPackage().delete_csar(csar_id)
        return fmt_ns_pkg_rsp(STATUS_FAILED, str(sys.exc_info()))
    if ret[0]:
        return fmt_ns_pkg_rsp(STATUS_FAILED, ret[1])
    return fmt_ns_pkg_rsp(STATUS_SUCCESS, ret[1], "")
+
+
def ns_delete_csar(csar_id):
    """Delete an NS CSAR and return the standard status response."""
    try:
        ret = NsPackage().delete_csar(csar_id)
    except GenericparserException as e:
        # str(e): Exception.message does not exist on Python 3.
        return fmt_ns_pkg_rsp(STATUS_FAILED, str(e))
    except Exception:
        # Narrowed from a bare "except:" to avoid catching SystemExit etc.
        logger.error(traceback.format_exc())
        return fmt_ns_pkg_rsp(STATUS_FAILED, str(sys.exc_info()))
    return fmt_ns_pkg_rsp(STATUS_SUCCESS, ret[1], "")
+
+
def ns_get_csars():
    """Query all on-boarded NS CSARs.

    Returns [0, csars] on success or [1, error_message] on failure.
    """
    try:
        return NsPackage().get_csars()
    except GenericparserException as e:
        # str(e): Exception.message does not exist on Python 3.
        return [1, str(e)]
    except Exception:
        # Narrowed from a bare "except:" to avoid catching SystemExit etc.
        logger.error(traceback.format_exc())
        return [1, str(sys.exc_info())]
+
+
def ns_get_csar(csar_id):
    """Query one NS CSAR by id.

    Returns [0, info] on success or [1, error_message] on failure.
    """
    try:
        return NsPackage().get_csar(csar_id)
    except GenericparserException as e:
        # str(e): Exception.message does not exist on Python 3.
        return [1, str(e)]
    except Exception as e:
        logger.error(str(e))
        logger.error(traceback.format_exc())
        return [1, str(sys.exc_info())]
+
+
def parse_nsd(csar_id, inputs):
    """Parse the NSD model of an on-boarded NS CSAR.

    Returns [0, {"model": ...}] on success or [1, error_message] on failure.
    """
    try:
        ns_pkg = NSPackageModel.objects.filter(nsPackageId=csar_id)
        if not ns_pkg:
            raise GenericparserException("NS CSAR(%s) does not exist." % csar_id)
        csar_path = ns_pkg[0].localFilePath
        ret = {"model": toscaparsers.parse_nsd(csar_path, inputs)}
    except GenericparserException as e:
        # str(e): Exception.message does not exist on Python 3.
        return [1, str(e)]
    except Exception as e:
        logger.error(str(e))
        logger.error(traceback.format_exc())
        return [1, str(sys.exc_info())]
    return [0, ret]
+
+
class NsPackage(object):
    """
    Actions for sdc ns package.

    Wraps distribution from SDC, deletion and querying of NS CSARs backed
    by NSPackageModel rows.
    """

    def __init__(self):
        pass

    def on_distribute(self, csar_id):
        """Download the NS CSAR from SDC and on-board it.

        Returns [1, msg] when the CSAR already exists, otherwise
        [0, success message] after the descriptor is created and parsed.
        """
        if NSPackageModel.objects.filter(nsPackageId=csar_id):
            return [1, "NS CSAR(%s) already exists." % csar_id]

        # Fetch artifact metadata from SDC, then download the CSAR locally.
        artifact = sdc.get_artifact(sdc.ASSETTYPE_SERVICES, csar_id)
        local_path = os.path.join(GENERICPARSER_ROOT_PATH, csar_id)
        csar_name = "%s.csar" % artifact.get("name", csar_id)
        local_file_name = sdc.download_artifacts(artifact["toscaModelURL"], local_path, csar_name)
        if local_file_name.endswith(".csar") or local_file_name.endswith(".zip"):
            # If the SDC package embeds a standalone ns.csar, parse that
            # instead of the outer archive.
            artifact_vnf_file = fileutil.unzip_file(local_file_name, local_path, "Artifacts/Deployment/OTHER/ns.csar")
            if os.path.exists(artifact_vnf_file):
                local_file_name = artifact_vnf_file

        data = {
            'userDefinedData': {}
        }
        nsd = NsDescriptor()
        nsd.create(data, csar_id)
        nsd.parse_nsd_and_save(csar_id, local_file_name)
        return [0, "CSAR(%s) distributed successfully." % csar_id]

    def delete_csar(self, csar_id):
        """Delete the NS descriptor (DB record and local files) for csar_id."""
        nsd = NsDescriptor()
        nsd.delete_single(csar_id)
        return [0, "Delete CSAR(%s) successfully." % csar_id]

    def get_csars(self):
        """Return [0, [...]] with the info dict of every on-boarded NS CSAR."""
        csars = []
        nss = NSPackageModel.objects.filter()
        for ns in nss:
            ret = self.get_csar(ns.nsPackageId)
            csars.append(ret[1])
        return [0, csars]

    def get_csar(self, csar_id):
        """Return [0, {"csarId", "packageInfo"}] for one NS CSAR.

        Raises:
            GenericparserException: if no package with csar_id exists.
        """
        package_info = {}
        csars = NSPackageModel.objects.filter(nsPackageId=csar_id)
        if csars:
            package_info["nsdId"] = csars[0].nsdId
            package_info["nsPackageId"] = csars[0].nsPackageId
            package_info["nsdProvider"] = csars[0].nsdDesginer
            package_info["nsdVersion"] = csars[0].nsdVersion
            package_info["csarName"] = csars[0].nsPackageUri
            package_info["nsdModel"] = csars[0].nsdModel
            package_info["nsdInvariantId"] = csars[0].invariantId
            # Download URL is advertised via the first MSB registration node.
            package_info["downloadUrl"] = "http://%s:%s/%s/%s/%s" % (
                REG_TO_MSB_REG_PARAM[0]["nodes"][0]["ip"],
                REG_TO_MSB_REG_PARAM[0]["nodes"][0]["port"],
                GENERICPARSER_URL_PATH,
                csar_id,
                csars[0].nsPackageUri)
        else:
            raise GenericparserException("Ns package[%s] not Found." % csar_id)

        return [0, {"csarId": csar_id, "packageInfo": package_info}]
diff --git a/genericparser/packages/biz/sdc_service_package.py b/genericparser/packages/biz/sdc_service_package.py
new file mode 100644
index 0000000..0390b5c
--- /dev/null
+++ b/genericparser/packages/biz/sdc_service_package.py
@@ -0,0 +1,114 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import traceback
+
+from coverage.xmlreport import os
+
+from genericparser.packages.biz.service_descriptor import ServiceDescriptor
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH, REG_TO_MSB_REG_PARAM, GENERICPARSER_URL_PATH
+from genericparser.pub.database.models import ServicePackageModel
+from genericparser.pub.exceptions import GenericparserException, PackageNotFoundException, \
+ PackageHasExistsException
+from genericparser.pub.msapi import sdc
+from genericparser.pub.utils import fileutil, toscaparsers
+
+logger = logging.getLogger(__name__)
+
+
class ServicePackage(object):
    """
    Actions for sdc service package.

    Handles distribution from SDC, deletion, querying and parsing of
    service CSARs stored as ServicePackageModel rows.
    NOTE(review): this module imports os via "from coverage.xmlreport
    import os" — presumably an IDE auto-import accident; should be a
    plain "import os". Confirm and fix at file level.
    """

    def __init__(self):
        pass

    def on_distribute(self, csar_id):
        """Download the service CSAR from SDC, create and parse its descriptor.

        Raises:
            PackageHasExistsException: if the package already exists.
            Exception: any download/parse error, after rolling back the
                half-created package.
        """
        if ServicePackageModel.objects.filter(servicePackageId=csar_id):
            raise PackageHasExistsException("Service CSAR(%s) already exists." % csar_id)

        try:
            artifact = sdc.get_artifact(sdc.ASSETTYPE_SERVICES, csar_id)
            local_path = os.path.join(GENERICPARSER_ROOT_PATH, csar_id)
            csar_name = "%s.csar" % artifact.get("name", csar_id)
            local_file_name = sdc.download_artifacts(artifact["toscaModelURL"], local_path, csar_name)
            if local_file_name.endswith(".csar") or local_file_name.endswith(".zip"):
                fileutil.unzip_file(local_file_name, local_path, "")
            data = {
                'userDefinedData': {}
            }
            serviced = ServiceDescriptor()
            serviced.create(data, csar_id)
            serviced.parse_serviced_and_save(csar_id, local_file_name)

        except Exception as e:
            logger.error(traceback.format_exc())
            # Roll back the half-created descriptor before re-raising.
            if ServicePackageModel.objects.filter(servicePackageId=csar_id):
                ServicePackage().delete_csar(csar_id)
            raise e

    def delete_csar(self, csar_id):
        """Delete the service descriptor (DB record and local files)."""
        serviced = ServiceDescriptor()
        serviced.delete_single(csar_id)

    def get_csars(self):
        """Return the info dict of every on-boarded service CSAR."""
        csars = []
        packages = ServicePackageModel.objects.filter()
        for package in packages:
            csar = self.get_csar(package.servicePackageId)
            csars.append(csar)
        return csars

    def get_csar(self, csar_id):
        """Return {"csarId", "packageInfo"} for one service CSAR.

        Raises:
            PackageNotFoundException: if no package with csar_id exists.
        """
        package_info = {}
        csars = ServicePackageModel.objects.filter(servicePackageId=csar_id)
        if csars:
            package_info["servicedId"] = csars[0].servicedId
            package_info["servicePackageId"] = csars[0].servicePackageId
            package_info["servicedProvider"] = csars[0].servicedDesigner
            package_info["servicedVersion"] = csars[0].servicedVersion
            package_info["csarName"] = csars[0].servicePackageUri
            package_info["servicedModel"] = csars[0].servicedModel
            package_info["servicedInvariantId"] = csars[0].invariantId
            # Download URL is advertised via the first MSB registration node.
            package_info["downloadUrl"] = "http://%s:%s/%s/%s/%s" % (
                REG_TO_MSB_REG_PARAM[0]["nodes"][0]["ip"],
                REG_TO_MSB_REG_PARAM[0]["nodes"][0]["port"],
                GENERICPARSER_URL_PATH,
                csar_id,
                csars[0].servicePackageUri)
        else:
            error_message = "Service package[%s] not Found." % csar_id
            logger.error(error_message)
            raise PackageNotFoundException(error_message)

        return {"csarId": csar_id, "packageInfo": package_info}

    def parse_serviced(self, csar_id, inputs):
        """Parse the service descriptor model of an on-boarded CSAR.

        Raises:
            PackageNotFoundException: if the package does not exist.
        """
        service_pkg = ServicePackageModel.objects.filter(servicePackageId=csar_id)
        if not service_pkg:
            raise PackageNotFoundException("Service CSAR(%s) does not exist." % csar_id)

        try:
            csar_path = service_pkg[0].localFilePath
            ret = {"model": toscaparsers.parse_sd(csar_path, inputs)}
            return ret
        except GenericparserException as e:
            # NOTE(review): e.message does not exist on Python 3 — would
            # raise AttributeError here; should be str(e). Confirm and fix.
            logger.error(e.message)
            raise e
        except Exception as e:
            logger.error(e.message)
            logger.error(traceback.format_exc())
            raise e
diff --git a/genericparser/packages/biz/sdc_vnf_package.py b/genericparser/packages/biz/sdc_vnf_package.py
new file mode 100644
index 0000000..afd219a
--- /dev/null
+++ b/genericparser/packages/biz/sdc_vnf_package.py
@@ -0,0 +1,254 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import sys
+import threading
+import traceback
+
+from genericparser.packages.const import PKG_STATUS
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH, GENERICPARSER_URL_PATH
+from genericparser.pub.config.config import REG_TO_MSB_REG_PARAM
+from genericparser.pub.database.models import VnfPackageModel
+from genericparser.pub.exceptions import GenericparserException
+from genericparser.pub.msapi import sdc
+from genericparser.pub.utils import fileutil
+from genericparser.pub.utils import toscaparsers
+from genericparser.pub.utils.jobutil import JobUtil
+
+logger = logging.getLogger(__name__)
+
+JOB_ERROR = 255
+
+
def nf_get_csars():
    """Query all on-boarded VNF CSARs.

    Returns [0, csars] on success or [1, error_message] on failure.
    """
    try:
        return NfPackage().get_csars()
    except GenericparserException as e:
        # str(e): Exception.message does not exist on Python 3.
        return [1, str(e)]
    except Exception as e:
        logger.error(str(e))
        logger.error(traceback.format_exc())
        return [1, str(sys.exc_info())]
+
+
def nf_get_csar(csar_id):
    """Query one VNF CSAR by id.

    Returns [0, info] on success or [1, error_message] on failure.
    """
    try:
        return NfPackage().get_csar(csar_id)
    except GenericparserException as e:
        # str(e): Exception.message does not exist on Python 3.
        return [1, str(e)]
    except Exception as e:
        logger.error(str(e))
        logger.error(traceback.format_exc())
        return [1, str(sys.exc_info())]
+
+
def parse_vnfd(csar_id, inputs):
    """Parse the VNFD model of an on-boarded VNF CSAR.

    Returns [0, {"model": ...}] on success or [1, error_message] on failure.
    """
    try:
        nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=csar_id)
        if not nf_pkg:
            raise GenericparserException("VNF CSAR(%s) does not exist." % csar_id)
        csar_path = nf_pkg[0].localFilePath
        ret = {"model": toscaparsers.parse_vnfd(csar_path, inputs)}
    except GenericparserException as e:
        # str(e): Exception.message does not exist on Python 3.
        return [1, str(e)]
    except Exception as e:
        logger.error(str(e))
        logger.error(traceback.format_exc())
        return [1, str(sys.exc_info())]
    return [0, ret]
+
+
class NfDistributeThread(threading.Thread):
    """
    Sdc NF Package Distribute

    Background thread that downloads a VNF CSAR from SDC, parses its VNFD
    and stores it as a VnfPackageModel row, reporting progress via JobUtil.
    """

    def __init__(self, csar_id, vim_ids, lab_vim_id, job_id):
        threading.Thread.__init__(self)
        self.csar_id = csar_id
        self.vim_ids = vim_ids
        self.lab_vim_id = lab_vim_id
        self.job_id = job_id

        # Local directory where the downloaded CSAR is stored (and removed
        # again on rollback).
        self.csar_save_path = os.path.join(GENERICPARSER_ROOT_PATH, csar_id)

    def run(self):
        """Thread entry point: distribute, rolling back on any failure."""
        try:
            self.on_distribute()
        except GenericparserException as e:
            self.rollback_distribute()
            # NOTE(review): e.message does not exist on Python 3 — should
            # be str(e). Confirm target interpreter before changing.
            JobUtil.add_job_status(self.job_id, JOB_ERROR, e.message)
        except Exception as e:
            logger.error(e.message)
            logger.error(traceback.format_exc())
            logger.error(str(sys.exc_info()))
            self.rollback_distribute()
            JobUtil.add_job_status(self.job_id, JOB_ERROR, "Failed to distribute CSAR(%s)" % self.csar_id)

    def on_distribute(self):
        """Download the CSAR from SDC, parse the VNFD and persist it.

        Raises:
            GenericparserException: if the VNFD is empty or the descriptor
                id is already on-boarded.
        """
        JobUtil.create_job(
            inst_type='nf',
            jobaction='on_distribute',
            inst_id=self.csar_id,
            job_id=self.job_id)
        JobUtil.add_job_status(self.job_id, 5, "Start CSAR(%s) distribute." % self.csar_id)

        # Duplicate distribution is reported through job status, not raised.
        if VnfPackageModel.objects.filter(vnfPackageId=self.csar_id):
            err_msg = "NF CSAR(%s) already exists." % self.csar_id
            JobUtil.add_job_status(self.job_id, JOB_ERROR, err_msg)
            return

        artifact = sdc.get_artifact(sdc.ASSETTYPE_RESOURCES, self.csar_id)
        local_path = os.path.join(GENERICPARSER_ROOT_PATH, self.csar_id)
        csar_name = "%s.csar" % artifact.get("name", self.csar_id)
        local_file_name = sdc.download_artifacts(artifact["toscaModelURL"], local_path, csar_name)
        if local_file_name.endswith(".csar") or local_file_name.endswith(".zip"):
            # Prefer an embedded standalone vnf.csar if the package has one.
            artifact_vnf_file = fileutil.unzip_file(local_file_name, local_path, "Artifacts/Deployment/OTHER/vnf.csar")
            if os.path.exists(artifact_vnf_file):
                local_file_name = artifact_vnf_file

        vnfd_json = toscaparsers.parse_vnfd(local_file_name)
        vnfd = json.JSONDecoder().decode(vnfd_json)

        if not vnfd.get("vnf"):
            raise GenericparserException("VNF propeties and metadata in VNF Package(id=%s) are empty." % self.csar_id)

        vnfd_id = vnfd["vnf"]["properties"].get("descriptor_id", "")
        if VnfPackageModel.objects.filter(vnfdId=vnfd_id):
            logger.error("VNF package(%s) already exists.", vnfd_id)
            raise GenericparserException("VNF package(%s) already exists." % vnfd_id)
        JobUtil.add_job_status(self.job_id, 30, "Save CSAR(%s) to database." % self.csar_id)
        # NOTE(review): "descriptor_verison" looks like a typo for
        # "descriptor_version" — confirm against toscaparsers output
        # before correcting, since it must match the emitted key.
        vnfd_ver = vnfd["vnf"]["properties"].get("descriptor_verison", "")
        vnf_provider = vnfd["vnf"]["properties"].get("provider", "")
        vnf_software_version = vnfd["vnf"]["properties"].get("software_version", "")
        vnfd_product_name = vnfd["vnf"]["properties"].get("product_name", "")
        VnfPackageModel(
            vnfPackageId=self.csar_id,
            vnfdId=vnfd_id,
            vnfVendor=vnf_provider,
            vnfdProductName=vnfd_product_name,
            vnfdVersion=vnfd_ver,
            vnfSoftwareVersion=vnf_software_version,
            vnfdModel=vnfd_json,
            localFilePath=local_file_name,
            vnfPackageUri=csar_name,
            onboardingState=PKG_STATUS.ONBOARDED,
            operationalState=PKG_STATUS.ENABLED,
            usageState=PKG_STATUS.NOT_IN_USE
        ).save()
        JobUtil.add_job_status(self.job_id, 100, "CSAR(%s) distribute successfully." % self.csar_id)

    def rollback_distribute(self):
        """Best-effort cleanup: drop the DB row and the downloaded files."""
        try:
            VnfPackageModel.objects.filter(vnfPackageId=self.csar_id).delete()
            fileutil.delete_dirs(self.csar_save_path)
        except Exception as e:
            # Rollback must never raise; just record what went wrong.
            logger.error(e.message)
            logger.error(traceback.format_exc())
            logger.error(str(sys.exc_info()))
+
+
class NfPkgDeleteThread(threading.Thread):
    """
    Sdc NF Package Deleting

    Background thread that removes a VNF package's DB record and local
    CSAR directory, reporting progress through JobUtil.
    """

    def __init__(self, csar_id, job_id):
        threading.Thread.__init__(self)
        self.csar_id = csar_id
        self.job_id = job_id

    def run(self):
        """Thread entry point: delete the CSAR, mapping failures to job status."""
        try:
            self.delete_csar()
        except GenericparserException as e:
            # str(e): Exception.message does not exist on Python 3.
            JobUtil.add_job_status(self.job_id, JOB_ERROR, str(e))
        except Exception as e:
            logger.error(str(e))
            logger.error(traceback.format_exc())
            JobUtil.add_job_status(self.job_id, JOB_ERROR, "Failed to delete CSAR(%s)" % self.csar_id)

    def delete_csar(self):
        """Delete the DB record, then the local files, tracking job progress."""
        JobUtil.create_job(
            inst_type='nf',
            jobaction='delete',
            inst_id=self.csar_id,
            job_id=self.job_id)
        JobUtil.add_job_status(self.job_id, 5, "Start to delete CSAR(%s)." % self.csar_id)

        VnfPackageModel.objects.filter(vnfPackageId=self.csar_id).delete()

        JobUtil.add_job_status(self.job_id, 50, "Delete local CSAR(%s) file." % self.csar_id)

        csar_save_path = os.path.join(GENERICPARSER_ROOT_PATH, self.csar_id)
        fileutil.delete_dirs(csar_save_path)

        JobUtil.add_job_status(self.job_id, 100, "Delete CSAR(%s) successfully." % self.csar_id)
+
+
class NfPackage(object):
    """
    Actions for sdc nf package.

    Query helpers over VnfPackageModel rows.
    """

    def __init__(self):
        pass

    def get_csars(self):
        """Return [0, [...]] with the info dict of every on-boarded VNF CSAR."""
        csars = []
        nf_pkgs = VnfPackageModel.objects.filter()
        for nf_pkg in nf_pkgs:
            ret = self.get_csar(nf_pkg.vnfPackageId)
            csars.append(ret[1])
        return [0, csars]

    def get_csar(self, csar_id):
        """Return [0, csar_info] for one VNF CSAR.

        csar_id is first looked up as a package id, then as a VNFD id,
        so callers may pass either identifier.

        Raises:
            GenericparserException: if neither lookup matches.
        """
        pkg_info = {}
        nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=csar_id)
        if not nf_pkg:
            # Fall back to lookup by descriptor id.
            nf_pkg = VnfPackageModel.objects.filter(vnfdId=csar_id)

        if nf_pkg:
            db_csar_id = nf_pkg[0].vnfPackageId
            pkg_info["vnfdId"] = nf_pkg[0].vnfdId
            pkg_info["vnfPackageId"] = nf_pkg[0].vnfPackageId
            pkg_info["vnfdProvider"] = nf_pkg[0].vnfVendor
            pkg_info["vnfdVersion"] = nf_pkg[0].vnfdVersion
            pkg_info["vnfVersion"] = nf_pkg[0].vnfSoftwareVersion
            pkg_info["csarName"] = nf_pkg[0].vnfPackageUri
            pkg_info["vnfdModel"] = nf_pkg[0].vnfdModel
            # Download URL is advertised via the first MSB registration node.
            pkg_info["downloadUrl"] = "http://%s:%s/%s/%s/%s" % (
                REG_TO_MSB_REG_PARAM[0]["nodes"][0]["ip"],
                REG_TO_MSB_REG_PARAM[0]["nodes"][0]["port"],
                GENERICPARSER_URL_PATH,
                db_csar_id,
                nf_pkg[0].vnfPackageUri)
        else:
            raise GenericparserException("Vnf package[%s] not Found." % csar_id)

        csar_info = {
            "csarId": db_csar_id,
            "packageInfo": pkg_info,
            "imageInfo": []
        }
        return [0, csar_info]
diff --git a/genericparser/packages/biz/service_descriptor.py b/genericparser/packages/biz/service_descriptor.py
new file mode 100644
index 0000000..b8cba6d
--- /dev/null
+++ b/genericparser/packages/biz/service_descriptor.py
@@ -0,0 +1,130 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import uuid
+
+
+from genericparser.packages.const import PKG_STATUS
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH
+from genericparser.pub.database.models import ServicePackageModel, VnfPackageModel, PnfPackageModel
+from genericparser.pub.exceptions import GenericparserException, PackageNotFoundException
+from genericparser.pub.utils import toscaparsers, fileutil
+from genericparser.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
class ServiceDescriptor(object):
    """
    Action for Service Descriptor

    Creates, parses and deletes service descriptor records
    (ServicePackageModel rows).
    """

    def __init__(self):
        pass

    def create(self, data, csar_id=None):
        """Create a ServicePackageModel row in CREATED/DISABLED state.

        Args:
            data: request body; only 'userDefinedData' is consumed.
            csar_id: optional id to use; a uuid4 is generated otherwise.

        Returns:
            The response dict describing the new descriptor.
        """
        logger.info('Start to create a ServiceD...')
        user_defined_data = ignore_case_get(data, 'userDefinedData', {})
        data = {
            'id': csar_id if csar_id else str(uuid.uuid4()),
            'servicedOnboardingState': PKG_STATUS.CREATED,
            'servicedOperationalState': PKG_STATUS.DISABLED,
            'servicedUsageState': PKG_STATUS.NOT_IN_USE,
            'userDefinedData': user_defined_data,
            '_links': None  # TODO
        }
        ServicePackageModel.objects.create(
            servicePackageId=data['id'],
            onboardingState=data['servicedOnboardingState'],
            operationalState=data['servicedOperationalState'],
            usageState=data['servicedUsageState'],
            userDefinedData=json.dumps(user_defined_data)
        )
        logger.info('A ServiceD(%s) has been created.' % data['id'])
        return data

    def parse_serviced_and_save(self, serviced_info_id, local_file_name):
        """Parse the service CSAR and persist its model on the package row.

        Validates that the descriptor id is unique and that every VNF and
        PNF the service references is already distributed.

        Raises:
            GenericparserException: on missing descriptor id, duplicate
                descriptor, or an undistributed VNF/PNF dependency.
        """
        logger.info('Start to process ServiceD(%s)...' % serviced_info_id)
        service_pkgs = ServicePackageModel.objects.filter(servicePackageId=serviced_info_id)
        service_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)

        serviced_json = toscaparsers.parse_sd(local_file_name)
        serviced = json.JSONDecoder().decode(serviced_json)

        serviced_id = serviced.get("service", {}).get("properties", {}).get("descriptor_id", "")
        serviced_name = serviced.get("service", {}).get("properties", {}).get("name", "")
        serviced_version = serviced.get("service", {}).get("properties", {}).get("version", "")
        serviced_designer = serviced.get("service", {}).get("properties", {}).get("designer", "")
        invariant_id = serviced.get("service", {}).get("properties", {}).get("invariant_id", "")
        if serviced_id == "":
            raise GenericparserException("serviced_id(%s) does not exist in metadata." % serviced_id)
        other_nspkg = ServicePackageModel.objects.filter(servicedId=serviced_id)
        if other_nspkg and other_nspkg[0].servicePackageId != serviced_info_id:
            logger.warn("ServiceD(%s,%s) already exists.", serviced_id, other_nspkg[0].servicePackageId)
            raise GenericparserException("ServiceD(%s) already exists." % serviced_id)

        # NOTE(review): hard-indexes serviced["vnfs"]/["pnfs"] — presumably
        # the parser always emits these keys; confirm, otherwise this
        # raises KeyError for services without VNFs/PNFs.
        for vnf in serviced["vnfs"]:
            vnfd_id = vnf["properties"].get("descriptor_id", "undefined")
            if vnfd_id == "undefined":
                vnfd_id = vnf["properties"].get("id", "undefined")
            pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
            if not pkg:
                # Fall back to lookup by package id.
                pkg = VnfPackageModel.objects.filter(vnfPackageId=vnfd_id)
            if not pkg:
                vnfd_name = vnf.get("vnf_id", "undefined")
                logger.error("[%s] is not distributed.", vnfd_name)
                raise GenericparserException("VNF package(%s) is not distributed." % vnfd_id)

        for pnf in serviced["pnfs"]:
            pnfd_id = pnf["properties"].get("descriptor_id", "undefined")
            if pnfd_id == "undefined":
                pnfd_id = pnf["properties"].get("id", "undefined")
            pkg = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
            if not pkg:
                pkg = PnfPackageModel.objects.filter(pnfPackageId=pnfd_id)
            if not pkg:
                pnfd_name = pnf.get("pnf_id", "undefined")
                logger.error("[%s] is not distributed.", pnfd_name)
                # NOTE(review): message uses pnfd_name while the VNF branch
                # above uses the descriptor id — confirm which is intended.
                raise GenericparserException("PNF package(%s) is not distributed." % pnfd_name)

        service_pkgs.update(
            servicedId=serviced_id,
            servicedName=serviced_name,
            servicedDesigner=serviced_designer,
            servicedDescription=serviced.get("description", ""),
            servicedVersion=serviced_version,
            invariantId=invariant_id,
            onboardingState=PKG_STATUS.ONBOARDED,
            operationalState=PKG_STATUS.ENABLED,
            usageState=PKG_STATUS.NOT_IN_USE,
            servicePackageUri=local_file_name,
            sdcCsarId=serviced_info_id,
            localFilePath=local_file_name,
            servicedModel=serviced_json
        )
        logger.info('ServiceD(%s) has been processed.' % serviced_info_id)

    def delete_single(self, serviced_info_id):
        """Delete a service package row and its local files.

        Raises:
            PackageNotFoundException: if the package does not exist.
        """
        logger.info('Start to delete ServiceD(%s)...' % serviced_info_id)
        service_pkgs = ServicePackageModel.objects.filter(servicePackageId=serviced_info_id)
        if not service_pkgs.exists():
            logger.warn('ServiceD(%s) not found.' % serviced_info_id)
            raise PackageNotFoundException("Service package[%s] not Found." % serviced_info_id)
        service_pkgs.delete()
        service_pkg_path = os.path.join(GENERICPARSER_ROOT_PATH, serviced_info_id)
        fileutil.delete_dirs(service_pkg_path)
        logger.info('ServiceD(%s) has been deleted.' % serviced_info_id)
diff --git a/genericparser/packages/biz/vnf_package.py b/genericparser/packages/biz/vnf_package.py
new file mode 100644
index 0000000..b655f1f
--- /dev/null
+++ b/genericparser/packages/biz/vnf_package.py
@@ -0,0 +1,228 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import sys
+import threading
+import traceback
+import urllib2
+import uuid
+
+from genericparser.packages.biz.common import parse_file_range, read, save
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH
+from genericparser.pub.database.models import VnfPackageModel, NSPackageModel
+from genericparser.pub.exceptions import GenericparserException, ResourceNotFoundException
+from genericparser.pub.utils.values import ignore_case_get
+from genericparser.pub.utils import fileutil, toscaparsers
+from genericparser.packages.const import PKG_STATUS
+
+
+logger = logging.getLogger(__name__)
+
+
class VnfPackage(object):
    """CRUD operations for VNF packages (VnfPackageModel rows)."""

    def __init__(self):
        pass

    def create_vnf_pkg(self, data):
        """Create an empty VNF package row in CREATED/DISABLED state.

        Args:
            data: request body; only 'userDefinedData' is consumed.

        Returns:
            The VnfPkgInfo response dict for the new package.
        """
        user_defined_data = ignore_case_get(data, "userDefinedData", {})
        vnf_pkg_id = str(uuid.uuid4())
        VnfPackageModel.objects.create(
            vnfPackageId=vnf_pkg_id,
            onboardingState=PKG_STATUS.CREATED,
            operationalState=PKG_STATUS.DISABLED,
            usageState=PKG_STATUS.NOT_IN_USE,
            userDefinedData=json.dumps(user_defined_data)
        )
        data = {
            "id": vnf_pkg_id,
            "onboardingState": PKG_STATUS.CREATED,
            "operationalState": PKG_STATUS.DISABLED,
            "usageState": PKG_STATUS.NOT_IN_USE,
            "userDefinedData": user_defined_data,
            "_links": None
        }
        return data

    def query_multiple(self):
        """Return the VnfPkgInfo dict of every VNF package."""
        pkgs_info = []
        nf_pkgs = VnfPackageModel.objects.filter()
        for nf_pkg in nf_pkgs:
            ret = fill_response_data(nf_pkg)
            pkgs_info.append(ret)
        return pkgs_info

    def query_single(self, vnf_pkg_id):
        """Return the VnfPkgInfo dict of one package.

        Raises:
            ResourceNotFoundException: if the package does not exist.
        """
        nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
        if not nf_pkg.exists():
            logger.error('VNF package(%s) does not exist.' % vnf_pkg_id)
            raise ResourceNotFoundException('VNF package(%s) does not exist.' % vnf_pkg_id)
        return fill_response_data(nf_pkg[0])

    def delete_vnf_pkg(self, vnf_pkg_id):
        """Delete a VNF package row and its local files (idempotent).

        Raises:
            GenericparserException: if the VNFD is still referenced by an
                on-boarded NS descriptor.
        """
        vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
        if not vnf_pkg.exists():
            logger.debug('VNF package(%s) has been deleted.' % vnf_pkg_id)
            return
        '''
        if vnf_pkg[0].operationalState != PKG_STATUS.DISABLED:
            raise GenericparserException("The VNF package (%s) is not disabled" % vnf_pkg_id)
        if vnf_pkg[0].usageState != PKG_STATUS.NOT_IN_USE:
            raise GenericparserException("The VNF package (%s) is in use" % vnf_pkg_id)
        '''
        # Refuse deletion while any NS descriptor still references this VNFD.
        del_vnfd_id = vnf_pkg[0].vnfdId
        ns_pkgs = NSPackageModel.objects.all()
        for ns_pkg in ns_pkgs:
            nsd_model = None
            if ns_pkg.nsdModel:
                nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
            if not nsd_model:
                continue
            for vnf in nsd_model['vnfs']:
                if del_vnfd_id == vnf["properties"]["descriptor_id"]:
                    raise GenericparserException('VNFD(%s) is referenced.' % del_vnfd_id)
        vnf_pkg.delete()
        vnf_pkg_path = os.path.join(GENERICPARSER_ROOT_PATH, vnf_pkg_id)
        fileutil.delete_dirs(vnf_pkg_path)
        logger.info('VNF package(%s) has been deleted.' % vnf_pkg_id)

    def upload(self, vnf_pkg_id, remote_file):
        """Store an uploaded package file locally; returns the local path."""
        logger.info('Start to upload VNF package(%s)...' % vnf_pkg_id)
        vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
        # if vnf_pkg[0].onboardingState != PKG_STATUS.CREATED:
        #     logger.error("VNF package(%s) is not CREATED" % vnf_pkg_id)
        #     raise GenericparserException("VNF package(%s) is not CREATED" % vnf_pkg_id)
        vnf_pkg.update(onboardingState=PKG_STATUS.UPLOADING)

        local_file_name = save(remote_file, vnf_pkg_id)
        logger.info('VNF package(%s) has been uploaded.' % vnf_pkg_id)
        return local_file_name

    def download(self, vnf_pkg_id, file_range):
        """Return package file content for the requested byte range.

        Raises:
            ResourceNotFoundException: if the package does not exist.
            GenericparserException: if the package is not on-boarded yet.
        """
        logger.info('Start to download VNF package(%s)...' % vnf_pkg_id)
        nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
        if not nf_pkg.exists():
            logger.error('VNF package(%s) does not exist.' % vnf_pkg_id)
            raise ResourceNotFoundException('VNF package(%s) does not exist.' % vnf_pkg_id)
        if nf_pkg[0].onboardingState != PKG_STATUS.ONBOARDED:
            raise GenericparserException("VNF package (%s) is not on-boarded" % vnf_pkg_id)

        local_file_path = nf_pkg[0].localFilePath
        start, end = parse_file_range(local_file_path, file_range)
        logger.info('VNF package (%s) has been downloaded.' % vnf_pkg_id)
        return read(local_file_path, start, end)
+
+
class VnfPkgUploadThread(threading.Thread):
    """Background upload of a VNF package from a remote URI.

    NOTE(review): uses urllib2, so this module is Python 2 only — confirm
    before any interpreter upgrade.
    """

    def __init__(self, data, vnf_pkg_id):
        threading.Thread.__init__(self)
        self.vnf_pkg_id = vnf_pkg_id
        # Request body; 'addressInformation' carries the source URI.
        self.data = data
        # Set by upload_vnf_pkg_from_uri once the file is written locally.
        self.upload_file_name = None

    def run(self):
        """Thread entry point: download the package, then parse and save it."""
        try:
            self.upload_vnf_pkg_from_uri()
            parse_vnfd_and_save(self.vnf_pkg_id, self.upload_file_name)
        except GenericparserException as e:
            # NOTE(review): e.message does not exist on Python 3 — should
            # be str(e) if this is ever ported.
            logger.error(e.message)
        except Exception as e:
            logger.error(e.message)
            logger.error(traceback.format_exc())
            logger.error(str(sys.exc_info()))

    def upload_vnf_pkg_from_uri(self):
        """Fetch the package file from 'addressInformation' into local storage.

        Raises:
            GenericparserException: if the package is not in CREATED state.
        """
        logger.info("Start to upload VNF packge(%s) from URI..." % self.vnf_pkg_id)
        vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=self.vnf_pkg_id)
        if vnf_pkg[0].onboardingState != PKG_STATUS.CREATED:
            logger.error("VNF package(%s) is not CREATED" % self.vnf_pkg_id)
            raise GenericparserException("VNF package (%s) is not created" % self.vnf_pkg_id)
        vnf_pkg.update(onboardingState=PKG_STATUS.UPLOADING)

        uri = ignore_case_get(self.data, "addressInformation")
        request = urllib2.Request(uri)
        response = urllib2.urlopen(request)

        local_file_dir = os.path.join(GENERICPARSER_ROOT_PATH, self.vnf_pkg_id)
        self.upload_file_name = os.path.join(local_file_dir, os.path.basename(uri))
        if not os.path.exists(local_file_dir):
            fileutil.make_dirs(local_file_dir)
        with open(self.upload_file_name, "wb") as local_file:
            local_file.write(response.read())
        response.close()
        logger.info('VNF packge(%s) has been uploaded.' % self.vnf_pkg_id)
+
+
def fill_response_data(nf_pkg):
    """Convert a VnfPackageModel row into the VnfPkgInfo response dict.

    checksum and userDefinedData are JSON-decoded only when present;
    not-yet-supported fields are emitted as None placeholders.
    """
    pkg_info = {
        "id": nf_pkg.vnfPackageId,
        "vnfdId": nf_pkg.vnfdId,
        "vnfProductName": nf_pkg.vnfdProductName,
        "vnfSoftwareVersion": nf_pkg.vnfSoftwareVersion,
        "vnfdVersion": nf_pkg.vnfdVersion,
        "softwareImages": None,  # TODO
        "additionalArtifacts": None,  # TODO
        "onboardingState": nf_pkg.onboardingState,
        "operationalState": nf_pkg.operationalState,
        "usageState": nf_pkg.usageState,
        "_links": None  # TODO
    }
    if nf_pkg.checksum:
        pkg_info["checksum"] = json.loads(nf_pkg.checksum)
    if nf_pkg.userDefinedData:
        pkg_info["userDefinedData"] = json.loads(nf_pkg.userDefinedData)
    return pkg_info
+
+
def parse_vnfd_and_save(vnf_pkg_id, vnf_pkg_path):
    """Parse a local VNF CSAR and persist the VNFD onto the package row.

    Raises:
        GenericparserException: if the VNFD is empty or the descriptor id
            is already owned by another package.
    """
    logger.info('Start to process VNF package(%s)...' % vnf_pkg_id)
    vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
    vnf_pkg.update(onboardingState=PKG_STATUS.PROCESSING)
    vnfd_json = toscaparsers.parse_vnfd(vnf_pkg_path)
    vnfd = json.JSONDecoder().decode(vnfd_json)

    if vnfd.get("vnf", "") != "":
        vnfd_id = vnfd["vnf"]["properties"].get("descriptor_id", "")
        # A different package already owning this descriptor id is a conflict.
        other_pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
        if other_pkg and other_pkg[0].vnfPackageId != vnf_pkg_id:
            logger.error("VNF package(%s,%s) already exists.", other_pkg[0].vnfPackageId, vnfd_id)
            raise GenericparserException("VNF package(%s) already exists." % vnfd_id)
        vnf_provider = vnfd["vnf"]["properties"].get("provider", "")
        # NOTE(review): "descriptor_verison" looks like a typo for
        # "descriptor_version" — confirm against toscaparsers output
        # before correcting, since it must match the emitted key.
        vnfd_ver = vnfd["vnf"]["properties"].get("descriptor_verison", "")
        vnf_software_version = vnfd["vnf"]["properties"].get("software_version", "")
        vnfd_product_name = vnfd["vnf"]["properties"].get("product_name", "")
        vnf_pkg.update(
            vnfPackageId=vnf_pkg_id,
            vnfdId=vnfd_id,
            vnfdProductName=vnfd_product_name,
            vnfVendor=vnf_provider,
            vnfdVersion=vnfd_ver,
            vnfSoftwareVersion=vnf_software_version,
            vnfdModel=vnfd_json,
            onboardingState=PKG_STATUS.ONBOARDED,
            operationalState=PKG_STATUS.ENABLED,
            usageState=PKG_STATUS.NOT_IN_USE,
            localFilePath=vnf_pkg_path,
            vnfPackageUri=os.path.split(vnf_pkg_path)[-1]
        )
    else:
        raise GenericparserException("VNF propeties and metadata in VNF Package(id=%s) are empty." % vnf_pkg_id)
    logger.info('VNF package(%s) has been processed(done).' % vnf_pkg_id)
+
+
def handle_upload_failed(vnf_pkg_id):
    """Roll the VNF package back to CREATED after a failed upload."""
    VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id).update(
        onboardingState=PKG_STATUS.CREATED)
diff --git a/genericparser/packages/biz/vnf_pkg_artifacts.py b/genericparser/packages/biz/vnf_pkg_artifacts.py
new file mode 100644
index 0000000..f2506da
--- /dev/null
+++ b/genericparser/packages/biz/vnf_pkg_artifacts.py
@@ -0,0 +1,40 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from genericparser.pub.database.models import VnfPackageModel
+from genericparser.pub.exceptions import ResourceNotFoundException, ArtifactNotFoundException
+from genericparser.pub.utils import fileutil
+
+logger = logging.getLogger(__name__)
+
+
class FetchVnfPkgArtifact(object):
    """Reads a single artifact file out of an onboarded VNF package."""

    def fetch(self, vnfPkgId, artifactPath):
        """Return the raw bytes of one artifact inside a VNF package.

        Args:
            vnfPkgId: identifier of the VnfPackageModel row.
            artifactPath: package-relative path of the artifact.

        Returns:
            bytes: the artifact file content.

        Raises:
            ResourceNotFoundException: no package with that id exists.
            ArtifactNotFoundException: the artifact is missing, or the
                stored package is not a .csar/.zip archive.
        """
        logger.debug("FetchVnfPkgArtifact--get--single--artifact--biz::>"
                     "ID: %s path: %s" % (vnfPkgId, artifactPath))
        vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnfPkgId)
        if not vnf_pkg.exists():
            err_msg = "NF Package (%s) doesn't exists." % vnfPkgId
            raise ResourceNotFoundException(err_msg)
        vnf_pkg = vnf_pkg.get()
        local_path = vnf_pkg.localFilePath
        if not local_path.endswith((".csar", ".zip")):
            # The original fell through to `return file_content` with the
            # name unbound (UnboundLocalError); report it as a lookup
            # failure instead of crashing.
            raise ArtifactNotFoundException("Couldn't artifact %s" % artifactPath)
        vnf_extract_path = fileutil.unzip_csar_to_tmp(local_path)
        artifact_path = fileutil.get_artifact_path(vnf_extract_path, artifactPath)
        if not artifact_path:
            raise ArtifactNotFoundException("Couldn't artifact %s" % artifactPath)
        # Use a context manager so the file handle is not leaked.
        with open(artifact_path, 'rb') as artifact_file:
            file_content = artifact_file.read()
        return file_content
diff --git a/genericparser/packages/biz/vnf_pkg_subscription.py b/genericparser/packages/biz/vnf_pkg_subscription.py
new file mode 100644
index 0000000..349db08
--- /dev/null
+++ b/genericparser/packages/biz/vnf_pkg_subscription.py
@@ -0,0 +1,183 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ast
+import json
+import logging
+import os
+import requests
+import uuid
+
+from collections import Counter
+from rest_framework import status
+
+from genericparser.packages import const
+from genericparser.pub.database.models import VnfPkgSubscriptionModel
+from genericparser.pub.exceptions import VnfPkgSubscriptionException,\
+ VnfPkgDuplicateSubscriptionException, SubscriptionDoesNotExistsException
+from genericparser.pub.utils.values import ignore_case_get
+
+
+logger = logging.getLogger(__name__)
+
# Maps the external (camelCase) query-parameter names accepted by the
# VNF package subscription query API onto the snake_case
# VnfPkgSubscriptionModel field names they filter on.
ROOT_FILTERS = {
    "notificationTypes": "notification_types",
    "vnfdId": "vnfd_id",
    "vnfPkgId": "vnf_pkg_id",
    "operationalState": "operation_states",
    "usageState": "usage_states"
}
+
+
def is_filter_type_equal(new_filter, existing_filter):
    """Return True when both filter sequences hold the same values.

    The comparison is order-insensitive but multiplicity-aware: every
    value must occur the same number of times in both sequences.
    """
    new_counts = Counter(new_filter)
    existing_counts = Counter(existing_filter)
    return new_counts == existing_counts
+
+
class CreateSubscription(object):
    """Validates and persists one VNF package management subscription."""

    def __init__(self, data):
        """Extract callbackUri, authentication and filter fields from the
        request body *data* (all lookups are case-insensitive)."""
        self.data = data
        self.filter = ignore_case_get(self.data, "filters", {})
        self.callback_uri = ignore_case_get(self.data, "callbackUri")
        self.authentication = ignore_case_get(self.data, "authentication", {})
        self.notification_types = ignore_case_get(self.filter, "notificationTypes", [])
        self.operation_states = ignore_case_get(self.filter, "operationalState", [])
        self.usage_states = ignore_case_get(self.filter, "usageState", [])
        self.vnfd_id = ignore_case_get(self.filter, "vnfdId", [])
        self.vnf_pkg_id = ignore_case_get(self.filter, "vnfPkgId", [])
        self.vnf_products_from_provider = \
            ignore_case_get(self.filter, "vnfProductsFromProviders", {})

    def check_callbackuri_connection(self):
        """Probe the callback URI with a GET request.

        Raises:
            VnfPkgSubscriptionException: the endpoint is unreachable or
                answers with anything other than 204 No Content.
        """
        logger.debug("SubscribeNotification-post::> Sending GET request "
                     "to %s" % self.callback_uri)
        try:
            response = requests.get(self.callback_uri, timeout=2)
        except Exception:
            # Connection-level failure (DNS, refused, timeout, ...).
            # Fixed missing space in the original message ("statuscode").
            raise VnfPkgSubscriptionException("callbackUri %s didn't return 204 status "
                                              "code." % self.callback_uri)
        # Keep the status check outside the try block: the original caught
        # its own, more specific exception and replaced the message.
        if response.status_code != status.HTTP_204_NO_CONTENT:
            raise VnfPkgSubscriptionException("callbackUri %s returns %s status "
                                              "code." % (self.callback_uri, response.status_code))

    def do_biz(self):
        """Validate the request, store the subscription and return it as a dict."""
        self.subscription_id = str(uuid.uuid4())
        self.check_callbackuri_connection()
        self.check_valid_auth_info()
        self.check_valid()
        self.save_db()
        subscription = VnfPkgSubscriptionModel.objects.get(subscription_id=self.subscription_id)
        if subscription:
            return subscription.toDict()

    def check_valid_auth_info(self):
        """Ensure any supplied auth parameters match a declared authType."""
        logger.debug("SubscribeNotification--post::> Validating Auth "
                     "details if provided")
        if self.authentication.get("paramsBasic", {}) and \
                const.BASIC not in self.authentication.get("authType"):
            raise VnfPkgSubscriptionException('Auth type should be ' + const.BASIC)
        if self.authentication.get("paramsOauth2ClientCredentials", {}) and \
                const.OAUTH2_CLIENT_CREDENTIALS not in self.authentication.get("authType"):
            raise VnfPkgSubscriptionException('Auth type should be ' + const.OAUTH2_CLIENT_CREDENTIALS)

    def check_filter_exists(self, sub):
        """Return True when stored subscription *sub* has the same filters
        as this request (state filters and id filters compared as multisets)."""
        # Check the usage states, operationStates
        for filter_type in ["operation_states", "usage_states"]:
            if not is_filter_type_equal(getattr(self, filter_type),
                                        ast.literal_eval(getattr(sub, filter_type))):
                return False
        # If all the above types are same then check id filters
        for id_filter in ["vnfd_id", "vnf_pkg_id"]:
            if not is_filter_type_equal(getattr(self, id_filter),
                                        ast.literal_eval(getattr(sub, id_filter))):
                return False
        return True

    def check_valid(self):
        """Reject the request if an identical callbackUri+filter subscription exists."""
        logger.debug("SubscribeNotification--post::> Checking DB if "
                     "callbackUri already exists")
        subscriptions = VnfPkgSubscriptionModel.objects.filter(callback_uri=self.callback_uri)
        if not subscriptions.exists():
            return True
        for subscription in subscriptions:
            if self.check_filter_exists(subscription):
                raise VnfPkgDuplicateSubscriptionException(
                    "Already Subscription (%s) exists with the "
                    "same callbackUri and filter" % subscription.subscription_id)
        return True

    def save_db(self):
        """Persist the subscription; list/dict fields are stored as JSON text."""
        logger.debug("SubscribeNotification--post::> Saving the subscription "
                     "%s to the database" % self.subscription_id)
        links = {
            "self": {
                "href": os.path.join(const.VNFPKG_SUBSCRIPTION_ROOT_URI, self.subscription_id)
            }
        }
        VnfPkgSubscriptionModel.objects.create(
            subscription_id=self.subscription_id,
            callback_uri=self.callback_uri,
            notification_types=json.dumps(self.notification_types),
            auth_info=json.dumps(self.authentication),
            usage_states=json.dumps(self.usage_states),
            operation_states=json.dumps(self.operation_states),
            vnf_products_from_provider=json.dumps(self.vnf_products_from_provider),
            vnfd_id=json.dumps(self.vnfd_id),
            vnf_pkg_id=json.dumps(self.vnf_pkg_id),
            links=json.dumps(links))
        logger.debug('Create Subscription[%s] success', self.subscription_id)
+
+
class QuerySubscription(object):
    """Read access to stored VNF package subscriptions."""

    def query_multi_subscriptions(self, params):
        """Return all subscriptions matching the request query params.

        Params whose names appear in ROOT_FILTERS become case-insensitive
        "contains" filters on the corresponding model field; unrecognised
        params are ignored. Returns a (possibly empty) list of dicts.
        """
        query_data = {}
        logger.debug("QuerySubscription--get--multi--subscriptions--biz::> Check "
                     "for filters in query params %s" % params)
        # dict.iteritems() was removed in Python 3; use items() instead.
        for query, value in params.items():
            if query in ROOT_FILTERS:
                query_data[ROOT_FILTERS[query] + '__icontains'] = value
        # Query the database with filters if the request has fields in request params, else fetch all records
        if query_data:
            subscriptions = VnfPkgSubscriptionModel.objects.filter(**query_data)
        else:
            subscriptions = VnfPkgSubscriptionModel.objects.all()
        if not subscriptions.exists():
            return []
        return [subscription.toDict() for subscription in subscriptions]

    def query_single_subscription(self, subscription_id):
        """Return one subscription as a dict.

        Raises:
            SubscriptionDoesNotExistsException: no row with that id exists.
        """
        logger.debug("QuerySingleSubscriptions--get--single--subscription--biz::> "
                     "ID: %s" % subscription_id)

        subscription = VnfPkgSubscriptionModel.objects.filter(
            subscription_id=subscription_id)
        if not subscription.exists():
            raise SubscriptionDoesNotExistsException("Subscription with ID: %s "
                                                     "does not exists" % subscription_id)
        return subscription[0].toDict()
+
+
class TerminateSubscription(object):
    """Removes a single VNF package subscription from the database."""

    def terminate(self, subscription_id):
        """Delete the subscription identified by *subscription_id*.

        Raises:
            SubscriptionDoesNotExistsException: no subscription row
                carries the given id.
        """
        logger.debug("TerminateSubscriptions--delete--biz::> "
                     "ID: %s" % subscription_id)

        matches = VnfPkgSubscriptionModel.objects.filter(
            subscription_id=subscription_id)
        if not matches.exists():
            raise SubscriptionDoesNotExistsException("Subscription with ID: %s "
                                                     "does not exists" % subscription_id)
        matches[0].delete()