-rw-r--r--  catalog/packages/biz/ns_descriptor.py                                          | 191
-rw-r--r--  catalog/packages/biz/nspackage.py (renamed from catalog/packages/biz/ns_package.py) | 0
-rw-r--r--  catalog/packages/biz/pnf_descriptor.py                                         | 104
-rw-r--r--  catalog/packages/biz/vnf_package.py                                            | 196
-rw-r--r--  catalog/packages/biz/vnfpackage.py (renamed from catalog/packages/biz/nf_package.py) | 69
-rw-r--r--  catalog/packages/serializers/pnfd_infos.py                                     | 17
-rw-r--r--  catalog/packages/serializers/vnf_pkg_infos.py                                  | 20
-rw-r--r--  catalog/packages/tests/test_ns_descriptor.py                                   | 200
-rw-r--r--  catalog/packages/tests/test_nsd.py                                             | 73
-rw-r--r--  catalog/packages/tests/test_nspackage.py (renamed from catalog/packages/tests/test_ns.py) | 0
-rw-r--r--  catalog/packages/tests/test_pnf_descriptor.py                                  | 181
-rw-r--r--  catalog/packages/tests/test_pnfd.py                                            | 71
-rw-r--r--  catalog/packages/tests/test_vnf_package.py                                     | 430
-rw-r--r--  catalog/packages/tests/test_vnfpackage.py (renamed from catalog/packages/tests/test_nf.py) | 44
-rw-r--r--  catalog/packages/urls.py                                                       | 22
-rw-r--r--  catalog/packages/views/catalog_views.py                                        | 23
-rw-r--r--  catalog/packages/views/ns_descriptor_views.py                                  | 49
-rw-r--r--  catalog/packages/views/nsd_content_views.py                                    | 71
-rw-r--r--  catalog/packages/views/nsd_views.py                                            | 201
-rw-r--r--  catalog/packages/views/pnf_descriptor_views.py                                 | 153
-rw-r--r--  catalog/packages/views/pnfd_content_views.py                                   | 71
-rw-r--r--  catalog/packages/views/vnf_package_views.py                                    | 228
-rw-r--r--  catalog/packages/views/vnfpkg_views.py                                         | 268
-rw-r--r--  catalog/pub/database/models.py                                                 | 23
-rw-r--r--  catalog/pub/utils/toscaparser/__init__.py                                      | 4
-rwxr-xr-x  docker/build_image.sh                                                          | 2
-rw-r--r--  pom.xml                                                                        | 4
-rw-r--r--  version.properties                                                             | 2
28 files changed, 1700 insertions, 1017 deletions
diff --git a/catalog/packages/biz/ns_descriptor.py b/catalog/packages/biz/ns_descriptor.py
index b9ebdf40..ff25c4b6 100644
--- a/catalog/packages/biz/ns_descriptor.py
+++ b/catalog/packages/biz/ns_descriptor.py
@@ -22,6 +22,7 @@ from catalog.pub.utils import fileutil
from catalog.pub.utils.values import ignore_case_get
from catalog.pub.database.models import NSPackageModel, VnfPackageModel
from catalog.pub.exceptions import CatalogException
+from catalog.pub.utils import toscaparser
logger = logging.getLogger(__name__)
@@ -38,6 +39,7 @@ def create(data):
}
NSPackageModel(
nsPackageId=data['id'],
+ onboardingState=data['nsdOnboardingState'],
operationalState=data['nsdOperationalState'],
usageState=data['nsdUsageState'],
userDefinedData=data['userDefinedData']
@@ -47,50 +49,11 @@ def create(data):
def query_multiple():
ns_pkgs = NSPackageModel.objects.all()
- if not ns_pkgs:
+ if not ns_pkgs.exists():
raise CatalogException('NS descriptors do not exist.')
response_data = []
for ns_pkg in ns_pkgs:
- data = {
- 'id': ns_pkg.nsPackageId,
- 'nsdId': ns_pkg.nsdId,
- 'nsdName': ns_pkg.nsdName,
- 'nsdVersion': ns_pkg.nsdVersion,
- 'nsdDesigner': ns_pkg.nsdDesginer,
- 'nsdInvariantId': None, # TODO
- 'vnfPkgIds': [],
- 'pnfdInfoIds': [], # TODO
- 'nestedNsdInfoIds': [], # TODO
- 'nsdOnboardingState': 'CREATED',
- 'onboardingFailureDetails': None, # TODO
- 'nsdOperationalState': ns_pkg.operationalState,
- 'nsdUsageState': ns_pkg.usageState,
- 'userDefinedData': {},
- '_links': None # TODO
- }
-
- if ns_pkg.nsdModel:
- data['nsdOnboardingState'] = 'ONBOARDED'
- elif ns_pkg.localFilePath: # TODO: strip()
- data['nsdOnboardingState'] = 'PROCESSING'
- elif ns_pkg.nsdId:
- data['nsdOnboardingState'] = 'UPLOADING'
- data['nsdOnboardingState'] = 'CREATED'
-
- if ns_pkg.nsdModel:
- nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
- vnf_pkg_ids = []
- for vnf in nsd_model['vnfs']:
- vnfd_id = vnf["properties"]["id"]
- pkgs = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
- for pkg in pkgs:
- vnf_pkg_ids.append(pkg.vnfPackageId)
- data['vnfPkgIds'] = vnf_pkg_ids
-
- if ns_pkg.userDefinedData:
- user_defined_data = json.JSONDecoder().decode(ns_pkg.userDefinedData)
- data['userDefinedData'] = user_defined_data
-
+ data = fill_resp_data(ns_pkg)
response_data.append(data)
return response_data
@@ -99,34 +62,106 @@ def query_single(nsd_info_id):
ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
if not ns_pkgs.exists():
raise CatalogException('NS descriptors(%s) does not exist.' % nsd_info_id)
+ return fill_resp_data(ns_pkgs[0])
+
+
+def delete_single(nsd_info_id):
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ logger.debug('NS descriptor (%s) is deleted.' % nsd_info_id)
+ return
+ if ns_pkgs[0].onboardingState == 'ONBOARDED':
+ raise CatalogException('The NS descriptor (%s) shall be non-ONBOARDED.' % nsd_info_id)
+ if ns_pkgs[0].operationalState != 'DISABLED':
+ raise CatalogException('The NS descriptor (%s) shall be DISABLED.' % nsd_info_id)
+ if ns_pkgs[0].usageState != 'NOT_IN_USE':
+ raise CatalogException('The NS descriptor (%s) shall be NOT_IN_USE.' % nsd_info_id)
+ ns_pkgs.delete()
+ ns_pkg_path = os.path.join(CATALOG_ROOT_PATH, nsd_info_id)
+ fileutil.delete_dirs(ns_pkg_path)
+ logger.debug('NS descriptor (%s) is deleted.' % nsd_info_id)
+
+
+def process(nsd_info_id, local_file_name):
+ nsd_json = toscaparser.parse_nsd(local_file_name)
+ nsd = json.JSONDecoder().decode(nsd_json)
+
+ nsd_id = nsd["metadata"]["id"]
+ if nsd_id and NSPackageModel.objects.filter(nsdId=nsd_id): # nsd_id may not exist
+ raise CatalogException("NS Descriptor (%s) already exists." % nsd_id)
+
+ for vnf in nsd["vnfs"]:
+ vnfd_id = vnf["properties"]["id"]
+ pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+ if not pkg:
+ vnfd_name = vnf.get("vnf_id", "undefined")
+ logger.error("[%s] is not distributed.", vnfd_name)
+ raise CatalogException("VNF package(%s) is not distributed." % vnfd_id)
+
+ NSPackageModel(
+ nsPackageId=nsd_info_id,
+ nsdId=nsd_id,
+ nsdName=nsd["metadata"].get("name", nsd_id),
+ nsdDesginer=nsd["metadata"].get("vendor", "undefined"),
+ nsdDescription=nsd["metadata"].get("description", ""),
+ nsdVersion=nsd["metadata"].get("version", "undefined"),
+ nsPackageUri=local_file_name, # TODO
+ sdcCsarId=nsd_info_id,
+ localFilePath=local_file_name,
+ nsdModel=nsd_json
+ ).save()
+
+
+def upload(remote_file, nsd_info_id):
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ raise CatalogException('The NS descriptor (%s) does not exist.' % nsd_info_id)
+
+ local_file_name = remote_file.name
+ local_file_dir = os.path.join(CATALOG_ROOT_PATH, nsd_info_id)
+ local_file_name = os.path.join(local_file_dir, local_file_name)
+ if not os.path.exists(local_file_dir):
+ fileutil.make_dirs(local_file_dir)
+ with open(local_file_name, 'wb') as local_file:
+ if remote_file.multiple_chunks(chunk_size=None):
+ for chunk in remote_file.chunks():
+ local_file.write(chunk)
+ else:
+ data = remote_file.read()
+ local_file.write(data)
+
+
+def download(nsd_info_id):
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ raise CatalogException('The PNF Descriptor (%s) does not exist.' % nsd_info_id)
+ if ns_pkgs[0].onboardingState != 'ONBOARDED':
+ raise CatalogException('The PNF Descriptor (%s) is not ONBOARDED.' % nsd_info_id)
+ local_file_path = ns_pkgs[0].localFilePath
+ return local_file_path
+
+
+def fill_resp_data(ns_pkg):
data = {
- 'id': ns_pkgs[0].nsPackageId,
- 'nsdId': ns_pkgs[0].nsdId,
- 'nsdName': ns_pkgs[0].nsdName,
- 'nsdVersion': ns_pkgs[0].nsdVersion,
- 'nsdDesigner': ns_pkgs[0].nsdDesginer,
+ 'id': ns_pkg.nsPackageId,
+ 'nsdId': ns_pkg.nsdId,
+ 'nsdName': ns_pkg.nsdName,
+ 'nsdVersion': ns_pkg.nsdVersion,
+ 'nsdDesigner': ns_pkg.nsdDesginer,
'nsdInvariantId': None, # TODO
'vnfPkgIds': [],
'pnfdInfoIds': [], # TODO
'nestedNsdInfoIds': [], # TODO
- 'nsdOnboardingState': 'CREATED',
+ 'nsdOnboardingState': ns_pkg.onboardingState,
'onboardingFailureDetails': None, # TODO
- 'nsdOperationalState': ns_pkgs[0].operationalState,
- 'nsdUsageState': ns_pkgs[0].usageState,
+ 'nsdOperationalState': ns_pkg.operationalState,
+ 'nsdUsageState': ns_pkg.usageState,
'userDefinedData': {},
'_links': None # TODO
}
- if ns_pkgs[0].nsdModel:
- ns_pkgs[0]['nsdOnboardingState'] = 'ONBOARDED'
- elif ns_pkgs[0].localFilePath: # TODO: strip()
- ns_pkgs[0]['nsdOnboardingState'] = 'PROCESSING'
- elif ns_pkgs[0].nsdId:
- ns_pkgs[0]['nsdOnboardingState'] = 'UPLOADING'
- ns_pkgs[0]['nsdOnboardingState'] = 'CREATED'
-
- if ns_pkgs[0].nsdModel:
- nsd_model = json.JSONDecoder().decode(ns_pkgs[0].nsdModel)
+ if ns_pkg.nsdModel:
+ nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
vnf_pkg_ids = []
for vnf in nsd_model['vnfs']:
vnfd_id = vnf["properties"]["id"]
@@ -135,42 +170,8 @@ def query_single(nsd_info_id):
vnf_pkg_ids.append(pkg.vnfPackageId)
data['vnfPkgIds'] = vnf_pkg_ids
- if ns_pkgs[0].userDefinedData:
- user_defined_data = json.JSONDecoder().decode(ns_pkgs[0].userDefinedData)
+ if ns_pkg.userDefinedData:
+ user_defined_data = json.JSONDecoder().decode(ns_pkg.userDefinedData)
data['userDefinedData'] = user_defined_data
return data
-
-
-def delete_single(nsd_info_id):
- ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
- if not ns_pkgs.exists():
- raise CatalogException('The NS descriptor (%s) does not exist.' % nsd_info_id)
- if not ns_pkgs[0].nsdModel:
- raise CatalogException('The NS descriptor (%s) is not ONBOARDED.' % nsd_info_id)
- if ns_pkgs[0].operationalState != 'DISABLED':
- raise CatalogException('The NS descriptor (%s) is not DISABLED.' % nsd_info_id)
- if ns_pkgs[0].usageState != 'NOT_IN_USE':
- raise CatalogException('The NS descriptor (%s) is not NOT_IN_USE.' % nsd_info_id)
- ns_pkgs.delete()
-
-
-def upload(files, nsd_info_id):
- remote_files = files
- for remote_file in remote_files:
- local_file_name = remote_file.name
- local_file_dir = os.path.join(CATALOG_ROOT_PATH, nsd_info_id)
- local_file_name = os.path.join(local_file_dir, local_file_name)
- if not os.path.exists(local_file_dir):
- fileutil.make_dirs(local_file_dir)
- with open(local_file_name, 'wb') as local_file:
- if remote_file.multiple_chunks(chunk_size=None):
- for chunk in remote_file.chunks():
- local_file.write(chunk)
- else:
- data = remote_file.read()
- local_file.write(data)
-
-
-def fill_resp_data(ns_pkg):
- pass
diff --git a/catalog/packages/biz/ns_package.py b/catalog/packages/biz/nspackage.py
index cf1f2cd5..cf1f2cd5 100644
--- a/catalog/packages/biz/ns_package.py
+++ b/catalog/packages/biz/nspackage.py
diff --git a/catalog/packages/biz/pnf_descriptor.py b/catalog/packages/biz/pnf_descriptor.py
index d6710c43..eb9a2a97 100644
--- a/catalog/packages/biz/pnf_descriptor.py
+++ b/catalog/packages/biz/pnf_descriptor.py
@@ -13,6 +13,7 @@
# limitations under the License.
+import json
import logging
import os
import uuid
@@ -20,6 +21,9 @@ import uuid
from catalog.pub.config.config import CATALOG_ROOT_PATH
from catalog.pub.utils import fileutil
from catalog.pub.utils.values import ignore_case_get
+from catalog.pub.database.models import NSPackageModel, PnfPackageModel
+from catalog.pub.exceptions import CatalogException
+from catalog.pub.utils import toscaparser
logger = logging.getLogger(__name__)
@@ -33,10 +37,60 @@ def create(data):
'userDefinedData': user_defined_data,
'_links': None # TODO
}
+ PnfPackageModel(
+ pnfPackageId=data['id'],
+ onboardingState=data['pnfdOnboardingState'],
+ usageState=data['pnfdUsageState'],
+ userDefinedData=data['userDefinedData']
+ ).save()
return data
+def query_multiple():
+ pnf_pkgs = PnfPackageModel.objects.all()
+ if not pnf_pkgs.exists():
+ raise CatalogException('PNF descriptors do not exist.')
+ response_data = []
+ for pnf_pkg in pnf_pkgs:
+ data = fill_response_data(pnf_pkg)
+ response_data.append(data)
+ return response_data
+
+
+def query_single(pnfd_info_id):
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ raise CatalogException('PNF descriptor (%s) does not exist.' % pnfd_info_id)
+ return fill_response_data(pnf_pkgs[0])
+
+
+def process(pnfd_info_id, local_file_name): # TODO: onboardingState changes
+ pnfd_json = toscaparser.parse_pnfd(local_file_name)
+ pnfd = json.JSONDecoder().decode(pnfd_json)
+
+ pnfd_id = pnfd["metadata"]["id"]
+ if pnfd_id and PnfPackageModel.objects.filter(pnfdId=pnfd_id): # pnfd_id may not exist
+ raise CatalogException("NS Descriptor (%s) already exists." % pnfd_id)
+
+ PnfPackageModel(
+ pnfPackageId=pnfd_info_id,
+ pnfdId=pnfd_id,
+ pnfdName=pnfd["metadata"].get("name", pnfd_id),
+ pnfdDesginer=pnfd["metadata"].get("vendor", "undefined"),
+ pnfdDescription=pnfd["metadata"].get("description", ""),
+ pnfdVersion=pnfd["metadata"].get("version", "undefined"),
+ nsPackageUri=local_file_name, # TODO
+ sdcCsarId=pnfd_info_id,
+ localFilePath=local_file_name,
+ pnfdModel=pnfd_json
+ ).save()
+
+
def upload(files, pnfd_info_id):
+ ns_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not ns_pkgs.exists():
+ raise CatalogException('The NS descriptor (%s) does not exist.' % pnfd_info_id)
+
remote_files = files
for remote_file in remote_files:
local_file_name = remote_file.name
@@ -51,3 +105,53 @@ def upload(files, pnfd_info_id):
else:
data = remote_file.read()
local_file.write(data)
+
+
+def download(pnfd_info_id):
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ raise CatalogException('The PNF Descriptor (%s) does not exist.' % pnfd_info_id)
+ if pnf_pkgs[0].onboardingState != 'ONBOARDED':
+ raise CatalogException('The PNF Descriptor (%s) is not ONBOARDED.' % pnfd_info_id)
+ local_file_path = pnf_pkgs[0].localFilePath
+ return local_file_path
+
+
+def delete_pnf(pnfd_info_id):
+ # TODO
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ logger.debug('PNF descriptor (%s) is deleted.' % pnfd_info_id)
+ return
+ if pnf_pkgs[0].usageState != 'NOT_IN_USE':
+ raise CatalogException('The PNF descriptor (%s) shall be NOT_IN_USE.' % pnfd_info_id)
+ ns_pkgs = NSPackageModel.objects.all()
+ for ns_pkg in ns_pkgs:
+ if pnfd_info_id in ns_pkg.pnfdInfoIds:
+ raise CatalogException('The PNF descriptor (%s) is referenced.' % pnfd_info_id)
+ break
+ pnf_pkgs.delete()
+ vnf_pkg_path = os.path.join(CATALOG_ROOT_PATH, pnfd_info_id)
+ fileutil.delete_dirs(vnf_pkg_path)
+ logger.debug('PNF descriptor (%s) is deleted.' % pnfd_info_id)
+
+
+def fill_response_data(pnf_pkg):
+ data = {
+ 'id': pnf_pkg.pnfPackageId,
+ 'pnfdId': pnf_pkg.pnfdId,
+ 'pnfdName': pnf_pkg.pnfdProductName, # TODO: check
+ 'pnfdVersion': pnf_pkg.pnfdVersion,
+ 'pnfdProvider': pnf_pkg.pnfVendor, # TODO: check
+ 'pnfdInvariantId': None, # TODO
+ 'pnfdOnboardingState': pnf_pkg.onboardingState,
+ 'onboardingFailureDetails': None, # TODO
+ 'pnfdUsageState': pnf_pkg.usageState,
+ 'userDefinedData': {},
+ '_links': None # TODO
+ }
+ if pnf_pkg.userDefinedData:
+ user_defined_data = json.JSONDecoder().decode(pnf_pkg.userDefinedData)
+ data['userDefinedData'] = user_defined_data
+
+ return data
diff --git a/catalog/packages/biz/vnf_package.py b/catalog/packages/biz/vnf_package.py
new file mode 100644
index 00000000..d3f1f587
--- /dev/null
+++ b/catalog/packages/biz/vnf_package.py
@@ -0,0 +1,196 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import sys
+import threading
+import traceback
+import urllib2
+import uuid
+
+from rest_framework import status
+from django.http import FileResponse, StreamingHttpResponse
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import VnfPackageModel
+from catalog.pub.exceptions import CatalogException
+from catalog.pub.utils.values import ignore_case_get
+from catalog.pub.utils import fileutil, toscaparser
+
+
+logger = logging.getLogger(__name__)
+
+
+def create_vnf_pkg(data):
+ user_defined_data = ignore_case_get(data, "userDefinedData")
+ vnf_pkg_id = str(uuid.uuid4())
+ VnfPackageModel.objects.create(
+ vnfPackageId=vnf_pkg_id,
+ onboardingState="CREATED",
+ operationalState="DISABLED",
+ usageState="NOT_IN_USE",
+ userDefinedData=user_defined_data
+ )
+ data = {
+ "id": vnf_pkg_id,
+ "onboardingState": "CREATED",
+ "operationalState": "DISABLED",
+ "usageState": "NOT_IN_USE",
+ "userDefinedData": user_defined_data,
+ "_links": None
+ }
+ return data
+
+
+def query_multiple():
+ pkgs_info = []
+ nf_pkgs = VnfPackageModel.objects.filter()
+ if not nf_pkgs.exists():
+ raise CatalogException('VNF packages do not exist.')
+ for nf_pkg in nf_pkgs:
+ ret = fill_response_data(nf_pkg)
+ pkgs_info.append(ret)
+ return pkgs_info
+
+
+def query_single(vnf_pkg_id):
+ nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+ if not nf_pkg.exists():
+ raise CatalogException('VNF package(%s) does not exist.' % vnf_pkg_id)
+ return fill_response_data(nf_pkg[0])
+
+
+def delete_vnf_pkg(vnf_pkg_id):
+ vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+ if not vnf_pkg.exists():
+ logger.debug('VNF package(%s) is deleted.' % vnf_pkg_id)
+ return
+ if vnf_pkg[0].onboardingState != "CREATED":
+ raise CatalogException("The VNF package (%s) is not on-boarded" % vnf_pkg_id)
+ if vnf_pkg[0].operationalState != "DISABLED":
+ raise CatalogException("The VNF package (%s) is not disabled" % vnf_pkg_id)
+ if vnf_pkg[0].usageState != "NOT_IN_USE":
+ raise CatalogException("The VNF package (%s) is in use" % vnf_pkg_id)
+ vnf_pkg.delete()
+ vnf_pkg_path = os.path.join(CATALOG_ROOT_PATH, vnf_pkg_id)
+ fileutil.delete_dirs(vnf_pkg_path)
+
+
+def parse_vnfd_and_save(vnf_pkg_id, vnf_pkg_path):
+ vnfd_json = toscaparser.parse_vnfd(vnf_pkg_path)
+ vnfd = json.JSONDecoder().decode(vnfd_json)
+
+ vnfd_id = vnfd["metadata"]["id"]
+ if VnfPackageModel.objects.filter(vnfdId=vnfd_id):
+ raise CatalogException("VNFD(%s) already exists." % vnfd_id)
+
+ vnfd_ver = vnfd["metadata"].get("vnfd_version")
+ if not vnfd_ver:
+ vnfd_ver = vnfd["metadata"].get("vnfdVersion", "undefined")
+ VnfPackageModel(
+ vnfPackageId=vnf_pkg_id,
+ vnfdId=vnfd_id,
+ vnfVendor=vnfd["metadata"].get("vendor", "undefined"),
+ vnfdVersion=vnfd_ver,
+ vnfSoftwareVersion=vnfd["metadata"].get("version", "undefined"),
+ vnfdModel=vnfd_json,
+ onboardingState="ONBOARDED",
+ operationalState="ENABLED",
+ usageState="NOT_IN_USE",
+ localFilePath=vnf_pkg_path
+ ).save()
+
+
+class VnfPkgUploadThread(threading.Thread):
+ def __init__(self, data, vnf_pkg_id):
+ threading.Thread.__init__(self)
+ self.vnf_pkg_id = vnf_pkg_id
+ self.data = data
+ self.upload_file_name = None
+
+ def run(self):
+ try:
+ self.upload_vnf_pkg_from_uri()
+ parse_vnfd_and_save(self.vnf_pkg_id, self.upload_file_name)
+ except CatalogException as e:
+ logger.error(e.message)
+ except Exception as e:
+ logger.error(e.message)
+ logger.error(traceback.format_exc())
+ logger.error(str(sys.exc_info()))
+
+ def upload_vnf_pkg_from_uri(self):
+ logger.debug("UploadVnf %s" % self.vnf_pkg_id)
+ vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=self.vnf_pkg_id)
+ if vnf_pkg[0].onboardingState != "CREATED":
+ raise CatalogException("VNF package (%s) is not created" % self.vnf_pkg_id)
+ uri = ignore_case_get(self.data, "addressInformation")
+ upload_path = os.path.join(CATALOG_ROOT_PATH, self.vnf_pkg_id)
+ if not os.path.exists(upload_path):
+ os.makedirs(upload_path, 0o777)
+ r = urllib2.Request(uri)
+ req = urllib2.urlopen(r)
+
+ self.upload_file_name = os.path.join(upload_path, os.path.basename(uri))
+ save_file = open(self.upload_file_name, "wb")
+ save_file.write(req.read())
+ save_file.close()
+ req.close()
+
+
+def fill_response_data(nf_pkg):
+ pkg_info = {}
+ pkg_info["id"] = nf_pkg.vnfPackageId
+ pkg_info["vnfdId"] = nf_pkg.vnfdId
+ pkg_info["vnfProductName"] = nf_pkg.vnfdProductName
+ pkg_info["vnfSoftwareVersion"] = nf_pkg.vnfSoftwareVersion
+ pkg_info["vnfdVersion"] = nf_pkg.vnfdVersion
+ if nf_pkg.checksum:
+ pkg_info["checksum"] = json.JSONDecoder().decode(nf_pkg.checksum)
+ pkg_info["softwareImages"] = None # TODO
+ pkg_info["additionalArtifacts"] = None # TODO
+ pkg_info["onboardingState"] = nf_pkg.onboardingState
+ pkg_info["operationalState"] = nf_pkg.operationalState
+ pkg_info["usageState"] = nf_pkg.usageState
+ if nf_pkg.userDefinedData:
+ pkg_info["userDefinedData"] = json.JSONDecoder().decode(nf_pkg.userDefinedData)
+ pkg_info["_links"] = None # TODO
+ return pkg_info
+
+
+def fetch_vnf_pkg(request, vnf_pkg_id):
+ nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+ if not nf_pkg.exists():
+ raise CatalogException("VNF package (%s) does not exist" % vnf_pkg_id)
+ if nf_pkg[0].onboardingState != "ONBOARDED":
+ raise CatalogException("VNF package (%s) is not on-boarded" % vnf_pkg_id)
+ file_path = nf_pkg[0].localFilePath
+ file_name = file_path.split('/')[-1]
+ file_name = file_name.split('\\')[-1]
+ file_range = request.META.get('RANGE')
+ if file_range:
+ start_end = file_range.split('-')
+ start = int(start_end[0])
+ end = int(start_end[1])
+ f = open(file_path, "rb")
+ f.seek(start, 0)
+ fs = f.read(end - start + 1)
+ response = StreamingHttpResponse(fs, status=status.HTTP_200_OK)
+ response['Content-Type'] = 'application/octet-stream'
+ response['Content-Range'] = file_range
+ else:
+ response = FileResponse(open(file_path, 'rb'), status=status.HTTP_200_OK)
+ response['Content-Disposition'] = 'attachment; filename=%s' % file_name.encode('utf-8')
+ return response
diff --git a/catalog/packages/biz/nf_package.py b/catalog/packages/biz/vnfpackage.py
index 44f2f463..55de8096 100644
--- a/catalog/packages/biz/nf_package.py
+++ b/catalog/packages/biz/vnfpackage.py
@@ -18,8 +18,6 @@ import os
import sys
import threading
import traceback
-import urllib2
-import uuid
from catalog.pub.config.config import CATALOG_ROOT_PATH, CATALOG_URL_PATH, MSB_SERVICE_IP
from catalog.pub.config.config import REG_TO_MSB_REG_PARAM
@@ -29,7 +27,6 @@ from catalog.pub.msapi import sdc
from catalog.pub.utils import fileutil
from catalog.pub.utils import toscaparser
from catalog.pub.utils.jobutil import JobUtil
-from catalog.pub.utils.values import ignore_case_get
logger = logging.getLogger(__name__)
@@ -79,40 +76,6 @@ def parse_vnfd(csar_id, inputs):
return [0, ret]
-def create_vnf_pkg(data):
- user_defined_data = ignore_case_get(data, "userDefinedData")
- vnfPkgId = str(uuid.uuid4())
- VnfPackageModel.objects.create(
- vnfPackageId=vnfPkgId,
- onboardingState="CREATED",
- operationalState="DISABLED",
- usageState="NOT_IN_USE",
- userDefinedData=user_defined_data
- )
- data = {
- "id": vnfPkgId,
- "onboardingState": "CREATED",
- "operationalState": "DISABLED",
- "usageState": "NOT_IN_USE",
- "userDefinedData": user_defined_data,
- "_links": None
- }
- return data
-
-
-def query_multiple():
- # TODO
- data = {
- "id": "1",
- "onboardingState": "CREATED",
- "operationalState": "DISABLED",
- "usageState": "NOT_IN_USE",
- "userDefinedData": "1",
- "_links": None
- }
- return data
-
-
class NfDistributeThread(threading.Thread):
"""
Sdc NF Package Distribute
@@ -232,38 +195,6 @@ class NfPkgDeleteThread(threading.Thread):
JobUtil.add_job_status(self.job_id, 100, "Delete CSAR(%s) successfully." % self.csar_id)
-class VnfpkgUploadThread(threading.Thread):
- def __init__(self, data, vnfPkgId):
- threading.Thread.__init__(self)
- self.vnfPkgId = vnfPkgId
- self.data = data
-
- def run(self):
- try:
- self.upload_vnfPkg_from_uri()
- except CatalogException as e:
- logger.error(e.message)
- except Exception as e:
- logger.error(e.message)
- logger.error(traceback.format_exc())
- logger.error(str(sys.exc_info()))
-
- def upload_vnfPkg_from_uri(self):
- logger.debug("UploadVnf %s" % self.vnfPkgId)
- uri = ignore_case_get(self.data, "addressInformation")
- upload_path = os.path.join(CATALOG_ROOT_PATH, self.vnfPkgId)
- if not os.path.exists(upload_path):
- os.makedirs(upload_path, 0o777)
- r = urllib2.Request(uri)
- req = urllib2.urlopen(r)
-
- upload_file_name = os.path.join(upload_path, os.path.basename(uri))
- save_file = open(upload_file_name, "wb")
- save_file.write(req.read())
- save_file.close()
- req.close()
-
-
class NfPackage(object):
"""
Actions for sdc nf package.
diff --git a/catalog/packages/serializers/pnfd_infos.py b/catalog/packages/serializers/pnfd_infos.py
new file mode 100644
index 00000000..26df221c
--- /dev/null
+++ b/catalog/packages/serializers/pnfd_infos.py
@@ -0,0 +1,17 @@
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from pnfd_info import PnfdInfoSerializer
+
+
+class PnfdInfosSerializer(serializers.ListSerializer):
+ child = PnfdInfoSerializer()
diff --git a/catalog/packages/serializers/vnf_pkg_infos.py b/catalog/packages/serializers/vnf_pkg_infos.py
new file mode 100644
index 00000000..d4cbc655
--- /dev/null
+++ b/catalog/packages/serializers/vnf_pkg_infos.py
@@ -0,0 +1,20 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from vnf_pkg_info import VnfPkgInfoSerializer
+
+
+class VnfPkgInfosSerializer(serializers.ListSerializer):
+ child = VnfPkgInfoSerializer()
diff --git a/catalog/packages/tests/test_ns_descriptor.py b/catalog/packages/tests/test_ns_descriptor.py
new file mode 100644
index 00000000..c6eda2fe
--- /dev/null
+++ b/catalog/packages/tests/test_ns_descriptor.py
@@ -0,0 +1,200 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import json
+import os
+
+from django.test import TestCase
+from rest_framework import status
+from rest_framework.test import APIClient
+from catalog.pub.database.models import NSPackageModel
+
+
+class TestNsDescriptor(TestCase):
+ def setUp(self):
+ self.client = APIClient()
+ self.user_defined_data = {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ }
+
+ def tearDown(self):
+ pass
+
+ def test_nsd_create_normal(self):
+ reqest_data = {
+ 'userDefinedData': self.user_defined_data
+ }
+ expected_reponse_data = {
+ 'nsdOnboardingState': 'CREATED',
+ 'nsdOperationalState': 'DISABLED',
+ 'nsdUsageState': 'NOT_IN_USE',
+ 'userDefinedData': self.user_defined_data,
+ '_links': None
+ }
+ response = self.client.post(
+ '/api/nsd/v1/ns_descriptors',
+ data=reqest_data,
+ format='json'
+ )
+ response.data.pop('id')
+ self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+ self.assertEqual(expected_reponse_data, response.data)
+
+ def test_query_multiple_nsds_normal(self):
+ expected_reponse_data = [
+ {
+ 'id': '0',
+ 'nsdId': None,
+ 'nsdName': None,
+ 'nsdVersion': None,
+ 'nsdDesigner': None,
+ 'nsdInvariantId': None,
+ 'vnfPkgIds': [],
+ 'pnfdInfoIds': [],
+ 'nestedNsdInfoIds': [],
+ 'nsdOnboardingState': 'CREATED',
+ 'onboardingFailureDetails': None,
+ 'nsdOperationalState': 'DISABLED',
+ 'nsdUsageState': 'NOT_IN_USE',
+ 'userDefinedData': {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ },
+ '_links': None
+ },
+ {
+ 'id': '1',
+ 'nsdId': None,
+ 'nsdName': None,
+ 'nsdVersion': None,
+ 'nsdDesigner': None,
+ 'nsdInvariantId': None,
+ 'vnfPkgIds': [],
+ 'pnfdInfoIds': [],
+ 'nestedNsdInfoIds': [],
+ 'nsdOnboardingState': 'CREATED',
+ 'onboardingFailureDetails': None,
+ 'nsdOperationalState': 'DISABLED',
+ 'nsdUsageState': 'NOT_IN_USE',
+ 'userDefinedData': {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ },
+ '_links': None
+ }
+ ]
+ user_defined_data = {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ }
+ user_defined_data = json.JSONEncoder().encode(user_defined_data)
+ for i in range(2):
+ NSPackageModel(
+ nsPackageId=str(i),
+ onboardingState='CREATED',
+ operationalState='DISABLED',
+ usageState='NOT_IN_USE',
+ userDefinedData=user_defined_data
+ ).save()
+ response = self.client.get('/api/nsd/v1/ns_descriptors', format='json')
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(expected_reponse_data, response.data)
+
+ def test_query_single_nsd_normal(self):
+ expected_reponse_data = {
+ 'id': '22',
+ 'nsdId': None,
+ 'nsdName': None,
+ 'nsdVersion': None,
+ 'nsdDesigner': None,
+ 'nsdInvariantId': None,
+ 'vnfPkgIds': [],
+ 'pnfdInfoIds': [],
+ 'nestedNsdInfoIds': [],
+ 'nsdOnboardingState': 'CREATED',
+ 'onboardingFailureDetails': None,
+ 'nsdOperationalState': 'DISABLED',
+ 'nsdUsageState': 'NOT_IN_USE',
+ 'userDefinedData': {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ },
+ '_links': None
+ }
+ user_defined_data = {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ }
+ user_defined_data = json.JSONEncoder().encode(user_defined_data)
+ NSPackageModel(
+ nsPackageId='22',
+ onboardingState='CREATED',
+ operationalState='DISABLED',
+ usageState='NOT_IN_USE',
+ userDefinedData=user_defined_data
+ ).save()
+
+ response = self.client.get('/api/nsd/v1/ns_descriptors/22', format='json')
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(expected_reponse_data, response.data)
+
+ def test_delete_single_nsd_normal(self):
+ user_defined_data = {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ }
+ user_defined_data = json.JSONEncoder().encode(user_defined_data)
+ NSPackageModel(
+ nsPackageId='22',
+ operationalState='DISABLED',
+ usageState='NOT_IN_USE',
+ userDefinedData=user_defined_data,
+ nsdModel='test'
+ ).save()
+ resp = self.client.delete("/api/nsd/v1/ns_descriptors/22", format='json')
+ self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
+ self.assertEqual({}, resp.data)
+
+ def test_nsd_content_upload_normal(self):
+ user_defined_data_json = json.JSONEncoder().encode(self.user_defined_data)
+ NSPackageModel(
+ nsPackageId='22',
+ operationalState='DISABLED',
+ usageState='NOT_IN_USE',
+ userDefinedData=user_defined_data_json,
+ ).save()
+ with open('nsd_content.txt', 'wb') as fp:
+ fp.write('test')
+
+ with open('nsd_content.txt', 'rb') as fp:
+ resp = self.client.put(
+ "/api/nsd/v1/ns_descriptors/22/nsd_content",
+ {'file': fp},
+ )
+ self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
+ self.assertEqual({}, resp.data)
+
+ os.remove('nsd_content.txt')
+
+ def test_nsd_content_upload_failure(self):
+ pass
diff --git a/catalog/packages/tests/test_nsd.py b/catalog/packages/tests/test_nsd.py
deleted file mode 100644
index 268f382f..00000000
--- a/catalog/packages/tests/test_nsd.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import os
-
-from django.test import TestCase
-from rest_framework import status
-from rest_framework.test import APIClient
-
-
-class TestNsDescriptor(TestCase):
- def setUp(self):
- self.client = APIClient()
-
- def tearDown(self):
- pass
-
- def test_nsd_create_normal(self):
- reqest_data = {
- 'userDefinedData': {
- 'key1': 'value1',
- 'key2': 'value2',
- 'key3': 'value3',
- }
- }
- expected_reponse_data = {
- 'nsdOnboardingState': 'CREATED',
- 'nsdOperationalState': 'DISABLED',
- 'nsdUsageState': 'NOT_IN_USE',
- 'userDefinedData': {
- 'key1': 'value1',
- 'key2': 'value2',
- 'key3': 'value3',
- },
- '_links': None
- }
- response = self.client.post(
- '/api/nsd/v1/ns_descriptors',
- data=reqest_data,
- format='json'
- )
- response.data.pop('id')
- self.assertEqual(response.status_code, status.HTTP_201_CREATED)
- self.assertEqual(expected_reponse_data, response.data)
-
- def test_nsd_content_upload_normal(self):
- with open('nsd_content.txt', 'wb') as fp:
- fp.write('test')
-
- with open('nsd_content.txt', 'rb') as fp:
- resp = self.client.put(
- "/api/nsd/v1/ns_descriptors/22/nsd_content",
- {'file': fp},
- )
- self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
- self.assertEqual({}, resp.data)
-
- os.remove('nsd_content.txt')
-
- def test_nsd_content_upload_failure(self):
- pass
diff --git a/catalog/packages/tests/test_ns.py b/catalog/packages/tests/test_nspackage.py
index 9af82312..9af82312 100644
--- a/catalog/packages/tests/test_ns.py
+++ b/catalog/packages/tests/test_nspackage.py
diff --git a/catalog/packages/tests/test_pnf_descriptor.py b/catalog/packages/tests/test_pnf_descriptor.py
new file mode 100644
index 00000000..4245645e
--- /dev/null
+++ b/catalog/packages/tests/test_pnf_descriptor.py
@@ -0,0 +1,181 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import json
+import os
+
+from django.test import TestCase
+from rest_framework import status
+from rest_framework.test import APIClient
+from catalog.pub.database.models import PnfPackageModel
+
+
+class TestPnfDescriptor(TestCase):
+ def setUp(self):
+ self.client = APIClient()
+ self.user_defined_data = {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ }
+
+ def tearDown(self):
+ pass
+
+ def test_pnfd_create_normal(self):
+ request_data = {'userDefinedData': self.user_defined_data}
+ expected_reponse_data = {
+ 'pnfdOnboardingState': 'CREATED',
+ 'pnfdUsageState': 'NOT_IN_USE',
+ 'userDefinedData': self.user_defined_data,
+ '_links': None
+ }
+ response = self.client.post(
+ '/api/nsd/v1/pnf_descriptors',
+ data=request_data,
+ format='json'
+ )
+ response.data.pop('id')
+ self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+ self.assertEqual(expected_reponse_data, response.data)
+
+ def test_query_multiple_pnfds_normal(self):
+ expected_reponse_data = [
+ {
+ 'id': '0',
+ 'pnfdId': None,
+ 'pnfdName': None,
+ 'pnfdVersion': None,
+ 'pnfdProvider': None,
+ 'pnfdInvariantId': None,
+ 'pnfdOnboardingState': 'CREATED',
+ 'onboardingFailureDetails': None,
+ 'pnfdUsageState': 'NOT_IN_USE',
+ 'userDefinedData': {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ },
+ '_links': None
+ },
+ {
+ 'id': '1',
+ 'pnfdId': None,
+ 'pnfdName': None,
+ 'pnfdVersion': None,
+ 'pnfdProvider': None,
+ 'pnfdInvariantId': None,
+ 'pnfdOnboardingState': 'CREATED',
+ 'onboardingFailureDetails': None,
+ 'pnfdUsageState': 'NOT_IN_USE',
+ 'userDefinedData': {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ },
+ '_links': None
+ }
+ ]
+ user_defined_data = {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ }
+ user_defined_data = json.JSONEncoder().encode(user_defined_data)
+ for i in range(2):
+ PnfPackageModel(
+ pnfPackageId=str(i),
+ onboardingState='CREATED',
+ usageState='NOT_IN_USE',
+ userDefinedData=user_defined_data
+ ).save()
+ response = self.client.get('/api/nsd/v1/pnf_descriptors', format='json')
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(expected_reponse_data, response.data)
+
+ def test_query_single_pnfd_normal(self):
+ expected_reponse_data = {
+ 'id': '22',
+ 'pnfdId': None,
+ 'pnfdName': None,
+ 'pnfdVersion': None,
+ 'pnfdProvider': None,
+ 'pnfdInvariantId': None,
+ 'pnfdOnboardingState': 'CREATED',
+ 'onboardingFailureDetails': None,
+ 'pnfdUsageState': 'NOT_IN_USE',
+ 'userDefinedData': {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ },
+ '_links': None
+ }
+ user_defined_data = {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ }
+ user_defined_data = json.JSONEncoder().encode(user_defined_data)
+ PnfPackageModel(
+ pnfPackageId='22',
+ onboardingState='CREATED',
+ usageState='NOT_IN_USE',
+ userDefinedData=user_defined_data
+ ).save()
+
+ response = self.client.get('/api/nsd/v1/pnf_descriptors/22', format='json')
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(expected_reponse_data, response.data)
+
+ def test_delete_single_pnfd_normal(self):
+ user_defined_data = {
+ 'key1': 'value1',
+ 'key2': 'value2',
+ 'key3': 'value3',
+ }
+ user_defined_data = json.JSONEncoder().encode(user_defined_data)
+ PnfPackageModel(
+ pnfPackageId='22',
+ usageState='NOT_IN_USE',
+ userDefinedData=user_defined_data,
+ pnfdModel='test'
+ ).save()
+ resp = self.client.delete("/api/nsd/v1/pnf_descriptors/22", format='json')
+ self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
+ self.assertEqual(None, resp.data)
+
+ def test_pnfd_content_upload_normal(self):
+ user_defined_data_json = json.JSONEncoder().encode(self.user_defined_data)
+ PnfPackageModel(
+ pnfPackageId='22',
+ usageState='NOT_IN_USE',
+ userDefinedData=user_defined_data_json,
+ ).save()
+ with open('pnfd_content.txt', 'wb') as fp:
+ fp.write('test')
+
+ with open('pnfd_content.txt', 'rb') as fp:
+ resp = self.client.put(
+ "/api/nsd/v1/pnf_descriptors/22/pnfd_content",
+ {'file': fp},
+ )
+ self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
+ self.assertEqual({}, resp.data)
+
+ os.remove('pnfd_content.txt')
+
+ def test_pnfd_content_upload_failure(self):
+ pass
diff --git a/catalog/packages/tests/test_pnfd.py b/catalog/packages/tests/test_pnfd.py
deleted file mode 100644
index 35e7720f..00000000
--- a/catalog/packages/tests/test_pnfd.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-from django.test import TestCase
-from rest_framework import status
-from rest_framework.test import APIClient
-
-
-class TestPnfDescriptor(TestCase):
- def setUp(self):
- self.client = APIClient()
-
- def tearDown(self):
- pass
-
- def test_pnfd_create_normal(self):
- reqest_data = {
- 'userDefinedData': {
- 'key1': 'value1',
- 'key2': 'value2',
- 'key3': 'value3',
- }
- }
- expected_reponse_data = {
- 'pnfdOnboardingState': 'CREATED',
- 'pnfdUsageState': 'NOT_IN_USE',
- 'userDefinedData': {
- 'key1': 'value1',
- 'key2': 'value2',
- 'key3': 'value3',
- },
- '_links': None
- }
- response = self.client.post(
- '/api/nsd/v1/pnf_descriptors',
- data=reqest_data,
- format='json'
- )
- response.data.pop('id')
- self.assertEqual(response.status_code, status.HTTP_201_CREATED)
- self.assertEqual(expected_reponse_data, response.data)
-
- def test_pnfd_content_upload_normal(self):
- with open('pnfd_content.txt', 'wb') as fp:
- fp.write('test')
-
- with open('pnfd_content.txt', 'rb') as fp:
- resp = self.client.put(
- "/api/nsd/v1/pnf_descriptors/22/pnfd_content",
- {'file': fp},
- )
- self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
- self.assertEqual({}, resp.data)
-
- os.remove('pnfd_content.txt')
-
- def test_pnfd_content_upload_failure(self):
- pass
diff --git a/catalog/packages/tests/test_vnf_package.py b/catalog/packages/tests/test_vnf_package.py
new file mode 100644
index 00000000..efde444d
--- /dev/null
+++ b/catalog/packages/tests/test_vnf_package.py
@@ -0,0 +1,430 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import json
+import mock
+import urllib2
+
+from rest_framework.test import APIClient
+from django.test import TestCase
+from rest_framework import status
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.packages.biz.vnf_package import VnfPkgUploadThread
+from catalog.pub.database.models import VnfPackageModel
+from catalog.pub.utils import toscaparser
+
+
+class MockReq():
+ def read(self):
+ return "1"
+
+ def close(self):
+ pass
+
+
+class TestVnfPackage(TestCase):
+ def setUp(self):
+ self.client = APIClient()
+ self.vnfd_data = {
+ "volume_storages": [
+ {
+ "properties": {
+ "size_of_storage": {
+ "factor": 10,
+ "value": 10000000000,
+ "unit": "GB",
+ "unit_size": 1000000000
+ },
+ "type_of_storage": "volume",
+ "rdma_enabled": False,
+ "size": "10 GB"
+ },
+ "volume_storage_id": "vNAT_Storage_6wdgwzedlb6sq18uzrr41sof7",
+ "description": ""
+ }
+ ],
+ "inputs": {},
+ "vdus": [
+ {
+ "volume_storages": [
+ "vNAT_Storage_6wdgwzedlb6sq18uzrr41sof7"
+ ],
+ "description": "",
+ "dependencies": [],
+ "vls": [],
+ "properties": {
+ "name": "vNat",
+ "configurable_properties": {
+ "test": {
+ "additional_vnfc_configurable_properties": {
+ "aaa": "1",
+ "bbb": "2",
+ "ccc": "3"
+ }
+ }
+ },
+ "description": "the virtual machine of vNat",
+ "nfvi_constraints": [
+ "test"
+ ],
+ "boot_order": [
+ "vNAT_Storage"
+ ]
+ },
+ "vdu_id": "vdu_vNat",
+ "artifacts": [
+ {
+ "artifact_name": "vNatVNFImage",
+ "type": "tosca.artifacts.nfv.SwImage",
+ "properties": {
+ "operating_system": "linux",
+ "sw_image": "/swimages/vRouterVNF_ControlPlane.qcow2",
+ "name": "vNatVNFImage",
+ "container_format": "bare",
+ "min_ram": "1 GB",
+ "disk_format": "qcow2",
+ "supported_virtualisation_environments": [
+ "test_0"
+ ],
+ "version": "1.0",
+ "checksum": "5000",
+ "min_disk": "10 GB",
+ "size": "10 GB"
+ },
+ "file": "/swimages/vRouterVNF_ControlPlane.qcow2"
+ }
+ ],
+ "nfv_compute": {
+ "flavor_extra_specs": {
+ "hw:cpu_sockets": "2",
+ "sw:ovs_dpdk": "true",
+ "hw:cpu_threads": "2",
+ "hw:numa_mem.1": "3072",
+ "hw:numa_mem.0": "1024",
+ "hw:numa_nodes": "2",
+ "hw:numa_cpus.0": "0,1",
+ "hw:numa_cpus.1": "2,3,4,5",
+ "hw:cpu_cores": "2",
+ "hw:cpu_threads_policy": "isolate"
+ },
+ "cpu_frequency": "2.4 GHz",
+ "num_cpus": 2,
+ "mem_size": "10 GB"
+ },
+ "local_storages": [],
+ "image_file": "vNatVNFImage",
+ "cps": []
+ }
+ ],
+ "image_files": [
+ {
+ "properties": {
+ "operating_system": "linux",
+ "sw_image": "/swimages/vRouterVNF_ControlPlane.qcow2",
+ "name": "vNatVNFImage",
+ "container_format": "bare",
+ "min_ram": "1 GB",
+ "disk_format": "qcow2",
+ "supported_virtualisation_environments": [
+ "test_0"
+ ],
+ "version": "1.0",
+ "checksum": "5000",
+ "min_disk": "10 GB",
+ "size": "10 GB"
+ },
+ "image_file_id": "vNatVNFImage",
+ "description": ""
+ }
+ ],
+ "routers": [],
+ "local_storages": [],
+ "vnf_exposed": {
+ "external_cps": [
+ {
+ "key_name": "sriov_plane",
+ "cp_id": "SRIOV_Port"
+ }
+ ],
+ "forward_cps": []
+ },
+ "vls": [
+ {
+ "route_id": "",
+ "vl_id": "sriov_link",
+ "route_external": False,
+ "description": "",
+ "properties": {
+ "vl_flavours": {
+ "vl_id": "aaaa"
+ },
+ "connectivity_type": {
+ "layer_protocol": "ipv4",
+ "flow_pattern": "flat"
+ },
+ "description": "sriov_link",
+ "test_access": [
+ "test"
+ ]
+ }
+ }
+ ],
+ "cps": [
+ {
+ "vl_id": "sriov_link",
+ "vdu_id": "vdu_vNat",
+ "description": "",
+ "cp_id": "SRIOV_Port",
+ "properties": {
+ "address_data": [
+ {
+ "address_type": "ip_address",
+ "l3_address_data": {
+ "ip_address_type": "ipv4",
+ "floating_ip_activated": False,
+ "number_of_ip_address": 1,
+ "ip_address_assignment": True
+ }
+ }
+ ],
+ "description": "sriov port",
+ "layer_protocol": "ipv4",
+ "virtual_network_interface_requirements": [
+ {
+ "requirement": {
+ "SRIOV": "true"
+ },
+ "support_mandatory": False,
+ "name": "sriov",
+ "description": "sriov"
+ },
+ {
+ "requirement": {
+ "SRIOV": "False"
+ },
+ "support_mandatory": False,
+ "name": "normal",
+ "description": "normal"
+ }
+ ],
+ "role": "root",
+ "bitrate_requirement": 10
+ }
+ }
+ ],
+ "metadata": {
+ "vnfSoftwareVersion": "1.0.0",
+ "vnfProductName": "zte",
+ "localizationLanguage": [
+ "english",
+ "chinese"
+ ],
+ "vnfProvider": "zte",
+ "vnfmInfo": "zte",
+ "defaultLocalizationLanguage": "english",
+ "vnfdId": "zte-hss-1.0",
+ "id": "zte-hss-1.0",
+ "vnfProductInfoDescription": "hss",
+ "vnfdVersion": "1.0.0",
+ "vnfProductInfoName": "hss"
+ }
+ }
+
+ def tearDown(self):
+ pass
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ def test_upload_vnf_pkg(self, mock_parse_vnfd):
+ data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "empty.txt"), "rb")}
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED"
+ )
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode(self.vnfd_data)
+ response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+ vnf_pkg1 = VnfPackageModel.objects.filter(vnfPackageId="222")
+ self.assertEqual("zte-hss-1.0", vnf_pkg1[0].vnfdId)
+ self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
+
+ os.remove(vnf_pkg1[0].localFilePath)
+ os.removedirs(os.path.join(CATALOG_ROOT_PATH, vnf_pkg1[0].vnfPackageId))
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ @mock.patch.object(urllib2, 'urlopen')
+ def test_upload_nf_pkg_from_uri(self, mock_urlopen, mock_parse_vnfd):
+ vnf_pkg = VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED"
+ )
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode(self.vnfd_data)
+ req_data = {"addressInformation": "https://127.0.0.1:1234/sdc/v1/hss.csar"}
+ mock_urlopen.return_value = MockReq()
+ vnf_pkg_id = vnf_pkg.vnfPackageId
+ VnfPkgUploadThread(req_data, vnf_pkg_id).run()
+ vnf_pkg1 = VnfPackageModel.objects.filter(vnfPackageId="222")
+ self.assertEqual("zte-hss-1.0", vnf_pkg1[0].vnfdId)
+
+ os.remove(vnf_pkg1[0].localFilePath)
+ os.removedirs(os.path.join(CATALOG_ROOT_PATH, vnf_pkg1[0].vnfPackageId))
+
+ def test_create_vnf_pkg(self):
+ req_data = {
+ "userDefinedData": {"a": "A"}
+ }
+ response = self.client.post("/api/vnfpkgm/v1/vnf_packages", data=req_data, format="json")
+ resp_data = json.loads(response.content)
+ expect_resp_data = {
+ "id": resp_data.get("id"),
+ "onboardingState": "CREATED",
+ "operationalState": "DISABLED",
+ "usageState": "NOT_IN_USE",
+ "userDefinedData": {"a": "A"},
+ "_links": None # TODO
+ }
+ self.assertEqual(expect_resp_data, resp_data)
+ self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+
+ def test_query_single_vnf(self):
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ vnfdId="zte-hss-1.0",
+ vnfVendor="zte",
+ vnfdProductName="hss",
+ vnfSoftwareVersion="1.0.0",
+ vnfdVersion="1.0.0",
+ checksum='{"algorithm":"111", "hash": "11"}',
+ onboardingState="CREATED",
+ operationalState="DISABLED",
+ usageState="NOT_IN_USE",
+ userDefinedData='{"a": "A"}'
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222")
+ expect_data = {
+ "id": "222",
+ "vnfdId": "zte-hss-1.0",
+ "vnfProductName": "hss",
+ "vnfSoftwareVersion": "1.0.0",
+ "vnfdVersion": "1.0.0",
+ "checksum": {"algorithm": "111", "hash": "11"},
+ "softwareImages": None,
+ "additionalArtifacts": None,
+ "onboardingState": "CREATED",
+ "operationalState": "DISABLED",
+ "usageState": "NOT_IN_USE",
+ "userDefinedData": {"a": "A"},
+ "_links": None
+ }
+ self.assertEqual(response.data, expect_data)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ def test_query_multiple_vnf(self):
+ VnfPackageModel.objects.create(
+ vnfPackageId="111",
+ vnfdId="zte-hss-1.0",
+ vnfVendor="zte",
+ vnfdProductName="hss",
+ vnfSoftwareVersion="1.0.0",
+ vnfdVersion="1.0.0",
+ checksum='{"algorithm":"111", "hash": "11"}',
+ onboardingState="CREATED",
+ operationalState="DISABLED",
+ usageState="NOT_IN_USE",
+ userDefinedData='{"a": "A"}'
+ )
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ vnfdId="zte-hss-1.0",
+ vnfVendor="zte",
+ vnfdProductName="hss",
+ vnfSoftwareVersion="1.0.0",
+ vnfdVersion="1.0.0",
+ checksum='{"algorithm":"111", "hash": "11"}',
+ onboardingState="CREATED",
+ operationalState="DISABLED",
+ usageState="NOT_IN_USE",
+ userDefinedData='{"a": "A"}'
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages")
+ expect_data = [
+ {
+ "id": "111",
+ "vnfdId": "zte-hss-1.0",
+ "vnfProductName": "hss",
+ "vnfSoftwareVersion": "1.0.0",
+ "vnfdVersion": "1.0.0",
+ "checksum": {"algorithm": "111", "hash": "11"},
+ "softwareImages": None,
+ "additionalArtifacts": None,
+ "onboardingState": "CREATED",
+ "operationalState": "DISABLED",
+ "usageState": "NOT_IN_USE",
+ "userDefinedData": {"a": "A"},
+ "_links": None
+ },
+ {
+ "id": "222",
+ "vnfdId": "zte-hss-1.0",
+ "vnfProductName": "hss",
+ "vnfSoftwareVersion": "1.0.0",
+ "vnfdVersion": "1.0.0",
+ "checksum": {"algorithm": "111", "hash": "11"},
+ "softwareImages": None,
+ "additionalArtifacts": None,
+ "onboardingState": "CREATED",
+ "operationalState": "DISABLED",
+ "usageState": "NOT_IN_USE",
+ "userDefinedData": {"a": "A"},
+ "_links": None
+ }
+ ]
+ self.assertEqual(response.data, expect_data)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ def test_delete_single_vnf_pkg(self):
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ vnfdId="zte-hss-1.0",
+ vnfVendor="zte",
+ vnfdProductName="hss",
+ vnfSoftwareVersion="1.0.0",
+ vnfdVersion="1.0.0",
+ checksum='{"algorithm":"111", "hash": "11"}',
+ onboardingState="CREATED",
+ operationalState="DISABLED",
+ usageState="NOT_IN_USE",
+ userDefinedData='{"a": "A"}'
+ )
+ response = self.client.delete("/api/vnfpkgm/v1/vnf_packages/222")
+ self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+ self.assertEqual(response.data, None)
+
+ def test_fetch_vnf_pkg(self):
+ pass
+
+ def test_fetch_partical_vnf_pkg(self):
+ with open("vnfPackage.csar", "wb") as fp:
+ fp.writelines("AAAABBBBCCCCDDDD")
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="ONBOARDED",
+ localFilePath="vnfPackage.csar"
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content", RANGE="4-7")
+ partial_file_content = ''
+ for data in response.streaming_content:
+ partial_file_content = partial_file_content + data
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual('BBBB', partial_file_content)
+ os.remove("vnfPackage.csar")
diff --git a/catalog/packages/tests/test_nf.py b/catalog/packages/tests/test_vnfpackage.py
index 576c956c..a3cbe94a 100644
--- a/catalog/packages/tests/test_nf.py
+++ b/catalog/packages/tests/test_vnfpackage.py
@@ -13,28 +13,15 @@
# limitations under the License.
import json
-import os
import mock
-import urllib2
from rest_framework.test import APIClient
from django.test import TestCase
from rest_framework import status
-
-from catalog.packages.biz.nf_package import NfDistributeThread, NfPkgDeleteThread
+from catalog.packages.biz.vnfpackage import NfDistributeThread, NfPkgDeleteThread
from catalog.pub.database.models import JobStatusModel, JobModel
from catalog.pub.database.models import VnfPackageModel
from catalog.pub.msapi import sdc
from catalog.pub.utils import restcall, toscaparser
-from catalog.pub.config.config import CATALOG_ROOT_PATH
-from catalog.packages.biz.nf_package import VnfpkgUploadThread
-
-
-class MockReq():
- def read(self):
- return "1"
-
- def close(self):
- pass
class TestNfPackage(TestCase):
@@ -406,32 +393,3 @@ class TestNfPackage(TestCase):
resp = self.client.post("/api/catalog/v1/parservnfd", req_data, format='json')
self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
self.assertEqual(resp.data, {"error": "VNF CSAR(1) does not exist."})
-
- def test_upload_vnfPkg(self):
- data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "empty.txt"), "rb")}
- response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
- self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
-
- @mock.patch.object(urllib2, 'urlopen')
- def test_upload_nf_pkg(self, mock_urlopen):
- req_data = {"addressInformation": "https://127.0.0.1:1234/sdc/v1/hss.csar"}
- mock_urlopen.return_value = MockReq()
- vnfPkgId = "222"
- VnfpkgUploadThread(req_data, vnfPkgId).run()
-
- def test_create_vnf_pkg(self):
- req_data = {
- "userDefinedData": {"a": "A"}
- }
- response = self.client.post("/api/vnfpkgm/v1/vnf_packages", data=req_data, format="json")
- resp_data = json.loads(response.content)
- expect_resp_data = {
- "id": resp_data.get("id"),
- "onboardingState": "CREATED",
- "operationalState": "DISABLED",
- "usageState": "NOT_IN_USE",
- "userDefinedData": {"a": "A"},
- "_links": None # TODO
- }
- self.assertEqual(expect_resp_data, resp_data)
- self.assertEqual(response.status_code, status.HTTP_201_CREATED)
diff --git a/catalog/packages/urls.py b/catalog/packages/urls.py
index 3dec2ebe..d6589b50 100644
--- a/catalog/packages/urls.py
+++ b/catalog/packages/urls.py
@@ -14,10 +14,8 @@
from django.conf.urls import url
-from catalog.packages.views.vnfpkg_views import package_content, upload_from_uri, vnf_packages
-from catalog.packages.views import (catalog_views, ns_descriptor_views,
- nsd_content_views, pnf_descriptor_views,
- pnfd_content_views)
+from catalog.packages.views import vnf_package_views
+from catalog.packages.views import catalog_views, ns_descriptor_views, pnf_descriptor_views
urlpatterns = [
@@ -31,24 +29,24 @@ urlpatterns = [
# NSD
url(r'^api/nsd/v1/ns_descriptors$', ns_descriptor_views.ns_descriptors_rc, name='ns_descriptors_rc'),
url(r'^api/nsd/v1/ns_descriptors/(?P<nsdInfoId>[0-9a-zA-Z\-\_]+)$', ns_descriptor_views.ns_info_rd, name='ns_info_rd'),
- url(r'^api/nsd/v1/ns_descriptors/(?P<nsdInfoId>[0-9a-zA-Z\-\_]+)/nsd_content$', nsd_content_views.upload_nsd_content, name='nsd_content_ru'),
+ url(r'^api/nsd/v1/ns_descriptors/(?P<nsdInfoId>[0-9a-zA-Z\-\_]+)/nsd_content$', ns_descriptor_views.nsd_content_ru, name='nsd_content_ru'),
# PNF
- url(r'^api/nsd/v1/pnf_descriptors$', pnf_descriptor_views.create_pnf_descriptors, name='pnf_descriptors_rc'),
- url(r'^api/nsd/v1/pnf_descriptors/(?P<pnfdInfoId>[0-9a-zA-Z\-\_]+)/pnfd_content$', pnfd_content_views.upload_pnfd_content, name='pnfd_content_ru'),
+ url(r'^api/nsd/v1/pnf_descriptors$', pnf_descriptor_views.pnf_descriptors_rc, name='pnf_descriptors_rc'),
+ url(r'^api/nsd/v1/pnf_descriptors/(?P<pnfdInfoId>[0-9a-zA-Z\-\_]+)/pnfd_content$', pnf_descriptor_views.pnfd_content_ru, name='pnfd_content_ru'),
# TODO SOL005 & SOL003
- # url(r'^api/nsd/v1/pnf_descriptors/(?P<pnfdInfoId>[0-9a-zA-Z\-\_]+)$', pnfd_info.as_view(), name='pnfd_info_rd'),
+ url(r'^api/nsd/v1/pnf_descriptors/(?P<pnfdInfoId>[0-9a-zA-Z\-\_]+)$', pnf_descriptor_views.pnfd_info_rd, name='pnfd_info_rd'),
# url(r'^api/nsd/v1/subscriptions', nsd_subscriptions.as_view(), name='subscriptions_rc'),
# url(r'^api/nsd/v1/subscriptions/(?P<subscriptionId>[0-9a-zA-Z\-\_]+)$', nsd_subscription.as_view(), name='subscription_rd'),
- url(r'^api/vnfpkgm/v1/vnf_packages$', vnf_packages.as_view(), name='vnf_packages_rc'),
- # url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)$', vnf_package.as_view(), name='vnf_package_rd'),
+ url(r'^api/vnfpkgm/v1/vnf_packages$', vnf_package_views.vnf_packages_rc, name='vnf_packages_rc'),
+ url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)$', vnf_package_views.vnf_package_rd, name='vnf_package_rd'),
# url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/vnfd$', vnfd.as_view(), name='vnfd_r'),
url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/package_content$',
- package_content.as_view(), name='package_content_ru'),
+ vnf_package_views.upload_vnf_pkg_content, name='package_content_ru'),
url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/package_content/upload_from_uri$',
- upload_from_uri.as_view(), name='upload_from_uri_c'),
+ vnf_package_views.upload_vnf_pkg_from_uri, name='upload_from_uri_c'),
# url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/artifacts/artifactPath$', artifacts.as_view(), name='artifacts_r'),
# url(r'^api/vnfpkgm/v1/subscriptions', vnfpkg_subscriptions.as_view(), name='subscriptions_rc'),
# url(r'^api/vnfpkgm/v1/subscriptions/(?P<subscriptionId>[0-9a-zA-Z\-\_]+)$', vnfpkg_subscription.as_view(), name='subscription_rd'),
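
The routing table above replaces the class-based views (vnf_packages, package_content, upload_from_uri) with the function-based views in vnf_package_views and pnf_descriptor_views. A minimal sketch of exercising the new VNF package routes with rest_framework's test client; the payload and assertions are illustrative, not taken from this patch:

    from rest_framework import status
    from rest_framework.test import APIClient

    client = APIClient()

    # Create a VNF package resource (routes to vnf_package_views.vnf_packages_rc).
    resp = client.post("/api/vnfpkgm/v1/vnf_packages",
                       data={"userDefinedData": {"key": "value"}}, format="json")
    assert resp.status_code == status.HTTP_201_CREATED
    pkg_id = resp.data["id"]

    # Query and delete the same resource (routes to vnf_package_views.vnf_package_rd).
    assert client.get("/api/vnfpkgm/v1/vnf_packages/%s" % pkg_id).status_code == status.HTTP_200_OK
    assert client.delete("/api/vnfpkgm/v1/vnf_packages/%s" % pkg_id).status_code == status.HTTP_204_NO_CONTENT
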
diff --git a/catalog/packages/views/catalog_views.py b/catalog/packages/views/catalog_views.py
index 5fc62622..157af9fd 100644
--- a/catalog/packages/views/catalog_views.py
+++ b/catalog/packages/views/catalog_views.py
@@ -20,8 +20,7 @@ from drf_yasg.utils import no_body, swagger_auto_schema
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
-
-from catalog.packages.biz import nf_package, ns_package
+from catalog.packages.biz import vnfpackage, nspackage
from catalog.packages.serializers.catalog_serializers import InternalErrorRequestSerializer
from catalog.packages.serializers.catalog_serializers import NfPackageDistributeRequestSerializer
from catalog.packages.serializers.catalog_serializers import NfPackageSerializer
@@ -60,7 +59,7 @@ def nspackages_rc(request, *args, **kwargs):
if request.method == 'GET':
# Gets ns package list
- ret = ns_package.ns_get_csars()
+ ret = nspackage.ns_get_csars()
normal_status = status.HTTP_200_OK
if ret[0] == 0:
@@ -78,7 +77,7 @@ def nspackages_rc(request, *args, **kwargs):
csar_id = ignore_case_get(request.data, "csarId")
logger.debug("csar_id is %s", csar_id)
- ret = ns_package.ns_on_distribute(csar_id)
+ ret = nspackage.ns_on_distribute(csar_id)
normal_status = status.HTTP_202_ACCEPTED
logger.debug("Leave %s, Return value is %s", fun_name(), ret)
@@ -114,7 +113,7 @@ def nfpackages_rc(request, *args, **kwargs):
request.method)
ret, normal_status, response_serializer, validation_error = None, None, None, None
if request.method == 'GET':
- ret = nf_package.nf_get_csars()
+ ret = vnfpackage.nf_get_csars()
normal_status = status.HTTP_200_OK
response_serializer = NfPackagesSerializer(data=ret[1])
elif request.method == 'POST':
@@ -129,7 +128,7 @@ def nfpackages_rc(request, *args, **kwargs):
vim_ids = ignore_case_get(request_serivalizer.data, "vimIds")
lab_vim_id = ignore_case_get(request_serivalizer.data, "labVimId")
job_id = str(uuid.uuid4())
- nf_package.NfDistributeThread(
+ vnfpackage.NfDistributeThread(
csar_id, vim_ids, lab_vim_id, job_id).start()
ret = [0, {"jobId": job_id}]
normal_status = status.HTTP_202_ACCEPTED
@@ -190,7 +189,7 @@ def ns_rd_csar(request, *args, **kwargs):
fun_name(), request.method, csar_id)
ret, normal_status, response_serializer, validation_error = None, None, None, None
if request.method == 'GET':
- ret = ns_package.ns_get_csar(csar_id)
+ ret = nspackage.ns_get_csar(csar_id)
normal_status = status.HTTP_200_OK
if ret[0] == 0:
response_serializer = NsPackageSerializer(data=ret[1])
@@ -198,7 +197,7 @@ def ns_rd_csar(request, *args, **kwargs):
if validation_error:
return validation_error
elif request.method == 'DELETE':
- ret = ns_package.ns_delete_csar(csar_id)
+ ret = nspackage.ns_delete_csar(csar_id)
normal_status = status.HTTP_200_OK
logger.info("Leave %s, Return value is %s", fun_name(), ret)
if ret[0] != 0:
@@ -249,13 +248,13 @@ def nf_rd_csar(request, *args, **kwargs):
ret, normal_status, response_serializer, validation_error = None, None, None, None
if request.method == 'GET':
- ret = nf_package.nf_get_csar(csar_id)
+ ret = vnfpackage.nf_get_csar(csar_id)
normal_status = status.HTTP_200_OK
response_serializer = NfPackageSerializer(data=ret[1])
elif request.method == 'DELETE':
job_id = str(uuid.uuid4())
- nf_package.NfPkgDeleteThread(csar_id, job_id).start()
+ vnfpackage.NfPkgDeleteThread(csar_id, job_id).start()
ret = [0, {"jobId": job_id}]
normal_status = status.HTTP_202_ACCEPTED
response_serializer = PostJobResponseSerializer(data=ret[1])
@@ -291,7 +290,7 @@ def ns_model_parser(request, *args, **kwargs):
fun_name(),
csar_id,
inputs)
- ret = ns_package.parse_nsd(csar_id, inputs)
+ ret = nspackage.parse_nsd(csar_id, inputs)
logger.info("Leave %s, Return value is %s", fun_name(), ret)
if ret[0] != 0:
return Response(
@@ -324,7 +323,7 @@ def vnf_model_parser(request, *args, **kwargs):
fun_name(),
csar_id,
inputs)
- ret = nf_package.parse_vnfd(csar_id, inputs)
+ ret = vnfpackage.parse_vnfd(csar_id, inputs)
logger.info("Leave %s, Return value is %s", fun_name(), ret)
if ret[0] != 0:
return Response(
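
The catalog_views changes above are a mechanical rename of the biz modules (ns_package to nspackage, nf_package to vnfpackage); the call sites keep the same entry points. A one-line sketch of the new import surface, purely illustrative:

    from catalog.packages.biz import nspackage, vnfpackage

    # Same functions as before, now living under the renamed modules.
    ns_ret = nspackage.ns_get_csars()
    nf_ret = vnfpackage.nf_get_csars()
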
diff --git a/catalog/packages/views/ns_descriptor_views.py b/catalog/packages/views/ns_descriptor_views.py
index 455b01ae..9ba5538c 100644
--- a/catalog/packages/views/ns_descriptor_views.py
+++ b/catalog/packages/views/ns_descriptor_views.py
@@ -19,8 +19,9 @@ from drf_yasg.utils import no_body, swagger_auto_schema
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
+from django.http import FileResponse
-from catalog.packages.biz.ns_descriptor import create, query_multiple, query_single, delete_single
+from catalog.packages.biz.ns_descriptor import create, query_multiple, query_single, delete_single, upload, download
from catalog.packages.serializers.create_nsd_info_request import \
CreateNsdInfoRequestSerializer
from catalog.packages.serializers.nsd_info import NsdInfoSerializer
@@ -123,3 +124,49 @@ def ns_descriptors_rc(request, *args, **kwargs):
data={'error': 'Query of multiple NS descriptor resources failed.'},
status=status.HTTP_500_INTERNAL_SERVER_ERROR
)
+
+
+@swagger_auto_schema(
+ method='PUT',
+ operation_description="Upload NSD content",
+ request_body=no_body,
+ responses={
+ status.HTTP_204_NO_CONTENT: {},
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Fetch NSD content",
+ request_body=no_body,
+ responses={
+ status.HTTP_204_NO_CONTENT: {},
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@api_view(http_method_names=['PUT', 'GET'])
+def nsd_content_ru(request, *args, **kwargs):
+ nsd_info_id = kwargs.get("nsdInfoId")
+ if request.method == 'PUT':
+ files = request.FILES.getlist('file')
+ try:
+ upload(files[0], nsd_info_id)
+ return Response(data={}, status=status.HTTP_204_NO_CONTENT)
+ except IOError:
+ logger.error(traceback.format_exc())
+ raise CatalogException
+ return Response(data={'error': 'Uploading nsd content failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ if request.method == 'GET':
+ try:
+ file_path = download(nsd_info_id)
+ file_name = file_path.split('/')[-1]
+ file_name = file_name.split('\\')[-1]
+ response = FileResponse(open(file_path, 'rb'), status=status.HTTP_200_OK)
+ response['Content-Disposition'] = 'attachment; filename=%s' % file_name.encode('utf-8')
+ return response
+ except IOError:
+ logger.error(traceback.format_exc())
+ raise CatalogException
+ return Response(data={'error': 'Downloading nsd content failed.'},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
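
The new nsd_content_ru view accepts a multipart upload on PUT (it reads request.FILES.getlist('file'), so the part name must be 'file') and streams the stored file back on GET as a FileResponse. A hedged sketch of driving both paths with the requests library; the host, port and nsdInfoId value are assumptions for illustration:

    import requests

    base = "http://127.0.0.1:8806/api/nsd/v1/ns_descriptors/example-nsd-info-id/nsd_content"

    # PUT: upload the NSD content as the 'file' multipart field.
    with open("ns.csar", "rb") as csar:
        resp = requests.put(base, files={"file": csar})
    assert resp.status_code == 204

    # GET: download the stored content; the view sets a Content-Disposition attachment header.
    resp = requests.get(base)
    with open("downloaded_ns.csar", "wb") as out:
        out.write(resp.content)
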
diff --git a/catalog/packages/views/nsd_content_views.py b/catalog/packages/views/nsd_content_views.py
deleted file mode 100644
index 2c49f141..00000000
--- a/catalog/packages/views/nsd_content_views.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import traceback
-
-from drf_yasg.utils import no_body, swagger_auto_schema
-from rest_framework import status
-from rest_framework.decorators import api_view
-from rest_framework.response import Response
-
-from catalog.packages.biz.ns_descriptor import upload
-from catalog.pub.exceptions import CatalogException
-
-logger = logging.getLogger(__name__)
-
-
-@swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
-)
-def get(self, request):
- # TODO
- return None
-
-
-@swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
-)
-def post(self, request):
- # TODO
- return None
-
-
-@swagger_auto_schema(
- method='PUT',
- operation_description="Upload NSD content",
- request_body=no_body,
- responses={
- status.HTTP_204_NO_CONTENT: {},
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
-)
-@api_view(http_method_names=['PUT'])
-def upload_nsd_content(request, *args, **kwargs):
- nsd_info_id = kwargs.get("nsdInfoId")
- files = request.FILES.getlist('file')
- try:
- upload(files, nsd_info_id)
- return Response(data={}, status=status.HTTP_204_NO_CONTENT)
- except IOError:
- logger.error(traceback.format_exc())
- raise CatalogException
- return Response(data={'error': 'Uploading nsd content failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
diff --git a/catalog/packages/views/nsd_views.py b/catalog/packages/views/nsd_views.py
deleted file mode 100644
index c5134cde..00000000
--- a/catalog/packages/views/nsd_views.py
+++ /dev/null
@@ -1,201 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from drf_yasg.utils import swagger_auto_schema
-from rest_framework import status
-from rest_framework.views import APIView
-
-
-class ns_descriptors(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- # TODO
- return None
-
-
-class ns_info(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- # TODO
- return None
-
-
-class nsd_content(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- # TODO
- return None
-
-
-class pnf_descriptors(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- # TODO
- return None
-
-
-class pnfd_info(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- # TODO
- return None
-
-
-class pnfd_content(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- # TODO
- return None
-
-
-class nsd_subscriptions(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- # TODO
- return None
-
-
-class nsd_subscription(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- # TODO
- return None
diff --git a/catalog/packages/views/pnf_descriptor_views.py b/catalog/packages/views/pnf_descriptor_views.py
index 2e56c33c..b571f01e 100644
--- a/catalog/packages/views/pnf_descriptor_views.py
+++ b/catalog/packages/views/pnf_descriptor_views.py
@@ -15,42 +15,73 @@
import logging
import traceback
-from drf_yasg.utils import swagger_auto_schema
+from drf_yasg.utils import no_body, swagger_auto_schema
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
+from django.http import FileResponse
-from catalog.packages.biz.pnf_descriptor import create
+
+from catalog.packages.biz.pnf_descriptor import create, query_multiple, query_single, upload, download, delete_pnf
from catalog.packages.serializers.create_pnfd_info_request import \
CreatePnfdInfoRequestSerializer
from catalog.packages.serializers.pnfd_info import PnfdInfoSerializer
+from catalog.packages.serializers.pnfd_infos import PnfdInfosSerializer
from catalog.pub.exceptions import CatalogException
logger = logging.getLogger(__name__)
@swagger_auto_schema(
+ method='GET',
+ operation_description="Query an individual PNF descriptor resource",
+ request_body=no_body,
responses={
- # status.HTTP_200_OK: Serializer(),
+ status.HTTP_200_OK: PnfdInfoSerializer(),
status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
}
)
-# @api_view(http_method_names=['GET'])
-def query_multiple_pnfds(self, request):
- # TODO
- return None
-
-
@swagger_auto_schema(
+ method='DELETE',
+ operation_description="Delete an individual PNF descriptor resource",
+ request_body=no_body,
responses={
- # status.HTTP_200_OK: Serializer(),
+ status.HTTP_204_NO_CONTENT: None,
status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
}
)
-# @api_view(http_method_names=['GET'])
-def query_single_pnfd(self, request):
- # TODO
- return None
+@api_view(http_method_names=['GET', 'DELETE'])
+def pnfd_info_rd(request, pnfdInfoId):
+ if request.method == 'GET':
+ logger.debug("Query an individual PNF descriptor> %s" % request.data)
+ try:
+ res = query_single(pnfdInfoId)
+ query_serializer = PnfdInfoSerializer(data=res)
+ if not query_serializer.is_valid():
+ raise CatalogException
+ return Response(data=query_serializer.data, status=status.HTTP_200_OK)
+ except CatalogException:
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'Query an individual PNF descriptor failed.'},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+ except Exception as e:
+ logger.error(e.message)
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'unexpected exception'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ if request.method == 'DELETE':
+ logger.debug("Delete an individual PNFD resource> %s" % request.data)
+ try:
+ delete_pnf(pnfdInfoId)
+ return Response(data=None, status=status.HTTP_204_NO_CONTENT)
+ except CatalogException:
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'Delete an individual PNFD resource failed.'},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+ except Exception as e:
+ logger.error(e.message)
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'unexpected exception'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
@swagger_auto_schema(
@@ -62,17 +93,87 @@ def query_single_pnfd(self, request):
status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
}
)
-@api_view(http_method_names=['POST'])
-def create_pnf_descriptors(request, *args, **kwargs):
- try:
- create_pnfd_info_request = CreatePnfdInfoRequestSerializer(data=request.data)
- if not create_pnfd_info_request.is_valid():
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query multiple PNF descriptor resources",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: PnfdInfosSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@api_view(http_method_names=['POST', 'GET'])
+def pnf_descriptors_rc(request, *args, **kwargs):
+ if request.method == 'POST':
+ try:
+ create_pnfd_info_request = CreatePnfdInfoRequestSerializer(data=request.data)
+ if not create_pnfd_info_request.is_valid():
+ raise CatalogException
+ data = create(create_pnfd_info_request.data)
+ pnfd_info = PnfdInfoSerializer(data=data)
+ if not pnfd_info.is_valid():
+ raise CatalogException
+ return Response(data=pnfd_info.data, status=status.HTTP_201_CREATED)
+ except CatalogException:
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'Creating pnfd info failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ if request.method == 'GET':
+ try:
+ data = query_multiple()
+ pnfd_infos = PnfdInfosSerializer(data=data)
+ if not pnfd_infos.is_valid():
+ raise CatalogException
+ return Response(data=pnfd_infos.data, status=status.HTTP_200_OK)
+ except CatalogException:
+ logger.error(traceback.format_exc())
+ return Response(
+ data={'error': 'Query of multiple PNF descriptor resources failed.'},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR
+ )
+
+
+@swagger_auto_schema(
+ method='PUT',
+ operation_description="Upload PNFD content",
+ request_body=no_body,
+ responses={
+ status.HTTP_204_NO_CONTENT: {},
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Fetch PNFD content",
+ request_body=no_body,
+ responses={
+ status.HTTP_204_NO_CONTENT: {},
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@api_view(http_method_names=['PUT', 'GET'])
+def pnfd_content_ru(request, *args, **kwargs):
+ pnfd_info_id = kwargs.get("pnfdInfoId")
+ if request.method == 'PUT':
+ files = request.FILES.getlist('file')
+ try:
+ upload(files, pnfd_info_id)
+ return Response(data={}, status=status.HTTP_204_NO_CONTENT)
+ except IOError:
+ logger.error(traceback.format_exc())
raise CatalogException
- data = create(create_pnfd_info_request.data)
- pnfd_info = PnfdInfoSerializer(data=data)
- if not pnfd_info.is_valid():
+ return Response(data={'error': 'Uploading pnfd content failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ if request.method == 'GET':
+ try:
+ file_path = download(pnfd_info_id)
+ file_name = file_path.split('/')[-1]
+ file_name = file_name.split('\\')[-1]
+ response = FileResponse(open(file_path, 'rb'), status=status.HTTP_200_OK)
+ response['Content-Disposition'] = 'attachment; filename=%s' % file_name.encode('utf-8')
+ return response
+ except IOError:
+ logger.error(traceback.format_exc())
raise CatalogException
- return Response(data=pnfd_info.data, status=status.HTTP_201_CREATED)
- except CatalogException:
- logger.error(traceback.format_exc())
- return Response(data={'error': 'Creating pnfd info failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+ return Response(data={'error': 'Downloading pnfd content failed.'},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
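
The reworked pnf_descriptor_views now exposes create and query-multiple on pnf_descriptors_rc, query and delete on pnfd_info_rd, and upload/download on pnfd_content_ru. A short sketch of the create-then-query flow with rest_framework's test client, assuming the PnfdInfoSerializer exposes an 'id' field as the NSD one does; the payload is illustrative:

    from rest_framework.test import APIClient

    client = APIClient()

    # POST routes to pnf_descriptors_rc and returns the PnfdInfoSerializer payload.
    resp = client.post("/api/nsd/v1/pnf_descriptors",
                       data={"userDefinedData": {"vendor": "example"}}, format="json")
    pnfd_info_id = resp.data["id"]

    # GET and DELETE route to pnfd_info_rd.
    client.get("/api/nsd/v1/pnf_descriptors/%s" % pnfd_info_id)
    client.delete("/api/nsd/v1/pnf_descriptors/%s" % pnfd_info_id)
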
diff --git a/catalog/packages/views/pnfd_content_views.py b/catalog/packages/views/pnfd_content_views.py
deleted file mode 100644
index b991c763..00000000
--- a/catalog/packages/views/pnfd_content_views.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import traceback
-
-from drf_yasg.utils import no_body, swagger_auto_schema
-from rest_framework import status
-from rest_framework.decorators import api_view
-from rest_framework.response import Response
-
-from catalog.packages.biz.pnf_descriptor import upload
-from catalog.pub.exceptions import CatalogException
-
-logger = logging.getLogger(__name__)
-
-
-@swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
-)
-def get(self, request):
- # TODO
- return None
-
-
-@swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
-)
-def post(self, request):
- # TODO
- return None
-
-
-@swagger_auto_schema(
- method='PUT',
- operation_description="Upload PNFD content",
- request_body=no_body,
- responses={
- status.HTTP_204_NO_CONTENT: {},
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
-)
-@api_view(http_method_names=['PUT'])
-def upload_pnfd_content(request, *args, **kwargs):
- pnfd_info_id = kwargs.get("pnfdInfoId")
- files = request.FILES.getlist('file')
- try:
- upload(files, pnfd_info_id)
- return Response(data={}, status=status.HTTP_204_NO_CONTENT)
- except IOError:
- logger.error(traceback.format_exc())
- raise CatalogException
- return Response(data={'error': 'Uploading pnfd content failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
diff --git a/catalog/packages/views/vnf_package_views.py b/catalog/packages/views/vnf_package_views.py
new file mode 100644
index 00000000..2bdd3eb8
--- /dev/null
+++ b/catalog/packages/views/vnf_package_views.py
@@ -0,0 +1,228 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import traceback
+import logging
+import os
+
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from drf_yasg.utils import swagger_auto_schema, no_body
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+from catalog.pub.exceptions import CatalogException
+from catalog.packages.serializers.upload_vnf_pkg_from_uri_req import UploadVnfPackageFromUriRequestSerializer
+from catalog.packages.serializers.create_vnf_pkg_info_req import CreateVnfPkgInfoRequestSerializer
+from catalog.packages.serializers.vnf_pkg_info import VnfPkgInfoSerializer
+from catalog.packages.serializers.vnf_pkg_infos import VnfPkgInfosSerializer
+from catalog.packages.biz.vnf_package import create_vnf_pkg, query_multiple, VnfPkgUploadThread, \
+ query_single, delete_vnf_pkg, parse_vnfd_and_save, fetch_vnf_pkg
+from catalog.pub.database.models import VnfPackageModel
+
+logger = logging.getLogger(__name__)
+
+
+@swagger_auto_schema(
+ method="GET",
+ operation_description="Query multiple VNF package resource",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: VnfPkgInfosSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@swagger_auto_schema(
+ method="POST",
+ operation_description="Create an individual VNF package resource",
+ request_body=CreateVnfPkgInfoRequestSerializer,
+ responses={
+ status.HTTP_201_CREATED: VnfPkgInfoSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@api_view(http_method_names=["GET", "POST"])
+def vnf_packages_rc(request):
+ if request.method == 'GET':
+ logger.debug("Query VNF packages> %s" % request.data)
+ try:
+ res = query_multiple()
+ query_serializer = VnfPkgInfosSerializer(data=res)
+ if not query_serializer.is_valid():
+ raise CatalogException
+ return Response(data=query_serializer.data, status=status.HTTP_200_OK)
+ except CatalogException:
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'Query VNF package failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+ except Exception as e:
+ logger.error(e.message)
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'unexpected exception'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ if request.method == 'POST':
+ logger.debug("Create VNF package> %s" % request.data)
+ try:
+ req_serializer = CreateVnfPkgInfoRequestSerializer(data=request.data)
+ if not req_serializer.is_valid():
+ raise CatalogException
+ res = create_vnf_pkg(req_serializer.data)
+ create_vnf_pkg_resp_serializer = VnfPkgInfoSerializer(data=res)
+ if not create_vnf_pkg_resp_serializer.is_valid():
+ raise CatalogException
+ return Response(data=create_vnf_pkg_resp_serializer.data, status=status.HTTP_201_CREATED)
+ except CatalogException:
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'Create VNF package failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+ except Exception as e:
+ logger.error(e.message)
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'unexpected exception'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+
+@swagger_auto_schema(
+ method='PUT',
+ operation_description="Upload VNF package content",
+ request_body=no_body,
+ responses={
+ status.HTTP_202_ACCEPTED: "Successfully",
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@swagger_auto_schema(
+ method="GET",
+ operation_description="Fetch VNF package content",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: VnfPkgInfosSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@api_view(http_method_names=["PUT", "GET"])
+def upload_vnf_pkg_content(request, vnfPkgId):
+ if request.method == "PUT":
+ logger.debug("Upload VNF package %s" % vnfPkgId)
+ try:
+ vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnfPkgId)
+ if vnf_pkg[0].onboardingState != "CREATED":
+ raise CatalogException("VNF package (%s) is not created" % vnfPkgId)
+ file_object = request.FILES.get('file')
+ upload_path = os.path.join(CATALOG_ROOT_PATH, vnfPkgId)
+ if not os.path.exists(upload_path):
+ os.makedirs(upload_path, 0o777)
+
+ upload_file_name = os.path.join(upload_path, file_object.name)
+ with open(upload_file_name, 'wb+') as dest_file:
+ for chunk in file_object.chunks():
+ dest_file.write(chunk)
+
+ parse_vnfd_and_save(vnfPkgId, upload_file_name)
+ return Response(None, status=status.HTTP_202_ACCEPTED)
+ except CatalogException:
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'Upload VNF package failed.'},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+ except Exception as e:
+ logger.error(e.message)
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'unexpected exception'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ if request.method == "GET":
+ try:
+ response = fetch_vnf_pkg(request, vnfPkgId)
+ return response
+ except CatalogException:
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'Fetch VNF package failed.'},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+ except Exception as e:
+ logger.error(e.message)
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'unexpected exception'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+
+@swagger_auto_schema(
+ method='POST',
+ operation_description="Upload VNF package content from uri",
+ request_body=UploadVnfPackageFromUriRequestSerializer,
+ responses={
+ status.HTTP_202_ACCEPTED: "Successfully",
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@api_view(http_method_names=['POST'])
+def upload_vnf_pkg_from_uri(request, vnfPkgId):
+ try:
+ req_serializer = UploadVnfPackageFromUriRequestSerializer(data=request.data)
+ if not req_serializer.is_valid():
+ raise CatalogException
+ VnfPkgUploadThread(req_serializer.data, vnfPkgId).start()
+ return Response(None, status=status.HTTP_202_ACCEPTED)
+ except CatalogException:
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'Upload VNF package failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+ except Exception as e:
+ logger.error(e.message)
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'unexpected exception'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+
+@swagger_auto_schema(
+ method='GET',
+ operation_description="Query an individual VNF package resource",
+ request_body=no_body,
+ responses={
+ status.HTTP_200_OK: VnfPkgInfoSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@swagger_auto_schema(
+ method='DELETE',
+ operation_description="Delete an individual VNF package resource",
+ request_body=no_body,
+ responses={
+ status.HTTP_204_NO_CONTENT: None,
+ status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+ }
+)
+@api_view(http_method_names=['GET', 'DELETE'])
+def vnf_package_rd(request, vnfPkgId):
+ if request.method == 'GET':
+ logger.debug("Query an individual VNF package> %s" % request.data)
+ try:
+ res = query_single(vnfPkgId)
+ query_serializer = VnfPkgInfoSerializer(data=res)
+ if not query_serializer.is_valid():
+ raise CatalogException
+ return Response(data=query_serializer.data, status=status.HTTP_200_OK)
+ except CatalogException:
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'Query an individual VNF package failed.'},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+ except Exception as e:
+ logger.error(e.message)
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'unexpected exception'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ if request.method == 'DELETE':
+ logger.debug("Delete an individual VNF package> %s" % request.data)
+ try:
+ delete_vnf_pkg(vnfPkgId)
+ return Response(data=None, status=status.HTTP_204_NO_CONTENT)
+ except CatalogException:
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'Delete an individual VNF package failed.'},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+ except Exception as e:
+ logger.error(e.message)
+ logger.error(traceback.format_exc())
+ return Response(data={'error': 'unexpected exception'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
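
The upload_vnf_pkg_from_uri view above validates the body with UploadVnfPackageFromUriRequestSerializer and hands it to VnfPkgUploadThread, which fetches the CSAR in the background. A sketch of the call, reusing the request shape and package id that appear in the removed test (addressInformation, package 222):

    from rest_framework.test import APIClient

    client = APIClient()
    resp = client.post(
        "/api/vnfpkgm/v1/vnf_packages/222/package_content/upload_from_uri",
        data={"addressInformation": "https://127.0.0.1:1234/sdc/v1/hss.csar"},
        format="json")
    # The view answers 202 Accepted while the upload thread runs asynchronously.
    assert resp.status_code == 202
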
diff --git a/catalog/packages/views/vnfpkg_views.py b/catalog/packages/views/vnfpkg_views.py
deleted file mode 100644
index fc019770..00000000
--- a/catalog/packages/views/vnfpkg_views.py
+++ /dev/null
@@ -1,268 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import traceback
-import logging
-from catalog.pub.config.config import CATALOG_ROOT_PATH
-from drf_yasg.utils import swagger_auto_schema
-from rest_framework import status
-from rest_framework.views import APIView
-from rest_framework.response import Response
-from catalog.packages.biz.nf_package import VnfpkgUploadThread
-from catalog.pub.exceptions import CatalogException
-from catalog.packages.serializers.upload_vnf_pkg_from_uri_req import UploadVnfPackageFromUriRequestSerializer
-from catalog.packages.serializers.create_vnf_pkg_info_req import CreateVnfPkgInfoRequestSerializer
-from catalog.packages.serializers.vnf_pkg_info import VnfPkgInfoSerializer
-from catalog.packages.biz.nf_package import create_vnf_pkg, query_multiple
-
-logger = logging.getLogger(__name__)
-
-
-class vnf_packages(APIView):
- @swagger_auto_schema(
- responses={
- status.HTTP_200_OK: VnfPkgInfoSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- logger.debug("Query VNF Packages> %s" % request.data)
- try:
- res = query_multiple()
- query_serializer = VnfPkgInfoSerializer(data=res)
- if not query_serializer.is_valid():
- raise CatalogException
- return Response(data=query_serializer.data, status=status.HTTP_200_OK)
- except CatalogException:
- logger.error(traceback.format_exc())
- return Response(data={'error': 'Querying vnfPkg failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
- except Exception as e:
- logger.error(e.message)
- logger.error(traceback.format_exc())
- return Response(data={'error': 'unexpected exception'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
-
- @swagger_auto_schema(
- request_body=CreateVnfPkgInfoRequestSerializer(),
- responses={
- status.HTTP_201_CREATED: VnfPkgInfoSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- logger.debug("CreateVnfPkg> %s" % request.data)
- try:
- req_serializer = CreateVnfPkgInfoRequestSerializer(data=request.data)
- if not req_serializer.is_valid():
- raise CatalogException
- res = create_vnf_pkg(req_serializer.data)
- create_vnf_pkg_resp_serializer = VnfPkgInfoSerializer(data=res)
- if not create_vnf_pkg_resp_serializer.is_valid():
- raise CatalogException
- return Response(data=create_vnf_pkg_resp_serializer.data, status=status.HTTP_201_CREATED)
- except CatalogException:
- logger.error(traceback.format_exc())
- return Response(data={'error': 'Creating vnfPkg failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
- except Exception as e:
- logger.error(e.message)
- logger.error(traceback.format_exc())
- return Response(data={'error': 'unexpected exception'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
-
-
-class vnf_package(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- # TODO
- return None
-
-
-class vnfd(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- # TODO
- return None
-
-
-class package_content(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- # TODO
- return None
-
- def put(self, request, vnfPkgId):
- logger.debug("UploadVnf %s" % vnfPkgId)
- file_object = request.FILES.get('file')
- upload_path = os.path.join(CATALOG_ROOT_PATH, vnfPkgId)
- if not os.path.exists(upload_path):
- os.makedirs(upload_path, 0o777)
- try:
- upload_file_name = os.path.join(upload_path, file_object.name)
- with open(upload_file_name, 'wb+') as dest_file:
- for chunk in file_object.chunks():
- dest_file.write(chunk)
- except Exception as e:
- logger.error("File upload exception.[%s:%s]" % (type(e), str(e)))
- logger.error("%s", traceback.format_exc())
- return Response(None, status.HTTP_202_ACCEPTED)
-
-
-class upload_from_uri(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- request_body=UploadVnfPackageFromUriRequestSerializer(),
- responses={
- status.HTTP_202_ACCEPTED: "Successfully",
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request, vnfPkgId):
- try:
- req_serializer = UploadVnfPackageFromUriRequestSerializer(data=request.data)
- if not req_serializer.is_valid():
- raise CatalogException
- VnfpkgUploadThread(req_serializer.data, vnfPkgId).start()
- return Response(None, status=status.HTTP_202_ACCEPTED)
- except CatalogException:
- logger.error(traceback.format_exc())
- return Response(data={'error': 'Upload vnfPkg failed.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
- except Exception as e:
- logger.error(e.message)
- logger.error(traceback.format_exc())
- return Response(data={'error': 'unexpected exception'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
-
-
-class artifacts(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- # TODO
- return None
-
-
-class vnfpkg_subscriptions(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- # TODO
- return None
-
-
-class vnfpkg_subscription(APIView):
- @swagger_auto_schema(
- responses={
- # status.HTTP_200_OK: Serializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def get(self, request):
- # TODO
- return None
-
- @swagger_auto_schema(
- # request_body=CreateVnfReqSerializer(),
- responses={
- # status.HTTP_201_CREATED: CreateVnfRespSerializer(),
- status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
- }
- )
- def post(self, request):
- # TODO
- return None
diff --git a/catalog/pub/database/models.py b/catalog/pub/database/models.py
index 889a7e6d..3b9e909a 100644
--- a/catalog/pub/database/models.py
+++ b/catalog/pub/database/models.py
@@ -20,6 +20,7 @@ class NSPackageModel(models.Model):
nsPackageUri = models.CharField(db_column='NSPACKAGEURI', max_length=300, null=True, blank=True)
checksum = models.CharField(db_column='CHECKSUM', max_length=50, null=True, blank=True) # checksum
sdcCsarId = models.CharField(db_column='SDCCSARID', max_length=50, null=True, blank=True) # SdcCSARUri
+ onboardingState = models.CharField(db_column='ONBOARDINGSTATE', max_length=20, blank=True, null=True)
operationalState = models.CharField(db_column='OPERATIONALSTATE', max_length=20, blank=True, null=True) # operationalState
usageState = models.CharField(db_column='USAGESTATE', max_length=20, blank=True, null=True) # usageState
deletionPending = models.CharField(db_column='DELETIONPENDING', max_length=20, blank=True, null=True) # deletionPending
@@ -59,6 +60,28 @@ class VnfPackageModel(models.Model):
db_table = 'CATALOG_VNFPACKAGE'
+class PnfPackageModel(models.Model):
+ # uuid = models.CharField(db_column='UUID', primary_key=True, max_length=255)
+ pnfPackageId = models.CharField(db_column='PNFPACKAGEID', primary_key=True, max_length=50) # onboardedPnfPkgInfoId
+ pnfPackageUri = models.CharField(db_column='PNFPACKAGEURI', max_length=300, null=True, blank=True) # downloadUri
+ SdcCSARUri = models.CharField(db_column='SDCCSARURI', max_length=300, null=True, blank=True) # SdcCSARUri
+ checksum = models.CharField(db_column='CHECKSUM', max_length=50, null=True, blank=True) # checksum
+ onboardingState = models.CharField(db_column='ONBOARDINGSTATE', max_length=20, blank=True, null=True)
+ usageState = models.CharField(db_column='USAGESTATE', max_length=20, blank=True, null=True) # usageState
+ deletionPending = models.CharField(db_column='DELETIONPENDING', max_length=20, blank=True, null=True) # deletionPending
+ pnfdId = models.CharField(db_column='PNFDID', max_length=50, blank=True, null=True) # pnfdId
+ pnfVendor = models.CharField(db_column='VENDOR', max_length=50, blank=True, null=True) # pnfProvider
+ pnfdProductName = models.CharField(db_column='PNFDPRODUCTNAME', max_length=50, blank=True, null=True) # pnfProductName
+ pnfdVersion = models.CharField(db_column='PNFDVERSION', max_length=20, blank=True, null=True) # pnfdVersion
+ pnfSoftwareVersion = models.CharField(db_column='PNFSOFTWAREVERSION', max_length=20, blank=True, null=True) # pnfSoftwareVersion
+ userDefinedData = models.TextField(db_column='USERDEFINEDDATA', max_length=1024, blank=True, null=True) # userDefinedData
+ localFilePath = models.CharField(db_column='LOCALFILEPATH', max_length=300, null=True, blank=True)
+ pnfdModel = models.TextField(db_column='PNFDMODEL', max_length=65535, blank=True, null=True) # pnfd
+
+ class Meta:
+ db_table = 'CATALOG_PNFPACKAGE'
+
+
class SoftwareImageModel(models.Model):
imageid = models.CharField(db_column='IMAGEID', primary_key=True, max_length=50)
containerFormat = models.CharField(db_column='CONTAINERFORMAT', max_length=20)
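
The new onboardingState column on NSPackageModel and the new PnfPackageModel table back the onboarding-state reporting introduced earlier in this patch. A minimal ORM sketch; the package ids are illustrative and the state values mirror those used in the views ("CREATED", "NOT_IN_USE"):

    from catalog.pub.database.models import NSPackageModel, PnfPackageModel

    # NS packages now record their onboarding state alongside operational and usage state.
    NSPackageModel.objects.filter(nsPackageId="example-nsd-info-id").update(onboardingState="ONBOARDED")

    # PNF packages get their own table with the same life-cycle field.
    PnfPackageModel(
        pnfPackageId="example-pnfd-info-id",
        onboardingState="CREATED",
        usageState="NOT_IN_USE",
        userDefinedData="{}").save()
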
diff --git a/catalog/pub/utils/toscaparser/__init__.py b/catalog/pub/utils/toscaparser/__init__.py
index 56c020ed..604bb23f 100644
--- a/catalog/pub/utils/toscaparser/__init__.py
+++ b/catalog/pub/utils/toscaparser/__init__.py
@@ -30,3 +30,7 @@ def parse_vnfd(path, input_parameters=[]):
strResponse = json.dumps(tosca_obj, default=lambda obj: obj.__dict__)
strResponse = strResponse.replace(': null', ': ""')
return strResponse
+
+
+def parse_pnfd(path, input_parameters=[]):
+ pass
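
parse_pnfd is added here as a stub; the existing parse_vnfd sibling shows the intended shape: take a CSAR path plus input parameters and return the parsed model as a JSON string. A hedged usage sketch, with an illustrative path:

    import json
    from catalog.pub.utils import toscaparser

    vnfd_json = toscaparser.parse_vnfd("/path/to/vnf.csar", input_parameters=[])
    vnfd = json.loads(vnfd_json)

    # parse_pnfd has the same signature but no implementation yet, so callers must not
    # rely on its return value until the stub is filled in.
    toscaparser.parse_pnfd("/path/to/pnf.csar", input_parameters=[])
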
diff --git a/docker/build_image.sh b/docker/build_image.sh
index b42369f1..57fa076f 100755
--- a/docker/build_image.sh
+++ b/docker/build_image.sh
@@ -6,7 +6,7 @@ cd ${DOCKER_BUILD_DIR}
BUILD_ARGS="--no-cache"
ORG="onap"
-VERSION="1.1.0"
+VERSION="1.2.0"
PROJECT="vfc"
IMAGE="catalog"
DOCKER_REPOSITORY="nexus3.onap.org:10003"
diff --git a/pom.xml b/pom.xml
index 54b03c6b..98a69e59 100644
--- a/pom.xml
+++ b/pom.xml
@@ -18,12 +18,12 @@
<parent>
<groupId>org.onap.oparent</groupId>
<artifactId>oparent</artifactId>
- <version>1.1.0</version>
+ <version>1.2.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<groupId>org.onap.vfc.nfvo.catalog</groupId>
<artifactId>vfc-nfvo-catalog</artifactId>
- <version>1.1.0-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<packaging>pom</packaging>
<name>vfc-nfvo-catalog</name>
<description>vfc nfvo catalog</description>
diff --git a/version.properties b/version.properties
index e1751011..ad491ae4 100644
--- a/version.properties
+++ b/version.properties
@@ -17,7 +17,7 @@
# because they are used in Jenkins, whose plug-in doesn't support
major=1
-minor=1
+minor=2
patch=0
base_version=${major}.${minor}.${patch}