aboutsummaryrefslogtreecommitdiffstats
path: root/catalog
diff options
context:
space:
mode:
Diffstat (limited to 'catalog')
-rw-r--r--catalog/__init__.py16
-rw-r--r--catalog/jobs/__init__.py13
-rw-r--r--catalog/jobs/job_get.py46
-rw-r--r--catalog/jobs/tests/__init__.py13
-rw-r--r--catalog/jobs/tests/tests.py40
-rw-r--r--catalog/jobs/urls.py20
-rw-r--r--catalog/jobs/views.py124
-rw-r--r--catalog/log.yml54
-rw-r--r--catalog/middleware.py60
-rw-r--r--catalog/packages/__init__.py13
-rw-r--r--catalog/packages/biz/__init__.py13
-rw-r--r--catalog/packages/biz/common.py51
-rw-r--r--catalog/packages/biz/ns_descriptor.py239
-rw-r--r--catalog/packages/biz/nsdm_subscription.py218
-rw-r--r--catalog/packages/biz/pnf_descriptor.py226
-rw-r--r--catalog/packages/biz/sdc_ns_package.py172
-rw-r--r--catalog/packages/biz/sdc_service_package.py124
-rw-r--r--catalog/packages/biz/sdc_vnf_package.py254
-rw-r--r--catalog/packages/biz/service_descriptor.py129
-rw-r--r--catalog/packages/biz/vnf_package.py227
-rw-r--r--catalog/packages/biz/vnf_pkg_artifacts.py43
-rw-r--r--catalog/packages/biz/vnf_pkg_subscription.py190
-rw-r--r--catalog/packages/const.py78
-rw-r--r--catalog/packages/serializers/__init__.py13
-rw-r--r--catalog/packages/serializers/catalog_serializers.py442
-rw-r--r--catalog/packages/serializers/checksum.py30
-rw-r--r--catalog/packages/serializers/create_nsd_info_request.py29
-rw-r--r--catalog/packages/serializers/create_pnfd_info_request.py29
-rw-r--r--catalog/packages/serializers/create_vnf_pkg_info_req.py27
-rw-r--r--catalog/packages/serializers/link.py24
-rw-r--r--catalog/packages/serializers/nsd_info.py161
-rw-r--r--catalog/packages/serializers/nsd_infos.py20
-rw-r--r--catalog/packages/serializers/nsdm_filter_data.py177
-rw-r--r--catalog/packages/serializers/nsdm_subscription.py84
-rw-r--r--catalog/packages/serializers/pnfd_info.py107
-rw-r--r--catalog/packages/serializers/pnfd_infos.py20
-rw-r--r--catalog/packages/serializers/problem_details.py58
-rw-r--r--catalog/packages/serializers/response.py51
-rw-r--r--catalog/packages/serializers/subscription_auth_data.py77
-rw-r--r--catalog/packages/serializers/upload_vnf_pkg_from_uri_req.py36
-rw-r--r--catalog/packages/serializers/vnf_pkg_artifact_info.py39
-rw-r--r--catalog/packages/serializers/vnf_pkg_info.py127
-rw-r--r--catalog/packages/serializers/vnf_pkg_infos.py20
-rw-r--r--catalog/packages/serializers/vnf_pkg_notifications.py117
-rw-r--r--catalog/packages/serializers/vnf_pkg_software_image_info.py96
-rw-r--r--catalog/packages/serializers/vnf_pkg_subscription.py93
-rw-r--r--catalog/packages/tests/__init__.py13
-rw-r--r--catalog/packages/tests/const.py596
-rw-r--r--catalog/packages/tests/test_health_check.py50
-rw-r--r--catalog/packages/tests/test_ns_descriptor.py300
-rw-r--r--catalog/packages/tests/test_nsdm_subscription.py521
-rw-r--r--catalog/packages/tests/test_nspackage.py246
-rw-r--r--catalog/packages/tests/test_pnf_descriptor.py286
-rw-r--r--catalog/packages/tests/test_service_descriptor.py95
-rw-r--r--catalog/packages/tests/test_servicepackage.py481
-rw-r--r--catalog/packages/tests/test_vnf_package.py382
-rw-r--r--catalog/packages/tests/test_vnf_pkg_subscription.py183
-rw-r--r--catalog/packages/tests/test_vnfpackage.py258
-rw-r--r--catalog/packages/urls.py76
-rw-r--r--catalog/packages/views/__init__.py13
-rw-r--r--catalog/packages/views/catalog_views.py535
-rw-r--r--catalog/packages/views/common.py123
-rw-r--r--catalog/packages/views/health_check_views.py31
-rw-r--r--catalog/packages/views/ns_descriptor_views.py139
-rw-r--r--catalog/packages/views/nsdm_subscription_views.py127
-rw-r--r--catalog/packages/views/pnf_descriptor_views.py166
-rw-r--r--catalog/packages/views/vnf_package_artifact_views.py54
-rw-r--r--catalog/packages/views/vnf_package_subscription_views.py120
-rw-r--r--catalog/packages/views/vnf_package_views.py168
-rw-r--r--catalog/pub/__init__.py13
-rw-r--r--catalog/pub/config/__init__.py13
-rw-r--r--catalog/pub/config/config.py86
-rw-r--r--catalog/pub/database/__init__.py13
-rw-r--r--catalog/pub/database/admin.py361
-rw-r--r--catalog/pub/database/migrations/0001_initial.py229
-rw-r--r--catalog/pub/database/migrations/__init__.py13
-rw-r--r--catalog/pub/database/models.py234
-rw-r--r--catalog/pub/exceptions.py57
-rw-r--r--catalog/pub/msapi/__init__.py13
-rw-r--r--catalog/pub/msapi/extsys.py175
-rw-r--r--catalog/pub/msapi/sdc.py129
-rw-r--r--catalog/pub/redisco/__init__.py58
-rw-r--r--catalog/pub/redisco/containers.py116
-rw-r--r--catalog/pub/ssl/cert/foobar.crt20
-rw-r--r--catalog/pub/ssl/cert/foobar.csr18
-rw-r--r--catalog/pub/ssl/cert/foobar.key27
-rw-r--r--catalog/pub/utils/__init__.py13
-rw-r--r--catalog/pub/utils/fileutil.py78
-rw-r--r--catalog/pub/utils/idutil.py20
-rw-r--r--catalog/pub/utils/jobutil.py145
-rw-r--r--catalog/pub/utils/restcall.py114
-rw-r--r--catalog/pub/utils/syscomm.py19
-rw-r--r--catalog/pub/utils/tests.py221
-rw-r--r--catalog/pub/utils/timeutil.py19
-rw-r--r--catalog/pub/utils/toscaparser/__init__.py54
-rw-r--r--catalog/pub/utils/toscaparser/basemodel.py534
-rw-r--r--catalog/pub/utils/toscaparser/const.py30
-rw-r--r--catalog/pub/utils/toscaparser/dataentityext.py33
-rw-r--r--catalog/pub/utils/toscaparser/graph.py74
-rw-r--r--catalog/pub/utils/toscaparser/nsdmodel.py220
-rw-r--r--catalog/pub/utils/toscaparser/pnfmodel.py53
-rw-r--r--catalog/pub/utils/toscaparser/sdmodel.py93
-rw-r--r--catalog/pub/utils/toscaparser/servicemodel.py188
-rw-r--r--catalog/pub/utils/toscaparser/testdata/ns/ran.csarbin0 -> 3007 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/ns/service-vIMS.csarbin0 -> 47518 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/pnf/ran-du.csarbin0 -> 2688 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vSBC.csarbin0 -> 11516 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/infra.csarbin0 -> 15716 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbng.csarbin0 -> 15357 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbrgemu.csarbin0 -> 14527 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgmux.csarbin0 -> 14970 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgw.csarbin0 -> 15008 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/infra.csarbin0 -> 15432 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbng.csarbin0 -> 15410 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbrgemu.csarbin0 -> 14569 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgmux.csarbin0 -> 15023 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgw.csarbin0 -> 23182 bytes
-rw-r--r--catalog/pub/utils/toscaparser/tests.py101
-rw-r--r--catalog/pub/utils/toscaparser/vnfdmodel.py48
-rw-r--r--catalog/pub/utils/toscaparser/vnfdparser/__init__.py23
-rw-r--r--catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_251.py300
-rw-r--r--catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_base.py236
-rw-r--r--catalog/pub/utils/values.py33
-rw-r--r--catalog/samples/__init__.py13
-rw-r--r--catalog/samples/tests.py45
-rw-r--r--catalog/samples/urls.py22
-rw-r--r--catalog/samples/views.py66
-rw-r--r--catalog/settings.py197
-rw-r--r--catalog/swagger/__init__.py13
-rw-r--r--catalog/swagger/management/__init__.py13
-rw-r--r--catalog/swagger/management/commands/__init__.py13
-rw-r--r--catalog/swagger/management/commands/export_swagger.py36
-rw-r--r--catalog/swagger/tests.py28
-rw-r--r--catalog/swagger/urls.py43
-rw-r--r--catalog/swagger/vfc.catalog.swagger.json793
-rw-r--r--catalog/swagger/views.py28
-rw-r--r--catalog/urls.py37
-rw-r--r--catalog/wsgi.py21
138 files changed, 14795 insertions, 0 deletions
diff --git a/catalog/__init__.py b/catalog/__init__.py
new file mode 100644
index 0000000..68cf954
--- /dev/null
+++ b/catalog/__init__.py
@@ -0,0 +1,16 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import pymysql
+
+pymysql.install_as_MySQLdb()
diff --git a/catalog/jobs/__init__.py b/catalog/jobs/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/catalog/jobs/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/jobs/job_get.py b/catalog/jobs/job_get.py
new file mode 100644
index 0000000..32ee243
--- /dev/null
+++ b/catalog/jobs/job_get.py
@@ -0,0 +1,46 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+from catalog.pub.utils.jobutil import JobUtil
+
+logger = logging.getLogger(__name__)
+
+
+class GetJobInfoService(object):
+ def __init__(self, job_id, response_id=0):
+ self.job_id = job_id
+ self.response_id = response_id if response_id else 0
+
+ def do_biz(self):
+ logger.debug("[getjob]job_id=%s, response_id=%s", self.job_id, self.response_id)
+ jobs = JobUtil.query_job_status(self.job_id, self.response_id)
+ if not jobs:
+ return {"jobId": self.job_id}
+ ret = {
+ "jobId": self.job_id,
+ "responseDescriptor": {
+ "status": jobs[0].status,
+ "progress": jobs[0].progress,
+ "statusDescription": jobs[0].descp,
+ "errorCode": jobs[0].errcode,
+ "responseId": jobs[0].indexid,
+ "responseHistoryList": [
+ {
+ "status": job.status,
+ "progress": job.progress,
+ "statusDescription": job.descp,
+ "errorCode": job.errcode,
+ "responseId": job.indexid} for job in jobs[1:]]}}
+ return ret
diff --git a/catalog/jobs/tests/__init__.py b/catalog/jobs/tests/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/catalog/jobs/tests/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/jobs/tests/tests.py b/catalog/jobs/tests/tests.py
new file mode 100644
index 0000000..460c854
--- /dev/null
+++ b/catalog/jobs/tests/tests.py
@@ -0,0 +1,40 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from django.test import TestCase, Client
+from rest_framework import status
+
+from catalog.pub.database.models import JobModel, JobStatusModel
+
+
+class JobsViewTest(TestCase):
+ def setUp(self):
+ self.job_id = 'test_job_id'
+ self.client = Client()
+
+ def tearDown(self):
+ JobModel.objects.all().delete()
+
+ def test_job_normal(self):
+ JobModel(jobid=self.job_id, jobtype='VNF', jobaction='INST', resid='1').save()
+ JobStatusModel(indexid=1, jobid=self.job_id, status='inst', errcode='0', progress=20, descp='inst').save()
+ response = self.client.get("/api/catalog/v1/jobs/%s" % self.job_id)
+ self.assertEqual(status.HTTP_200_OK, response.status_code)
+
+ def test_job_when_jobid_not_exist(self):
+ job_id = 'test_new_job_id'
+ JobModel(jobid=self.job_id, jobtype='VNF', jobaction='INST', resid='1').save()
+ JobStatusModel(indexid=1, jobid=self.job_id, status='inst', progress=20, descp='inst').save()
+ response = self.client.get("/api/catalog/v1/jobs/%s" % job_id)
+ self.assertIn('jobId', response.data)
+ self.assertNotIn('responseDescriptor', response.data)
diff --git a/catalog/jobs/urls.py b/catalog/jobs/urls.py
new file mode 100644
index 0000000..ea1fcd6
--- /dev/null
+++ b/catalog/jobs/urls.py
@@ -0,0 +1,20 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from django.conf.urls import url
+
+from catalog.jobs.views import JobView
+
+urlpatterns = [
+ url(r'^api/catalog/v1/jobs/(?P<job_id>[0-9a-zA-Z_-]+)$', JobView.as_view()),
+]
diff --git a/catalog/jobs/views.py b/catalog/jobs/views.py
new file mode 100644
index 0000000..123af00
--- /dev/null
+++ b/catalog/jobs/views.py
@@ -0,0 +1,124 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+from drf_yasg import openapi
+from drf_yasg.utils import swagger_auto_schema
+from rest_framework import status
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+from catalog.jobs.job_get import GetJobInfoService
+from catalog.packages.serializers.catalog_serializers import GetJobResponseSerializer
+from catalog.packages.serializers.catalog_serializers import PostJobRequestSerializer
+from catalog.packages.serializers.catalog_serializers import PostJobResponseResultSerializer
+from catalog.pub.utils.jobutil import JobUtil
+from catalog.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
+class JobView(APIView):
+
+ input_job_id = openapi.Parameter(
+ 'job_id',
+ openapi.IN_QUERY,
+ description="job id",
+ type=openapi.TYPE_STRING)
+ input_response_id = openapi.Parameter(
+ 'responseId',
+ openapi.IN_QUERY,
+ description="response id",
+ type=openapi.TYPE_STRING)
+
+ @swagger_auto_schema(
+ operation_description="Get job status",
+ manual_parameters=[input_job_id, input_response_id],
+ responses={
+ status.HTTP_200_OK: GetJobResponseSerializer(),
+ status.HTTP_500_INTERNAL_SERVER_ERROR: PostJobResponseResultSerializer()
+ })
+ def get(self, request, job_id):
+ response_id = ignore_case_get(request.META, 'responseId')
+ ret = GetJobInfoService(job_id, response_id).do_biz()
+ response_serializer = GetJobResponseSerializer(data=ret)
+ validataion_error = self.handleValidatonError(
+ response_serializer, False)
+ if validataion_error:
+ return validataion_error
+
+ return Response(
+ data=response_serializer.data,
+ status=status.HTTP_200_OK)
+
+    @swagger_auto_schema(
+        request_body=PostJobRequestSerializer(),
+        operation_description="Update job status",
+        manual_parameters=[input_job_id],
+        responses={
+            status.HTTP_202_ACCEPTED: PostJobResponseResultSerializer(),
+            status.HTTP_500_INTERNAL_SERVER_ERROR: PostJobResponseResultSerializer()
+        }
+    )
+    def post(self, request, job_id):
+        job_result_ok = {'result': 'ok'}
+
+        logger.debug("Enter JobView:post, %s, %s ", job_id, request.data)
+        jobs = JobUtil.query_job_status(job_id)
+        if len(jobs) > 0 and jobs[-1].errcode == '255':
+            return Response(data=job_result_ok)
+
+        request_serializer = PostJobRequestSerializer(data=request.data)
+        validataion_error = self.handleValidatonError(
+            request_serializer, True)
+        if validataion_error:
+            return validataion_error
+
+        requestData = request_serializer.data
+        progress = ignore_case_get(requestData, "progress")
+        desc = ignore_case_get(requestData, "desc", '%s' % progress)
+        errcode = '0' if ignore_case_get(
+            requestData, 'errcode') in (
+            'true', 'active') else '255'
+        logger.debug("errcode=%s", errcode)
+        JobUtil.add_job_status(job_id, progress, desc, error_code=errcode)
+
+        response_serializer = PostJobResponseResultSerializer(
+            data=job_result_ok)
+        validataion_error = self.handleValidatonError(
+            response_serializer, False)
+        if validataion_error:
+            return validataion_error
+
+        return Response(
+            data=response_serializer.data,
+            status=status.HTTP_202_ACCEPTED)
+
+    def handleValidatonError(self, base_serializer, is_request):
+        response = None
+
+        if not base_serializer.is_valid():
+            errormessage = base_serializer.errors
+            logger.error(errormessage)
+
+            if is_request:
+                message = 'Invalid request'
+            else:
+                message = 'Invalid response'
+            logger.error(message)
+
+            response = Response(
+                data={'result': message, 'msg': errormessage},
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+        return response
diff --git a/catalog/log.yml b/catalog/log.yml
new file mode 100644
index 0000000..5ac5fef
--- /dev/null
+++ b/catalog/log.yml
@@ -0,0 +1,54 @@
+version: 1
+disable_existing_loggers: False
+
+loggers:
+ catalog:
+ handlers: [cataloglocal_handler, catalog_handler]
+ level: "DEBUG"
+ propagate: False
+ django:
+ handlers: [django_handler]
+ level: "DEBUG"
+ propagate: False
+ tosca:
+ handlers: [cataloglocal_handler, catalog_handler]
+ level: "DEBUG"
+ propagate: False
+handlers:
+ cataloglocal_handler:
+ level: "DEBUG"
+ class:
+ "logging.handlers.RotatingFileHandler"
+ filename: "logs/runtime_catalog.log"
+ formatter:
+ "standard"
+ maxBytes: 52428800
+ backupCount: 10
+ catalog_handler:
+ level: "DEBUG"
+ class:
+ "logging.handlers.RotatingFileHandler"
+ filename: "/var/log/onap/modeling/etsicatalog/runtime_catalog.log"
+ formatter:
+ "mdcFormat"
+ maxBytes: 52428800
+ backupCount: 10
+ django_handler:
+ level: "DEBUG"
+ class:
+ "logging.handlers.RotatingFileHandler"
+ filename: "logs/django.log"
+ formatter:
+ "standard"
+ maxBytes: 52428800
+ backupCount: 10
+formatters:
+ standard:
+ format:
+ "%(asctime)s:[%(name)s]:[%(filename)s]-[%(lineno)d] [%(levelname)s]:%(message)s"
+ mdcFormat:
+ format:
+ "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s \t"
+ mdcfmt: "{requestID} {invocationID} {serviceName} {serviceIP}"
+ datefmt: "%Y-%m-%d %H:%M:%S"
+ (): onaplogging.mdcformatter.MDCFormatter
diff --git a/catalog/middleware.py b/catalog/middleware.py
new file mode 100644
index 0000000..cd6aa8b
--- /dev/null
+++ b/catalog/middleware.py
@@ -0,0 +1,60 @@
+# Copyright (c) 2017-2018 ZTE, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+
+import uuid
+from onaplogging.mdcContext import MDC
+
+from catalog.pub.config.config import FORWARDED_FOR_FIELDS, SERVICE_NAME
+
+
+class LogContextMiddleware(object):
+ # the last IP behind multiple proxies, if no exist proxies
+ # get local host ip.
+ def _getLastIp(self, request):
+
+ ip = ""
+ try:
+ for field in FORWARDED_FOR_FIELDS:
+ if field in request.META:
+ if ',' in request.META[field]:
+ parts = request.META[field].split(',')
+ ip = parts[-1].strip().split(":")[0]
+ else:
+ ip = request.META[field].split(":")[0]
+
+ if ip == "":
+ ip = request.META.get("HTTP_HOST").split(":")[0]
+
+ except Exception:
+ pass
+
+ return ip
+
+ def process_request(self, request):
+ # Fetch TRANSACTIONID Id and pass to plugin server
+ ReqeustID = request.META.get("HTTP_X_ONAP-RequestID", None)
+ if ReqeustID is None:
+ ReqeustID = uuid.uuid3(uuid.NAMESPACE_URL, SERVICE_NAME)
+ request.META["HTTP_X_ONAP-RequestID"] = ReqeustID
+ MDC.put("requestID", ReqeustID)
+ # generate the unique id
+ InovocationID = uuid.uuid3(uuid.NAMESPACE_DNS, SERVICE_NAME)
+ MDC.put("invocationID", InovocationID)
+ MDC.put("serviceName", SERVICE_NAME)
+ # access ip
+ MDC.put("serviceIP", self._getLastIp(request))
+
+ return None
+
+ def process_response(self, request, response):
+ MDC.clear()
+ return response
diff --git a/catalog/packages/__init__.py b/catalog/packages/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/catalog/packages/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/packages/biz/__init__.py b/catalog/packages/biz/__init__.py
new file mode 100644
index 0000000..342c2a8
--- /dev/null
+++ b/catalog/packages/biz/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/packages/biz/common.py b/catalog/packages/biz/common.py
new file mode 100644
index 0000000..ce77a41
--- /dev/null
+++ b/catalog/packages/biz/common.py
@@ -0,0 +1,51 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.utils import fileutil
+
+CHUNK_SIZE = 1024 * 8
+
+
+def save(remote_file, vnf_pkg_id):
+ local_file_name = remote_file.name
+ local_file_dir = os.path.join(CATALOG_ROOT_PATH, vnf_pkg_id)
+ local_file_name = os.path.join(local_file_dir, local_file_name)
+ if not os.path.exists(local_file_dir):
+ fileutil.make_dirs(local_file_dir)
+ with open(local_file_name, 'wb') as local_file:
+ for chunk in remote_file.chunks(chunk_size=CHUNK_SIZE):
+ local_file.write(chunk)
+ return local_file_name
+
+
+def read(file_path, start, end):
+    with open(file_path, 'rb') as fp:
+        fp.seek(start)
+        pos = start
+        while pos + CHUNK_SIZE < end:
+            yield fp.read(CHUNK_SIZE)
+            pos = fp.tell()
+        yield fp.read(end - pos)
+
+
+def parse_file_range(file_path, file_range):
+ start, end = 0, os.path.getsize(file_path)
+ if file_range:
+ [start, range_end] = file_range.split('-')
+ range_end = range_end.strip() if range_end.strip() else end
+ start, end = int(start.strip()), int(range_end)
+ return start, end
diff --git a/catalog/packages/biz/ns_descriptor.py b/catalog/packages/biz/ns_descriptor.py
new file mode 100644
index 0000000..f0e0572
--- /dev/null
+++ b/catalog/packages/biz/ns_descriptor.py
@@ -0,0 +1,239 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import json
+import logging
+import os
+import uuid
+
+from catalog.packages.biz.common import parse_file_range, read, save
+from catalog.packages.const import PKG_STATUS
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import NSPackageModel, PnfPackageModel, VnfPackageModel
+from catalog.pub.exceptions import CatalogException, ResourceNotFoundException
+from catalog.pub.utils import fileutil, toscaparser
+from catalog.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+METADATA = "metadata"
+
+
+class NsDescriptor(object):
+
+ def __init__(self):
+ pass
+
+ def create(self, data, id=None):
+ logger.info('Start to create a NSD...')
+ user_defined_data = ignore_case_get(data, 'userDefinedData', {})
+ data = {
+ 'id': id if id else str(uuid.uuid4()),
+ 'nsdOnboardingState': PKG_STATUS.CREATED,
+ 'nsdOperationalState': PKG_STATUS.DISABLED,
+ 'nsdUsageState': PKG_STATUS.NOT_IN_USE,
+ 'userDefinedData': user_defined_data,
+ '_links': None # TODO
+ }
+ NSPackageModel.objects.create(
+ nsPackageId=data['id'],
+ onboardingState=data['nsdOnboardingState'],
+ operationalState=data['nsdOperationalState'],
+ usageState=data['nsdUsageState'],
+ userDefinedData=json.dumps(user_defined_data)
+ )
+ logger.info('A NSD(%s) has been created.' % data['id'])
+ return data
+
+ def query_multiple(self, nsdId=None):
+ if nsdId:
+ ns_pkgs = NSPackageModel.objects.filter(nsdId=nsdId)
+ else:
+ ns_pkgs = NSPackageModel.objects.all()
+ response_data = []
+ for ns_pkg in ns_pkgs:
+ data = self.fill_resp_data(ns_pkg)
+ response_data.append(data)
+ return response_data
+
+ def query_single(self, nsd_info_id):
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ logger.error('NSD(%s) does not exist.' % nsd_info_id)
+ raise ResourceNotFoundException('NSD(%s) does not exist.' % nsd_info_id)
+ return self.fill_resp_data(ns_pkgs[0])
+
+ def delete_single(self, nsd_info_id):
+ logger.info('Start to delete NSD(%s)...' % nsd_info_id)
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ logger.info('NSD(%s) has been deleted.' % nsd_info_id)
+ return
+ '''
+ if ns_pkgs[0].operationalState != PKG_STATUS.DISABLED:
+ logger.error('NSD(%s) shall be DISABLED.' % nsd_info_id)
+ raise CatalogException('NSD(%s) shall be DISABLED.' % nsd_info_id)
+ if ns_pkgs[0].usageState != PKG_STATUS.NOT_IN_USE:
+ logger.error('NSD(%s) shall be NOT_IN_USE.' % nsd_info_id)
+ raise CatalogException('NSD(%s) shall be NOT_IN_USE.' % nsd_info_id)
+ '''
+ ns_pkgs.delete()
+ ns_pkg_path = os.path.join(CATALOG_ROOT_PATH, nsd_info_id)
+ fileutil.delete_dirs(ns_pkg_path)
+ logger.info('NSD(%s) has been deleted.' % nsd_info_id)
+
+ def upload(self, nsd_info_id, remote_file):
+ logger.info('Start to upload NSD(%s)...' % nsd_info_id)
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ logger.error('NSD(%s) does not exist.' % nsd_info_id)
+ raise CatalogException('NSD(%s) does not exist.' % nsd_info_id)
+ ns_pkgs.update(onboardingState=PKG_STATUS.UPLOADING)
+
+ local_file_name = save(remote_file, nsd_info_id)
+ logger.info('NSD(%s) content has been uploaded.' % nsd_info_id)
+ return local_file_name
+
+ def download(self, nsd_info_id, file_range):
+ logger.info('Start to download NSD(%s)...' % nsd_info_id)
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ if not ns_pkgs.exists():
+ logger.error('NSD(%s) does not exist.' % nsd_info_id)
+ raise ResourceNotFoundException('NSD(%s) does not exist.' % nsd_info_id)
+ if ns_pkgs[0].onboardingState != PKG_STATUS.ONBOARDED:
+ logger.error('NSD(%s) is not ONBOARDED.' % nsd_info_id)
+ raise CatalogException('NSD(%s) is not ONBOARDED.' % nsd_info_id)
+
+ local_file_path = ns_pkgs[0].localFilePath
+ start, end = parse_file_range(local_file_path, file_range)
+ logger.info('NSD(%s) has been downloaded.' % nsd_info_id)
+ return read(local_file_path, start, end)
+
+ def parse_nsd_and_save(self, nsd_info_id, local_file_name):
+ logger.info('Start to process NSD(%s)...' % nsd_info_id)
+ ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ ns_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)
+
+ nsd_json = toscaparser.parse_nsd(local_file_name)
+ logger.debug("%s", nsd_json)
+ nsd = json.JSONDecoder().decode(nsd_json)
+
+ nsd_id = nsd.get("ns", {}).get("properties", {}).get("descriptor_id", "")
+ nsd_name = nsd.get("ns", {}).get("properties", {}).get("name", "")
+ nsd_version = nsd.get("ns", {}).get("properties", {}).get("version", "")
+ nsd_designer = nsd.get("ns", {}).get("properties", {}).get("designer", "")
+ invariant_id = nsd.get("ns", {}).get("properties", {}).get("invariant_id", "")
+ if nsd_id == "":
+ raise CatalogException("nsd_id(%s) does not exist in metadata." % nsd_id)
+ other_nspkg = NSPackageModel.objects.filter(nsdId=nsd_id)
+ if other_nspkg and other_nspkg[0].nsPackageId != nsd_info_id:
+ logger.warn("NSD(%s,%s) already exists.", nsd_id, other_nspkg[0].nsPackageId)
+ raise CatalogException("NSD(%s) already exists." % nsd_id)
+
+ for vnf in nsd["vnfs"]:
+ vnfd_id = vnf["properties"].get("descriptor_id", "undefined")
+ if vnfd_id == "undefined":
+ vnfd_id = vnf["properties"].get("id", "undefined")
+ pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+ if not pkg:
+ pkg = VnfPackageModel.objects.filter(vnfPackageId=vnfd_id)
+ if not pkg:
+ vnfd_name = vnf.get("vnf_id", "undefined")
+ logger.error("[%s] is not distributed.", vnfd_name)
+ raise CatalogException("VNF package(%s) is not distributed." % vnfd_id)
+
+ for pnf in nsd["pnfs"]:
+ pnfd_id = pnf["properties"].get("descriptor_id", "undefined")
+ if pnfd_id == "undefined":
+ pnfd_id = pnf["properties"].get("id", "undefined")
+ pkg = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
+ if not pkg:
+ pkg = PnfPackageModel.objects.filter(pnfPackageId=pnfd_id)
+ if not pkg:
+ pnfd_name = pnf.get("pnf_id", "undefined")
+ logger.error("[%s] is not distributed.", pnfd_name)
+ raise CatalogException("PNF package(%s) is not distributed." % pnfd_name)
+
+ ns_pkgs.update(
+ nsdId=nsd_id,
+ nsdName=nsd_name,
+ nsdDesginer=nsd_designer,
+ nsdDescription=nsd.get("description", ""),
+ nsdVersion=nsd_version,
+ invariantId=invariant_id,
+ onboardingState=PKG_STATUS.ONBOARDED,
+ operationalState=PKG_STATUS.ENABLED,
+ usageState=PKG_STATUS.NOT_IN_USE,
+ nsPackageUri=local_file_name,
+ sdcCsarId=nsd_info_id,
+ localFilePath=local_file_name,
+ nsdModel=nsd_json
+ )
+ logger.info('NSD(%s) has been processed.' % nsd_info_id)
+
+ def fill_resp_data(self, ns_pkg):
+ data = {
+ 'id': ns_pkg.nsPackageId,
+ 'nsdId': ns_pkg.nsdId,
+ 'nsdName': ns_pkg.nsdName,
+ 'nsdVersion': ns_pkg.nsdVersion,
+ 'nsdDesigner': ns_pkg.nsdDesginer,
+ 'nsdInvariantId': ns_pkg.invariantId,
+ 'vnfPkgIds': [],
+ 'pnfdInfoIds': [], # TODO
+ 'nestedNsdInfoIds': [], # TODO
+ 'nsdOnboardingState': ns_pkg.onboardingState,
+ 'onboardingFailureDetails': None, # TODO
+ 'nsdOperationalState': ns_pkg.operationalState,
+ 'nsdUsageState': ns_pkg.usageState,
+ 'userDefinedData': {},
+ '_links': None # TODO
+ }
+
+ if ns_pkg.nsdModel:
+ nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
+ vnf_pkg_ids = []
+ for vnf in nsd_model['vnfs']:
+ vnfd_id = vnf["properties"].get("descriptor_id", "undefined")
+ if vnfd_id == "undefined":
+ vnfd_id = vnf["properties"].get("id", "undefined")
+ pkgs = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+ if not pkgs:
+ pkgs = VnfPackageModel.objects.filter(vnfPackageId=vnfd_id)
+ for pkg in pkgs:
+ vnf_pkg_ids.append(pkg.vnfPackageId)
+ data['vnfPkgIds'] = vnf_pkg_ids
+
+ pnf_info_ids = []
+ for pnf in nsd_model['pnfs']:
+ pnfd_id = pnf["properties"].get("descriptor_id", "undefined")
+ if pnfd_id == "undefined":
+ pnfd_id = pnf["properties"].get("id", "undefined")
+ pkgs = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
+ if not pkgs:
+ pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_id)
+ for pkg in pkgs:
+ pnf_info_ids.append(pkg.pnfPackageId)
+ data['pnfInfoIds'] = pnf_info_ids # TODO: need reconfirming
+
+ if ns_pkg.userDefinedData:
+ user_defined_data = json.JSONDecoder().decode(ns_pkg.userDefinedData)
+ data['userDefinedData'] = user_defined_data
+
+ return data
+
+ def handle_upload_failed(self, nsd_info_id):
+ ns_pkg = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+ ns_pkg.update(onboardingState=PKG_STATUS.CREATED)
diff --git a/catalog/packages/biz/nsdm_subscription.py b/catalog/packages/biz/nsdm_subscription.py
new file mode 100644
index 0000000..e2af6e4
--- /dev/null
+++ b/catalog/packages/biz/nsdm_subscription.py
@@ -0,0 +1,218 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ast
+import json
+import logging
+import requests
+import uuid
+
+from collections import Counter
+
+from rest_framework import status
+
+from catalog.packages import const
+from catalog.pub.database.models import NsdmSubscriptionModel
+from catalog.pub.exceptions import CatalogException, \
+ ResourceNotFoundException, \
+ NsdmBadRequestException, NsdmDuplicateSubscriptionException
+from catalog.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+PARAMSBASICKEYS = ["userName", "password"]
+
+PARAMSOAUTH2CLIENTCREDENTIALSKEYS = ["clientId", "clientPassword",
+ "tokenEndpoint"]
+
+
+def is_filter_type_equal(new_filter, existing_filter):
+ return Counter(list(set(new_filter))) == Counter(existing_filter)
+
+
+class NsdmSubscription:
+
+ def __init__(self):
+ pass
+
+ def query_single_subscription(self, subscription_id):
+ logger.debug("Start Query Subscription... ")
+ subscription = \
+ NsdmSubscriptionModel.objects.filter(
+ subscriptionid=subscription_id)
+ if not subscription.exists():
+ raise ResourceNotFoundException(
+ "Subscription(%s) doesn't exist" % subscription_id)
+ logger.debug("Subscription found... ")
+ return self.fill_resp_data(subscription[0])
+
+ def delete_single_subscription(self, subscription_id):
+ logger.debug("Start Delete Subscription... ")
+ subscription = \
+ NsdmSubscriptionModel.objects.filter(
+ subscriptionid=subscription_id)
+ if not subscription.exists():
+ raise ResourceNotFoundException(
+ "Subscription(%s) doesn't exist" % subscription_id)
+ subscription.delete()
+ logger.debug("Deleted Subscription... ")
+
+ def query_multi_subscriptions(self, query_params):
+ self.params = query_params
+ query_data = {}
+ logger.debug("Start QueryMultiSubscriptions get --> "
+ "Check for filters in query params: %s" % self.params)
+ for query, value in list(self.params.items()):
+ if query in const.NSDM_NOTIFICATION_FILTERS and value:
+ query_data[query + '__icontains'] = json.dumps(list(set(value)))
+ # Query the database with filters if the request
+ # has fields in request params, else fetch all records
+ if query_data:
+ subscriptions = NsdmSubscriptionModel.objects.filter(**query_data)
+ else:
+ subscriptions = NsdmSubscriptionModel.objects.all()
+ if not subscriptions.exists():
+ raise ResourceNotFoundException("Subscriptions doesn't exist")
+ return [self.fill_resp_data(subscription)
+ for subscription in subscriptions]
+
+ def check_callbackuri_connection(self):
+ logger.debug("Create Subscription --> Test Callback URI --"
+ "Sending GET request to %s" % self.callback_uri)
+ try:
+ response = requests.get(self.callback_uri, timeout=2)
+ if response.status_code != status.HTTP_204_NO_CONTENT:
+ raise CatalogException("callbackUri %s returns %s status "
+ "code." % (self.callback_uri,
+ response.status_code))
+ except Exception:
+ raise CatalogException("callbackUri %s didn't return 204 status "
+ "code." % self.callback_uri)
+
+ def fill_resp_data(self, subscription):
+ subscription_filter = dict()
+ for filter_type in const.NSDM_NOTIFICATION_FILTERS:
+ if subscription.__dict__[filter_type]:
+ subscription_filter[filter_type] = \
+ ast.literal_eval(subscription.__dict__[filter_type])
+ resp_data = {
+ 'id': subscription.subscriptionid,
+ 'callbackUri': subscription.callback_uri,
+ 'filter': subscription_filter,
+ '_links': json.loads(subscription.links)
+ }
+ return resp_data
+
+ def create(self, data):
+ logger.debug("Start Create Subscription... ")
+ self.filter = ignore_case_get(data, "filter", {})
+ self.callback_uri = ignore_case_get(data, "callbackUri")
+ self.authentication = ignore_case_get(data, "authentication", {})
+ self.subscription_id = str(uuid.uuid4())
+ self.check_callbackuri_connection()
+ self.check_valid_auth_info()
+ self.check_filter_types()
+ self.check_valid()
+ self.save_db()
+ subscription = \
+ NsdmSubscriptionModel.objects.get(
+ subscriptionid=self.subscription_id)
+ return self.fill_resp_data(subscription)
+
+ def check_filter_types(self):
+ # Check if both nsdId and nsdInfoId
+ # or pnfdId and pnfdInfoId are present
+ logger.debug("Create Subscription --> Validating Filters... ")
+ if self.filter and \
+ self.filter.get("nsdId", "") and \
+ self.filter.get("nsdInfoId", ""):
+ raise NsdmBadRequestException("Notification Filter should contain"
+ " either nsdId or nsdInfoId")
+ if self.filter and \
+ self.filter.get("pnfdId", "") and \
+ self.filter.get("pnfdInfoIds", ""):
+ raise NsdmBadRequestException("Notification Filter should contain"
+ " either pnfdId or pnfdInfoIds")
+
+ def check_valid_auth_info(self):
+ logger.debug("Create Subscription --> Validating Auth "
+ "details if provided... ")
+ if self.authentication.get("paramsBasic", {}) and \
+ const.BASIC not in self.authentication.get("authType", ''):
+ raise NsdmBadRequestException('Auth type should be ' + const.BASIC)
+ if self.authentication.get("paramsOauth2ClientCredentials", {}) and \
+ const.OAUTH2_CLIENT_CREDENTIALS not in \
+ self.authentication.get("authType", ''):
+ raise NsdmBadRequestException('Auth type should '
+ 'be ' + const.OAUTH2_CLIENT_CREDENTIALS)
+ if const.BASIC in self.authentication.get("authType", '') and \
+ "paramsBasic" in list(self.authentication.keys()) and \
+ not is_filter_type_equal(PARAMSBASICKEYS, list(
+ self.authentication.get("paramsBasic").keys())):
+ raise NsdmBadRequestException('userName and password needed '
+ 'for ' + const.BASIC)
+ if const.OAUTH2_CLIENT_CREDENTIALS in \
+ self.authentication.get("authType", '') and \
+ "paramsOauth2ClientCredentials" in \
+ list(self.authentication.keys()) and \
+ not is_filter_type_equal(PARAMSOAUTH2CLIENTCREDENTIALSKEYS, list(
+ self.authentication.get("paramsOauth2ClientCredentials").keys())):
+ raise NsdmBadRequestException('clientId, clientPassword and '
+ 'tokenEndpoint required '
+ 'for ' + const.OAUTH2_CLIENT_CREDENTIALS)
+
+ def check_filter_exists(self, subscription):
+ for filter_type in const.NSDM_NOTIFICATION_FILTERS:
+ if not is_filter_type_equal(self.filter.get(filter_type, []),
+ ast.literal_eval(
+ getattr(subscription,
+ filter_type))):
+ return False
+ return True
+
+ def check_valid(self):
+ logger.debug("Create Subscription --> Checking DB if "
+ "the same subscription already exists... ")
+ subscriptions = \
+ NsdmSubscriptionModel.objects.filter(
+ callback_uri=self.callback_uri)
+ if not subscriptions.exists():
+ return
+ for subscription in subscriptions:
+ if self.check_filter_exists(subscription):
+ raise NsdmDuplicateSubscriptionException(
+ "Already Subscription exists with the "
+ "same callbackUri and filter")
+
+ def save_db(self):
+ logger.debug("Create Subscription --> Saving the subscription "
+ "%s to the database" % self.subscription_id)
+ links = {
+ "self": {
+ "href":
+ const.NSDM_SUBSCRIPTION_ROOT_URI + self.subscription_id
+ }
+ }
+ subscription_save_db = {
+ "subscriptionid": self.subscription_id,
+ "callback_uri": self.callback_uri,
+ "auth_info": self.authentication,
+ "links": json.dumps(links)
+ }
+ for filter_type in const.NSDM_NOTIFICATION_FILTERS:
+ if self.filter:
+ subscription_save_db[filter_type] = json.dumps(
+ list(set(self.filter.get(filter_type, []))))
+ NsdmSubscriptionModel.objects.create(**subscription_save_db)
+ logger.debug('Create Subscription[%s] success', self.subscription_id)
diff --git a/catalog/packages/biz/pnf_descriptor.py b/catalog/packages/biz/pnf_descriptor.py
new file mode 100644
index 0000000..547c198
--- /dev/null
+++ b/catalog/packages/biz/pnf_descriptor.py
@@ -0,0 +1,226 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import json
+import logging
+import os
+import uuid
+
+from catalog.packages.biz.common import read, save
+from catalog.packages.const import PKG_STATUS
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import NSPackageModel, PnfPackageModel
+from catalog.pub.exceptions import CatalogException, ResourceNotFoundException
+from catalog.pub.utils import fileutil, toscaparser
+from catalog.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
+class PnfDescriptor(object):
+
+ def __init__(self):
+ pass
+
+ def create(self, data):
+ logger.info('Start to create a PNFD...')
+ user_defined_data = ignore_case_get(data, 'userDefinedData', {})
+ data = {
+ 'id': str(uuid.uuid4()),
+ 'pnfdOnboardingState': PKG_STATUS.CREATED,
+ 'pnfdUsageState': PKG_STATUS.NOT_IN_USE,
+ 'userDefinedData': user_defined_data,
+ '_links': None # TODO
+ }
+ PnfPackageModel.objects.create(
+ pnfPackageId=data['id'],
+ onboardingState=data['pnfdOnboardingState'],
+ usageState=data['pnfdUsageState'],
+ userDefinedData=json.dumps(user_defined_data)
+ )
+ logger.info('A PNFD(%s) has been created.' % data['id'])
+ return data
+
+ def query_multiple(self, request):
+ pnfdId = request.query_params.get('pnfdId')
+ if pnfdId:
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfdId=pnfdId)
+ else:
+ pnf_pkgs = PnfPackageModel.objects.all()
+ response_data = []
+ for pnf_pkg in pnf_pkgs:
+ data = self.fill_response_data(pnf_pkg)
+ response_data.append(data)
+ return response_data
+
+ def query_single(self, pnfd_info_id):
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ logger.error('PNFD(%s) does not exist.' % pnfd_info_id)
+ raise ResourceNotFoundException('PNFD(%s) does not exist.' % pnfd_info_id)
+ return self.fill_response_data(pnf_pkgs[0])
+
+ def upload(self, remote_file, pnfd_info_id):
+ logger.info('Start to upload PNFD(%s)...' % pnfd_info_id)
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ logger.info('PNFD(%s) does not exist.' % pnfd_info_id)
+ raise CatalogException('PNFD (%s) does not exist.' % pnfd_info_id)
+ pnf_pkgs.update(onboardingState=PKG_STATUS.UPLOADING)
+
+ local_file_name = save(remote_file, pnfd_info_id)
+ logger.info('PNFD(%s) content has been uploaded.' % pnfd_info_id)
+ return local_file_name
+
+ def delete_single(self, pnfd_info_id):
+ logger.info('Start to delete PNFD(%s)...' % pnfd_info_id)
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ logger.info('PNFD(%s) has been deleted.' % pnfd_info_id)
+ return
+ '''
+ if pnf_pkgs[0].usageState != PKG_STATUS.NOT_IN_USE:
+ logger.info('PNFD(%s) shall be NOT_IN_USE.' % pnfd_info_id)
+ raise CatalogException('PNFD(%s) shall be NOT_IN_USE.' % pnfd_info_id)
+ '''
+ del_pnfd_id = pnf_pkgs[0].pnfdId
+ ns_pkgs = NSPackageModel.objects.all()
+ for ns_pkg in ns_pkgs:
+ nsd_model = None
+ if ns_pkg.nsdModel:
+ nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
+ if not nsd_model:
+ continue
+ for pnf in nsd_model['pnfs']:
+ if del_pnfd_id == pnf["properties"]["id"]:
+ logger.warn("PNFD(%s) is referenced in NSD", del_pnfd_id)
+ raise CatalogException('PNFD(%s) is referenced.' % pnfd_info_id)
+ pnf_pkgs.delete()
+ pnf_pkg_path = os.path.join(CATALOG_ROOT_PATH, pnfd_info_id)
+ fileutil.delete_dirs(pnf_pkg_path)
+ logger.debug('PNFD(%s) has been deleted.' % pnfd_info_id)
+
+ def download(self, pnfd_info_id):
+ logger.info('Start to download PNFD(%s)...' % pnfd_info_id)
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ if not pnf_pkgs.exists():
+ logger.error('PNFD(%s) does not exist.' % pnfd_info_id)
+ raise ResourceNotFoundException('PNFD(%s) does not exist.' % pnfd_info_id)
+ if pnf_pkgs[0].onboardingState != PKG_STATUS.ONBOARDED:
+ logger.error('PNFD(%s) is not ONBOARDED.' % pnfd_info_id)
+ raise CatalogException('PNFD(%s) is not ONBOARDED.' % pnfd_info_id)
+
+ local_file_path = pnf_pkgs[0].localFilePath
+ start, end = 0, os.path.getsize(local_file_path)
+ logger.info('PNFD(%s) has been downloaded.' % pnfd_info_id)
+ return read(local_file_path, start, end)
+
+ def parse_pnfd_and_save(self, pnfd_info_id, local_file_name):
+ logger.info('Start to process PNFD(%s)...' % pnfd_info_id)
+ pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
+ pnf_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)
+ pnfd_json = toscaparser.parse_pnfd(local_file_name)
+ pnfd = json.JSONDecoder().decode(pnfd_json)
+
+ logger.debug("pnfd_json is %s" % pnfd_json)
+ pnfd_id = ""
+ pnfdVersion = ""
+ pnfdProvider = ""
+ pnfdName = ""
+ if pnfd.get("pnf", "") != "":
+ if pnfd["pnf"].get("properties", "") != "":
+ pnfd_id = pnfd["pnf"].get("properties", {}).get("descriptor_id", "")
+ pnfdVersion = pnfd["pnf"].get("properties", {}).get("version", "")
+ pnfdProvider = pnfd["pnf"].get("properties", {}).get("provider", "")
+ pnfdName = pnfd["pnf"].get("properties", {}).get("name", "")
+ if pnfd_id == "":
+ pnfd_id = pnfd["metadata"].get("descriptor_id", "")
+ if pnfd_id == "":
+ pnfd_id = pnfd["metadata"].get("id", "")
+ if pnfd_id == "":
+ pnfd_id = pnfd["metadata"].get("UUID", "")
+ if pnfd_id == "":
+ raise CatalogException('pnfd_id is Null.')
+
+ if pnfdVersion == "":
+ pnfdVersion = pnfd["metadata"].get("template_version", "")
+ if pnfdVersion == "":
+ pnfdVersion = pnfd["metadata"].get("version", "")
+
+ if pnfdProvider == "":
+ pnfdProvider = pnfd["metadata"].get("template_author", "")
+ if pnfdProvider == "":
+ pnfdProvider = pnfd["metadata"].get("provider", "")
+
+ if pnfdName == "":
+ pnfdName = pnfd["metadata"].get("template_name", "")
+ if pnfdName == "":
+ pnfdName = pnfd["metadata"].get("name", "")
+
+ other_pnf = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
+ if other_pnf and other_pnf[0].pnfPackageId != pnfd_info_id:
+ logger.info('PNFD(%s) already exists.' % pnfd_id)
+ raise CatalogException("PNFD(%s) already exists." % pnfd_id)
+
+ pnf_pkgs.update(
+ pnfdId=pnfd_id,
+ pnfdName=pnfdName,
+ pnfdVersion=pnfdVersion,
+ pnfVendor=pnfdProvider,
+ pnfPackageUri=local_file_name,
+ onboardingState=PKG_STATUS.ONBOARDED,
+ usageState=PKG_STATUS.NOT_IN_USE,
+ localFilePath=local_file_name,
+ pnfdModel=pnfd_json
+ )
+ logger.info('PNFD(%s) has been processed.' % pnfd_info_id)
+
+ def fill_response_data(self, pnf_pkg):
+ data = {
+ 'id': pnf_pkg.pnfPackageId,
+ 'pnfdId': pnf_pkg.pnfdId,
+ 'pnfdName': pnf_pkg.pnfdName,
+ 'pnfdVersion': pnf_pkg.pnfdVersion,
+ 'pnfdProvider': pnf_pkg.pnfVendor,
+ 'pnfdInvariantId': None, # TODO
+ 'pnfdOnboardingState': pnf_pkg.onboardingState,
+ 'onboardingFailureDetails': None, # TODO
+ 'pnfdUsageState': pnf_pkg.usageState,
+ 'userDefinedData': {},
+ '_links': None # TODO
+ }
+ if pnf_pkg.userDefinedData:
+ user_defined_data = json.JSONDecoder().decode(pnf_pkg.userDefinedData)
+ data['userDefinedData'] = user_defined_data
+
+ return data
+
+ def handle_upload_failed(self, pnf_pkg_id):
+ pnf_pkg = PnfPackageModel.objects.filter(pnfPackageId=pnf_pkg_id)
+ pnf_pkg.update(onboardingState=PKG_STATUS.CREATED)
+
+ def parse_pnfd(self, csar_id, inputs):
+ try:
+ pnf_pkg = PnfPackageModel.objects.filter(pnfPackageId=csar_id)
+ if not pnf_pkg:
+ raise CatalogException("PNF CSAR(%s) does not exist." % csar_id)
+ csar_path = pnf_pkg[0].localFilePath
+ ret = {"model": toscaparser.parse_pnfd(csar_path, inputs)}
+ except CatalogException as e:
+ return [1, e.args[0]]
+ except Exception as e:
+ logger.error(e.args[0])
+ return [1, e.args[0]]
+ return [0, ret]
diff --git a/catalog/packages/biz/sdc_ns_package.py b/catalog/packages/biz/sdc_ns_package.py
new file mode 100644
index 0000000..4f9d460
--- /dev/null
+++ b/catalog/packages/biz/sdc_ns_package.py
@@ -0,0 +1,172 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import sys
+import traceback
+
+from catalog.pub.config.config import CATALOG_ROOT_PATH, CATALOG_URL_PATH
+from catalog.pub.config.config import REG_TO_MSB_REG_PARAM
+from catalog.pub.database.models import NSPackageModel
+from catalog.pub.exceptions import CatalogException
+from catalog.pub.msapi import sdc
+from catalog.pub.utils import toscaparser
+from catalog.packages.biz.ns_descriptor import NsDescriptor
+from catalog.pub.utils import fileutil
+
+logger = logging.getLogger(__name__)
+
+STATUS_SUCCESS, STATUS_FAILED = "success", "failed"
+
+METADATA = "metadata"
+
+
+def fmt_ns_pkg_rsp(status, desc, error_code="500"):
+ return [0, {"status": status, "statusDescription": desc, "errorCode": error_code}]
+
+
+def ns_on_distribute(csar_id):
+ ret = None
+ try:
+ ret = NsPackage().on_distribute(csar_id)
+ except CatalogException as e:
+ NsPackage().delete_csar(csar_id)
+ return fmt_ns_pkg_rsp(STATUS_FAILED, e.args[0])
+ except:
+ logger.error(traceback.format_exc())
+ NsPackage().delete_csar(csar_id)
+ return fmt_ns_pkg_rsp(STATUS_FAILED, str(sys.exc_info()))
+ if ret[0]:
+ return fmt_ns_pkg_rsp(STATUS_FAILED, ret[1])
+ return fmt_ns_pkg_rsp(STATUS_SUCCESS, ret[1], "")
+
+
+def ns_delete_csar(csar_id):
+ ret = None
+ try:
+ ret = NsPackage().delete_csar(csar_id)
+ except CatalogException as e:
+ return fmt_ns_pkg_rsp(STATUS_FAILED, e.args[0])
+ except:
+ logger.error(traceback.format_exc())
+ return fmt_ns_pkg_rsp(STATUS_FAILED, str(sys.exc_info()))
+ return fmt_ns_pkg_rsp(STATUS_SUCCESS, ret[1], "")
+
+
+def ns_get_csars():
+ ret = None
+ try:
+ ret = NsPackage().get_csars()
+ except CatalogException as e:
+ return [1, e.args[0]]
+ except:
+ logger.error(traceback.format_exc())
+ return [1, str(sys.exc_info())]
+ return ret
+
+
+def ns_get_csar(csar_id):
+ ret = None
+ try:
+ ret = NsPackage().get_csar(csar_id)
+ except CatalogException as e:
+ return [1, e.args[0]]
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ return [1, str(sys.exc_info())]
+ return ret
+
+
+def parse_nsd(csar_id, inputs):
+ ret = None
+ try:
+ ns_pkg = NSPackageModel.objects.filter(nsPackageId=csar_id)
+ if not ns_pkg:
+ raise CatalogException("NS CSAR(%s) does not exist." % csar_id)
+ csar_path = ns_pkg[0].localFilePath
+ ret = {"model": toscaparser.parse_nsd(csar_path, inputs)}
+ except CatalogException as e:
+ return [1, e.args[0]]
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ return [1, str(sys.exc_info())]
+ return [0, ret]
+
+
+class NsPackage(object):
+ """
+ Actions for sdc ns package.
+ """
+
+ def __init__(self):
+ pass
+
+ def on_distribute(self, csar_id):
+ if NSPackageModel.objects.filter(nsPackageId=csar_id):
+ return [1, "NS CSAR(%s) already exists." % csar_id]
+
+ artifact = sdc.get_artifact(sdc.ASSETTYPE_SERVICES, csar_id)
+ local_path = os.path.join(CATALOG_ROOT_PATH, csar_id)
+ csar_name = "%s.csar" % artifact.get("name", csar_id)
+ local_file_name = sdc.download_artifacts(artifact["toscaModelURL"], local_path, csar_name)
+ if local_file_name.endswith(".csar") or local_file_name.endswith(".zip"):
+ artifact_vnf_file = fileutil.unzip_file(local_file_name, local_path, "Artifacts/Deployment/OTHER/ns.csar")
+ if os.path.exists(artifact_vnf_file):
+ local_file_name = artifact_vnf_file
+
+ data = {
+ 'userDefinedData': {}
+ }
+ nsd = NsDescriptor()
+ nsd.create(data, csar_id)
+ nsd.parse_nsd_and_save(csar_id, local_file_name)
+ return [0, "CSAR(%s) distributed successfully." % csar_id]
+
+ def delete_csar(self, csar_id):
+ nsd = NsDescriptor()
+ nsd.delete_single(csar_id)
+ return [0, "Delete CSAR(%s) successfully." % csar_id]
+
+ def get_csars(self):
+ csars = []
+ nss = NSPackageModel.objects.filter()
+ for ns in nss:
+ ret = self.get_csar(ns.nsPackageId)
+ csars.append(ret[1])
+ return [0, csars]
+
+ def get_csar(self, csar_id):
+ package_info = {}
+ csars = NSPackageModel.objects.filter(nsPackageId=csar_id)
+ if csars:
+ package_info["nsdId"] = csars[0].nsdId
+ package_info["nsPackageId"] = csars[0].nsPackageId
+ package_info["nsdProvider"] = csars[0].nsdDesginer
+ package_info["nsdVersion"] = csars[0].nsdVersion
+ package_info["csarName"] = csars[0].nsPackageUri
+ package_info["nsdModel"] = csars[0].nsdModel
+ package_info["nsdInvariantId"] = csars[0].invariantId
+ package_info["downloadUrl"] = "http://%s:%s/%s/%s/%s" % (
+ REG_TO_MSB_REG_PARAM[0]["nodes"][0]["ip"],
+ REG_TO_MSB_REG_PARAM[0]["nodes"][0]["port"],
+ CATALOG_URL_PATH,
+ csar_id,
+ csars[0].nsPackageUri)
+ else:
+ raise CatalogException("Ns package[%s] not Found." % csar_id)
+
+ return [0, {"csarId": csar_id, "packageInfo": package_info}]
diff --git a/catalog/packages/biz/sdc_service_package.py b/catalog/packages/biz/sdc_service_package.py
new file mode 100644
index 0000000..d0db6bf
--- /dev/null
+++ b/catalog/packages/biz/sdc_service_package.py
@@ -0,0 +1,124 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import traceback
+
+ import os
+
+from catalog.packages.biz.service_descriptor import ServiceDescriptor
+from catalog.pub.config.config import CATALOG_ROOT_PATH, REG_TO_MSB_REG_PARAM, CATALOG_URL_PATH
+from catalog.pub.database.models import ServicePackageModel, VnfPackageModel, PnfPackageModel
+from catalog.pub.exceptions import CatalogException, PackageNotFoundException, \
+ PackageHasExistsException
+from catalog.pub.msapi import sdc
+from catalog.pub.utils import fileutil, toscaparser
+
+logger = logging.getLogger(__name__)
+
+
+class ServicePackage(object):
+ """
+ Actions for sdc service package.
+ """
+
+ def __init__(self):
+ pass
+
+ def on_distribute(self, csar_id):
+ if ServicePackageModel.objects.filter(servicePackageId=csar_id):
+ raise PackageHasExistsException("Service CSAR(%s) already exists." % csar_id)
+
+ try:
+ service = sdc.get_asset(sdc.ASSETTYPE_SERVICES, csar_id)
+ # check if the related resources exist
+ resources = service.get('resources', None)
+ if resources:
+ for resource in resources:
+ if not VnfPackageModel.objects.filter(vnfPackageId=resource['resourceUUID']) and \
+ not PnfPackageModel.objects.filter(pnfPackageId=resource['resourceUUID']):
+ logger.error("Resource [%s] is not distributed.", resource['resourceUUID'])
+ raise CatalogException("Resource (%s) is not distributed." % resource['resourceUUID'])
+
+ # download csar package
+ local_path = os.path.join(CATALOG_ROOT_PATH, csar_id)
+ csar_name = "%s.csar" % service.get("name", csar_id)
+ local_file_name = sdc.download_artifacts(service["toscaModelURL"], local_path, csar_name)
+ if local_file_name.endswith(".csar") or local_file_name.endswith(".zip"):
+ fileutil.unzip_file(local_file_name, local_path, "")
+ data = {
+ 'userDefinedData': {}
+ }
+ serviced = ServiceDescriptor()
+ serviced.create(data, csar_id)
+ serviced.parse_serviced_and_save(csar_id, local_file_name)
+
+ except Exception as e:
+ logger.error(traceback.format_exc())
+ if ServicePackageModel.objects.filter(servicePackageId=csar_id):
+ ServicePackage().delete_csar(csar_id)
+ raise e
+
+ def delete_csar(self, csar_id):
+ serviced = ServiceDescriptor()
+ serviced.delete_single(csar_id)
+
+ def get_csars(self):
+ csars = []
+ packages = ServicePackageModel.objects.filter()
+ for package in packages:
+ csar = self.get_csar(package.servicePackageId)
+ csars.append(csar)
+ return csars
+
+ def get_csar(self, csar_id):
+ package_info = {}
+ csars = ServicePackageModel.objects.filter(servicePackageId=csar_id)
+ if csars:
+ package_info["servicedId"] = csars[0].servicedId
+ package_info["servicePackageId"] = csars[0].servicePackageId
+ package_info["servicedProvider"] = csars[0].servicedDesigner
+ package_info["servicedVersion"] = csars[0].servicedVersion
+ package_info["csarName"] = csars[0].servicePackageUri
+ package_info["servicedModel"] = csars[0].servicedModel
+ package_info["servicedInvariantId"] = csars[0].invariantId
+ package_info["downloadUrl"] = "http://%s:%s/%s/%s/%s" % (
+ REG_TO_MSB_REG_PARAM[0]["nodes"][0]["ip"],
+ REG_TO_MSB_REG_PARAM[0]["nodes"][0]["port"],
+ CATALOG_URL_PATH,
+ csar_id,
+ csars[0].servicePackageUri)
+ else:
+ error_message = "Service package[%s] not Found." % csar_id
+ logger.error(error_message)
+ raise PackageNotFoundException(error_message)
+
+ return {"csarId": csar_id, "packageInfo": package_info}
+
+ def parse_serviced(self, csar_id, inputs):
+ service_pkg = ServicePackageModel.objects.filter(servicePackageId=csar_id)
+ if not service_pkg:
+ raise PackageNotFoundException("Service CSAR(%s) does not exist." % csar_id)
+
+ try:
+ csar_path = service_pkg[0].localFilePath
+ ret = {"model": toscaparser.parse_sd(csar_path, inputs)}
+ return ret
+ except CatalogException as e:
+ logger.error(e.args[0])
+ raise e
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ raise e
diff --git a/catalog/packages/biz/sdc_vnf_package.py b/catalog/packages/biz/sdc_vnf_package.py
new file mode 100644
index 0000000..571c3bb
--- /dev/null
+++ b/catalog/packages/biz/sdc_vnf_package.py
@@ -0,0 +1,254 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import sys
+import threading
+import traceback
+
+from catalog.packages.const import PKG_STATUS
+from catalog.pub.config.config import CATALOG_ROOT_PATH, CATALOG_URL_PATH
+from catalog.pub.config.config import REG_TO_MSB_REG_PARAM
+from catalog.pub.database.models import VnfPackageModel
+from catalog.pub.exceptions import CatalogException
+from catalog.pub.msapi import sdc
+from catalog.pub.utils import fileutil
+from catalog.pub.utils import toscaparser
+from catalog.pub.utils.jobutil import JobUtil
+
+logger = logging.getLogger(__name__)
+
+JOB_ERROR = 255
+
+
+def nf_get_csars():
+ ret = None
+ try:
+ ret = NfPackage().get_csars()
+ except CatalogException as e:
+ return [1, e.args[0]]
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ return [1, str(sys.exc_info())]
+ return ret
+
+
+def nf_get_csar(csar_id):
+ ret = None
+ try:
+ ret = NfPackage().get_csar(csar_id)
+ except CatalogException as e:
+ return [1, e.args[0]]
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ return [1, str(sys.exc_info())]
+ return ret
+
+
+def parse_vnfd(csar_id, inputs):
+ ret = None
+ try:
+ nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=csar_id)
+ if not nf_pkg:
+ raise CatalogException("VNF CSAR(%s) does not exist." % csar_id)
+ csar_path = nf_pkg[0].localFilePath
+ ret = {"model": toscaparser.parse_vnfd(csar_path, inputs)}
+ except CatalogException as e:
+ return [1, e.args[0]]
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ return [1, str(sys.exc_info())]
+ return [0, ret]
+
+
+class NfDistributeThread(threading.Thread):
+ """
+ Sdc NF Package Distribute
+ """
+
+ def __init__(self, csar_id, vim_ids, lab_vim_id, job_id):
+ threading.Thread.__init__(self)
+ self.csar_id = csar_id
+ self.vim_ids = vim_ids
+ self.lab_vim_id = lab_vim_id
+ self.job_id = job_id
+
+ self.csar_save_path = os.path.join(CATALOG_ROOT_PATH, csar_id)
+
+ def run(self):
+ try:
+ self.on_distribute()
+ except CatalogException as e:
+ self.rollback_distribute()
+ JobUtil.add_job_status(self.job_id, JOB_ERROR, e.args[0])
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ logger.error(str(sys.exc_info()))
+ self.rollback_distribute()
+ JobUtil.add_job_status(self.job_id, JOB_ERROR, "Failed to distribute CSAR(%s)" % self.csar_id)
+
+ def on_distribute(self):
+ JobUtil.create_job(
+ inst_type='nf',
+ jobaction='on_distribute',
+ inst_id=self.csar_id,
+ job_id=self.job_id)
+ JobUtil.add_job_status(self.job_id, 5, "Start CSAR(%s) distribute." % self.csar_id)
+
+ if VnfPackageModel.objects.filter(vnfPackageId=self.csar_id):
+ err_msg = "NF CSAR(%s) already exists." % self.csar_id
+ JobUtil.add_job_status(self.job_id, JOB_ERROR, err_msg)
+ return
+
+ artifact = sdc.get_artifact(sdc.ASSETTYPE_RESOURCES, self.csar_id)
+ local_path = os.path.join(CATALOG_ROOT_PATH, self.csar_id)
+ csar_name = "%s.csar" % artifact.get("name", self.csar_id)
+ local_file_name = sdc.download_artifacts(artifact["toscaModelURL"], local_path, csar_name)
+ if local_file_name.endswith(".csar") or local_file_name.endswith(".zip"):
+ artifact_vnf_file = fileutil.unzip_file(local_file_name, local_path, "Artifacts/Deployment/OTHER/vnf.csar")
+ if os.path.exists(artifact_vnf_file):
+ local_file_name = artifact_vnf_file
+
+ vnfd_json = toscaparser.parse_vnfd(local_file_name)
+ vnfd = json.JSONDecoder().decode(vnfd_json)
+
+ if not vnfd.get("vnf"):
+ raise CatalogException("VNF properties and metadata in VNF Package(id=%s) are empty." % self.csar_id)
+
+ vnfd_id = vnfd["vnf"]["properties"].get("descriptor_id", "")
+ if VnfPackageModel.objects.filter(vnfdId=vnfd_id):
+ logger.error("VNF package(%s) already exists.", vnfd_id)
+ raise CatalogException("VNF package(%s) already exists." % vnfd_id)
+ JobUtil.add_job_status(self.job_id, 30, "Save CSAR(%s) to database." % self.csar_id)
+ vnfd_ver = vnfd["vnf"]["properties"].get("descriptor_version", "")
+ vnf_provider = vnfd["vnf"]["properties"].get("provider", "")
+ vnf_software_version = vnfd["vnf"]["properties"].get("software_version", "")
+ vnfd_product_name = vnfd["vnf"]["properties"].get("product_name", "")
+ VnfPackageModel(
+ vnfPackageId=self.csar_id,
+ vnfdId=vnfd_id,
+ vnfVendor=vnf_provider,
+ vnfdProductName=vnfd_product_name,
+ vnfdVersion=vnfd_ver,
+ vnfSoftwareVersion=vnf_software_version,
+ vnfdModel=vnfd_json,
+ localFilePath=local_file_name,
+ vnfPackageUri=csar_name,
+ onboardingState=PKG_STATUS.ONBOARDED,
+ operationalState=PKG_STATUS.ENABLED,
+ usageState=PKG_STATUS.NOT_IN_USE
+ ).save()
+ JobUtil.add_job_status(self.job_id, 100, "CSAR(%s) distribute successfully." % self.csar_id)
+
+ def rollback_distribute(self):
+ try:
+ VnfPackageModel.objects.filter(vnfPackageId=self.csar_id).delete()
+ fileutil.delete_dirs(self.csar_save_path)
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ logger.error(str(sys.exc_info()))
+
+
+class NfPkgDeleteThread(threading.Thread):
+ """
+ Sdc NF Package Deleting
+ """
+
+ def __init__(self, csar_id, job_id):
+ threading.Thread.__init__(self)
+ self.csar_id = csar_id
+ self.job_id = job_id
+
+ def run(self):
+ try:
+ self.delete_csar()
+ except CatalogException as e:
+ JobUtil.add_job_status(self.job_id, JOB_ERROR, e.args[0])
+ except Exception as e:
+ logger.error(e.args[0])
+ logger.error(traceback.format_exc())
+ logger.error(str(sys.exc_info()))
+ JobUtil.add_job_status(self.job_id, JOB_ERROR, "Failed to delete CSAR(%s)" % self.csar_id)
+
+ def delete_csar(self):
+ JobUtil.create_job(
+ inst_type='nf',
+ jobaction='delete',
+ inst_id=self.csar_id,
+ job_id=self.job_id)
+ JobUtil.add_job_status(self.job_id, 5, "Start to delete CSAR(%s)." % self.csar_id)
+
+ VnfPackageModel.objects.filter(vnfPackageId=self.csar_id).delete()
+
+ JobUtil.add_job_status(self.job_id, 50, "Delete local CSAR(%s) file." % self.csar_id)
+
+ csar_save_path = os.path.join(CATALOG_ROOT_PATH, self.csar_id)
+ fileutil.delete_dirs(csar_save_path)
+
+ JobUtil.add_job_status(self.job_id, 100, "Delete CSAR(%s) successfully." % self.csar_id)
+
+
+class NfPackage(object):
+ """
+ Actions for sdc nf package.
+ """
+
+ def __init__(self):
+ pass
+
+ def get_csars(self):
+ csars = []
+ nf_pkgs = VnfPackageModel.objects.filter()
+ for nf_pkg in nf_pkgs:
+ ret = self.get_csar(nf_pkg.vnfPackageId)
+ csars.append(ret[1])
+ return [0, csars]
+
+ def get_csar(self, csar_id):
+ pkg_info = {}
+ nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=csar_id)
+ if not nf_pkg:
+ nf_pkg = VnfPackageModel.objects.filter(vnfdId=csar_id)
+
+ if nf_pkg:
+ db_csar_id = nf_pkg[0].vnfPackageId
+ pkg_info["vnfdId"] = nf_pkg[0].vnfdId
+ pkg_info["vnfPackageId"] = nf_pkg[0].vnfPackageId
+ pkg_info["vnfdProvider"] = nf_pkg[0].vnfVendor
+ pkg_info["vnfdVersion"] = nf_pkg[0].vnfdVersion
+ pkg_info["vnfVersion"] = nf_pkg[0].vnfSoftwareVersion
+ pkg_info["csarName"] = nf_pkg[0].vnfPackageUri
+ pkg_info["vnfdModel"] = nf_pkg[0].vnfdModel
+ pkg_info["downloadUrl"] = "http://%s:%s/%s/%s/%s" % (
+ REG_TO_MSB_REG_PARAM[0]["nodes"][0]["ip"],
+ REG_TO_MSB_REG_PARAM[0]["nodes"][0]["port"],
+ CATALOG_URL_PATH,
+ db_csar_id,
+ nf_pkg[0].vnfPackageUri)
+ else:
+ raise CatalogException("Vnf package[%s] not Found." % csar_id)
+
+ csar_info = {
+ "csarId": db_csar_id,
+ "packageInfo": pkg_info,
+ "imageInfo": []
+ }
+ return [0, csar_info]
diff --git a/catalog/packages/biz/service_descriptor.py b/catalog/packages/biz/service_descriptor.py
new file mode 100644
index 0000000..11b6fb3
--- /dev/null
+++ b/catalog/packages/biz/service_descriptor.py
@@ -0,0 +1,129 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import uuid
+
+from catalog.packages.const import PKG_STATUS
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import ServicePackageModel, VnfPackageModel, PnfPackageModel
+from catalog.pub.exceptions import CatalogException, PackageNotFoundException
+from catalog.pub.utils import toscaparser, fileutil
+from catalog.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
class ServiceDescriptor(object):
    """
    Action for Service Descriptor
    """

    def __init__(self):
        pass

    def create(self, data, csar_id=None):
        """Create a service descriptor record in CREATED/DISABLED state.

        Args:
            data: request payload; only 'userDefinedData' is read from it.
            csar_id: optional id to reuse; a fresh UUID is generated otherwise.

        Returns:
            dict describing the new descriptor (id, states, user data).
        """
        logger.info('Start to create a ServiceD...')
        user_defined_data = ignore_case_get(data, 'userDefinedData', {})
        data = {
            'id': csar_id if csar_id else str(uuid.uuid4()),
            'servicedOnboardingState': PKG_STATUS.CREATED,
            'servicedOperationalState': PKG_STATUS.DISABLED,
            'servicedUsageState': PKG_STATUS.NOT_IN_USE,
            'userDefinedData': user_defined_data,
            '_links': None  # TODO
        }
        ServicePackageModel.objects.create(
            servicePackageId=data['id'],
            onboardingState=data['servicedOnboardingState'],
            operationalState=data['servicedOperationalState'],
            usageState=data['servicedUsageState'],
            userDefinedData=json.dumps(user_defined_data)
        )
        logger.info('A ServiceD(%s) has been created.' % data['id'])
        return data

    def parse_serviced_and_save(self, serviced_info_id, local_file_name):
        """Parse the uploaded CSAR and persist the descriptor metadata.

        Raises:
            CatalogException: when the descriptor id is missing or already
                taken, or a referenced VNF/PNF package is not distributed.
        """
        logger.info('Start to process ServiceD(%s)...' % serviced_info_id)
        service_pkgs = ServicePackageModel.objects.filter(servicePackageId=serviced_info_id)
        service_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)

        serviced_json = toscaparser.parse_sd(local_file_name)
        serviced = json.JSONDecoder().decode(serviced_json)

        serviced_id = serviced.get("service", {}).get("properties", {}).get("descriptor_id", "")
        serviced_name = serviced.get("service", {}).get("properties", {}).get("name", "")
        serviced_version = serviced.get("service", {}).get("properties", {}).get("version", "")
        serviced_designer = serviced.get("service", {}).get("properties", {}).get("designer", "")
        invariant_id = serviced.get("service", {}).get("properties", {}).get("invariant_id", "")
        if serviced_id == "":
            raise CatalogException("serviced_id(%s) does not exist in metadata." % serviced_id)
        other_nspkg = ServicePackageModel.objects.filter(servicedId=serviced_id)
        if other_nspkg and other_nspkg[0].servicePackageId != serviced_info_id:
            # logger.warn is a deprecated alias of logger.warning.
            logger.warning("ServiceD(%s,%s) already exists.", serviced_id, other_nspkg[0].servicePackageId)
            raise CatalogException("ServiceD(%s) already exists." % serviced_id)

        # Every VNF the service references must already be distributed.
        # .get(..., []) keeps a descriptor without a "vnfs" section from
        # raising KeyError.
        for vnf in serviced.get("vnfs", []):
            vnfd_id = vnf["properties"].get("descriptor_id", "undefined")
            if vnfd_id == "undefined":
                vnfd_id = vnf["properties"].get("id", "undefined")
            pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
            if not pkg:
                pkg = VnfPackageModel.objects.filter(vnfPackageId=vnfd_id)
            if not pkg:
                vnfd_name = vnf.get("vnf_id", "undefined")
                logger.error("[%s] is not distributed.", vnfd_name)
                raise CatalogException("VNF package(%s) is not distributed." % vnfd_id)

        # Same check for referenced PNF packages.
        for pnf in serviced.get("pnfs", []):
            pnfd_id = pnf["properties"].get("descriptor_id", "undefined")
            if pnfd_id == "undefined":
                pnfd_id = pnf["properties"].get("id", "undefined")
            pkg = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
            if not pkg:
                pkg = PnfPackageModel.objects.filter(pnfPackageId=pnfd_id)
            if not pkg:
                pnfd_name = pnf.get("pnf_id", "undefined")
                logger.error("[%s] is not distributed.", pnfd_name)
                # Report the descriptor id (was pnfd_name) for consistency
                # with the VNF branch above.
                raise CatalogException("PNF package(%s) is not distributed." % pnfd_id)

        service_pkgs.update(
            servicedId=serviced_id,
            servicedName=serviced_name,
            servicedDesigner=serviced_designer,
            servicedDescription=serviced.get("description", ""),
            servicedVersion=serviced_version,
            invariantId=invariant_id,
            onboardingState=PKG_STATUS.ONBOARDED,
            operationalState=PKG_STATUS.ENABLED,
            usageState=PKG_STATUS.NOT_IN_USE,
            servicePackageUri=local_file_name,
            sdcCsarId=serviced_info_id,
            localFilePath=local_file_name,
            servicedModel=serviced_json
        )
        logger.info('ServiceD(%s) has been processed.' % serviced_info_id)

    def delete_single(self, serviced_info_id):
        """Delete the descriptor record and its files on disk.

        Raises:
            PackageNotFoundException: when no such service package exists.
        """
        logger.info('Start to delete ServiceD(%s)...' % serviced_info_id)
        service_pkgs = ServicePackageModel.objects.filter(servicePackageId=serviced_info_id)
        if not service_pkgs.exists():
            logger.warning('ServiceD(%s) not found.' % serviced_info_id)
            raise PackageNotFoundException("Service package[%s] not Found." % serviced_info_id)
        service_pkgs.delete()
        service_pkg_path = os.path.join(CATALOG_ROOT_PATH, serviced_info_id)
        fileutil.delete_dirs(service_pkg_path)
        logger.info('ServiceD(%s) has been deleted.' % serviced_info_id)
diff --git a/catalog/packages/biz/vnf_package.py b/catalog/packages/biz/vnf_package.py
new file mode 100644
index 0000000..585a599
--- /dev/null
+++ b/catalog/packages/biz/vnf_package.py
@@ -0,0 +1,227 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import sys
+import threading
+import traceback
+import urllib
+import uuid
+
+from catalog.packages.biz.common import parse_file_range, read, save
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import VnfPackageModel, NSPackageModel
+from catalog.pub.exceptions import CatalogException, ResourceNotFoundException
+from catalog.pub.utils.values import ignore_case_get
+from catalog.pub.utils import fileutil, toscaparser
+from catalog.packages.const import PKG_STATUS
+
+
+logger = logging.getLogger(__name__)
+
+
class VnfPackage(object):
    """CRUD and content operations for VNF package records."""

    def __init__(self):
        pass

    def create_vnf_pkg(self, data):
        """Create an empty VNF package record in the CREATED state.

        Returns the VnfPkgInfo-style dict for the new record.
        """
        user_defined_data = ignore_case_get(data, "userDefinedData", {})
        vnf_pkg_id = str(uuid.uuid4())
        VnfPackageModel.objects.create(
            vnfPackageId=vnf_pkg_id,
            onboardingState=PKG_STATUS.CREATED,
            operationalState=PKG_STATUS.DISABLED,
            usageState=PKG_STATUS.NOT_IN_USE,
            userDefinedData=json.dumps(user_defined_data)
        )
        data = {
            "id": vnf_pkg_id,
            "onboardingState": PKG_STATUS.CREATED,
            "operationalState": PKG_STATUS.DISABLED,
            "usageState": PKG_STATUS.NOT_IN_USE,
            "userDefinedData": user_defined_data,
            "_links": None
        }
        return data

    def query_multiple(self):
        """Return response data for every stored VNF package."""
        return [fill_response_data(nf_pkg) for nf_pkg in VnfPackageModel.objects.filter()]

    def query_single(self, vnf_pkg_id):
        """Return response data for one VNF package.

        Raises:
            ResourceNotFoundException: when the package does not exist.
        """
        nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
        if not nf_pkg.exists():
            logger.error('VNF package(%s) does not exist.' % vnf_pkg_id)
            raise ResourceNotFoundException('VNF package(%s) does not exist.' % vnf_pkg_id)
        return fill_response_data(nf_pkg[0])

    def delete_vnf_pkg(self, vnf_pkg_id):
        """Delete a VNF package unless its VNFD is referenced by an NSD.

        Deleting a non-existent package is a no-op.

        Raises:
            CatalogException: when the VNFD is referenced by an NS package.
        """
        vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
        if not vnf_pkg.exists():
            logger.debug('VNF package(%s) has been deleted.' % vnf_pkg_id)
            return
        '''
        if vnf_pkg[0].operationalState != PKG_STATUS.DISABLED:
            raise CatalogException("The VNF package (%s) is not disabled" % vnf_pkg_id)
        if vnf_pkg[0].usageState != PKG_STATUS.NOT_IN_USE:
            raise CatalogException("The VNF package (%s) is in use" % vnf_pkg_id)
        '''
        del_vnfd_id = vnf_pkg[0].vnfdId
        for ns_pkg in NSPackageModel.objects.all():
            if not ns_pkg.nsdModel:
                continue
            nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
            # .get(..., []): an NSD without a "vnfs" section must not abort
            # the deletion with a KeyError.
            for vnf in nsd_model.get('vnfs', []):
                if del_vnfd_id == vnf["properties"]["descriptor_id"]:
                    raise CatalogException('VNFD(%s) is referenced.' % del_vnfd_id)
        vnf_pkg.delete()
        vnf_pkg_path = os.path.join(CATALOG_ROOT_PATH, vnf_pkg_id)
        fileutil.delete_dirs(vnf_pkg_path)
        logger.info('VNF package(%s) has been deleted.' % vnf_pkg_id)

    def upload(self, vnf_pkg_id, remote_file):
        """Store an uploaded package file and mark the record UPLOADING.

        Returns the local path the file was saved to.
        """
        logger.info('Start to upload VNF package(%s)...' % vnf_pkg_id)
        vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
        # if vnf_pkg[0].onboardingState != PKG_STATUS.CREATED:
        #     logger.error("VNF package(%s) is not CREATED" % vnf_pkg_id)
        #     raise CatalogException("VNF package(%s) is not CREATED" % vnf_pkg_id)
        vnf_pkg.update(onboardingState=PKG_STATUS.UPLOADING)

        local_file_name = save(remote_file, vnf_pkg_id)
        logger.info('VNF package(%s) has been uploaded.' % vnf_pkg_id)
        return local_file_name

    def download(self, vnf_pkg_id, file_range):
        """Read (a byte range of) the on-boarded package file.

        Raises:
            ResourceNotFoundException: when the package does not exist.
            CatalogException: when the package is not yet on-boarded.
        """
        logger.info('Start to download VNF package(%s)...' % vnf_pkg_id)
        nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
        if not nf_pkg.exists():
            logger.error('VNF package(%s) does not exist.' % vnf_pkg_id)
            raise ResourceNotFoundException('VNF package(%s) does not exist.' % vnf_pkg_id)
        if nf_pkg[0].onboardingState != PKG_STATUS.ONBOARDED:
            raise CatalogException("VNF package (%s) is not on-boarded" % vnf_pkg_id)

        local_file_path = nf_pkg[0].localFilePath
        start, end = parse_file_range(local_file_path, file_range)
        logger.info('VNF package (%s) has been downloaded.' % vnf_pkg_id)
        return read(local_file_path, start, end)
+
+
class VnfPkgUploadThread(threading.Thread):
    """Background worker: fetch a VNF package from a remote URI, then parse it."""

    def __init__(self, data, vnf_pkg_id):
        threading.Thread.__init__(self)
        self.vnf_pkg_id = vnf_pkg_id
        self.data = data
        # Set by upload_vnf_pkg_from_uri() once the file has been written.
        self.upload_file_name = None

    def run(self):
        try:
            self.upload_vnf_pkg_from_uri()
            parse_vnfd_and_save(self.vnf_pkg_id, self.upload_file_name)
        except CatalogException as e:
            logger.error(e.args[0])
        except Exception as e:
            # str(e) is safe even for exceptions constructed without args
            # (e.args[0] would raise IndexError on those).
            logger.error(str(e))
            logger.error(traceback.format_exc())
            logger.error(str(sys.exc_info()))

    def upload_vnf_pkg_from_uri(self):
        """Download the package file named by 'addressInformation'.

        Raises:
            CatalogException: when the record is not in the CREATED state.
        """
        logger.info("Start to upload VNF packge(%s) from URI..." % self.vnf_pkg_id)
        vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=self.vnf_pkg_id)
        if vnf_pkg[0].onboardingState != PKG_STATUS.CREATED:
            logger.error("VNF package(%s) is not CREATED" % self.vnf_pkg_id)
            raise CatalogException("VNF package (%s) is not created" % self.vnf_pkg_id)
        vnf_pkg.update(onboardingState=PKG_STATUS.UPLOADING)

        uri = ignore_case_get(self.data, "addressInformation")
        local_file_dir = os.path.join(CATALOG_ROOT_PATH, self.vnf_pkg_id)
        self.upload_file_name = os.path.join(local_file_dir, os.path.basename(uri))
        if not os.path.exists(local_file_dir):
            fileutil.make_dirs(local_file_dir)
        response = urllib.request.urlopen(uri)
        try:
            # urlopen().read() returns bytes, so the target file must be
            # opened in binary mode; text mode ("wt") raised a TypeError.
            with open(self.upload_file_name, "wb") as local_file:
                local_file.write(response.read())
        finally:
            # Close the HTTP response even if the write fails.
            response.close()
        logger.info('VNF packge(%s) has been uploaded.' % self.vnf_pkg_id)
+
+
def fill_response_data(nf_pkg):
    """Map a VnfPackageModel row onto a VnfPkgInfo-style response dict.

    JSON-encoded columns (checksum, userDefinedData) are decoded and are
    only included when non-empty.
    """
    pkg_info = {
        "id": nf_pkg.vnfPackageId,
        "vnfdId": nf_pkg.vnfdId,
        "vnfProductName": nf_pkg.vnfdProductName,
        "vnfSoftwareVersion": nf_pkg.vnfSoftwareVersion,
        "vnfdVersion": nf_pkg.vnfdVersion,
    }
    if nf_pkg.checksum:
        pkg_info["checksum"] = json.loads(nf_pkg.checksum)
    pkg_info["softwareImages"] = None  # TODO
    pkg_info["additionalArtifacts"] = None  # TODO
    pkg_info["onboardingState"] = nf_pkg.onboardingState
    pkg_info["operationalState"] = nf_pkg.operationalState
    pkg_info["usageState"] = nf_pkg.usageState
    if nf_pkg.userDefinedData:
        pkg_info["userDefinedData"] = json.loads(nf_pkg.userDefinedData)
    pkg_info["_links"] = None  # TODO
    return pkg_info
+
+
def parse_vnfd_and_save(vnf_pkg_id, vnf_pkg_path):
    """Parse the VNFD out of an uploaded package file and persist it.

    Args:
        vnf_pkg_id: id of the VnfPackageModel record being on-boarded.
        vnf_pkg_path: local path of the uploaded package file.

    Raises:
        CatalogException: when the VNFD section is empty or its descriptor
            id already belongs to another package.
    """
    logger.info('Start to process VNF package(%s)...' % vnf_pkg_id)
    vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
    vnf_pkg.update(onboardingState=PKG_STATUS.PROCESSING)
    vnfd_json = toscaparser.parse_vnfd(vnf_pkg_path)
    vnfd = json.JSONDecoder().decode(vnfd_json)

    if vnfd.get("vnf", "") != "":
        vnfd_id = vnfd["vnf"]["properties"].get("descriptor_id", "")
        # A VNFD id must be unique across packages.
        other_pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
        if other_pkg and other_pkg[0].vnfPackageId != vnf_pkg_id:
            logger.error("VNF package(%s,%s) already exists.", other_pkg[0].vnfPackageId, vnfd_id)
            raise CatalogException("VNF package(%s) already exists." % vnfd_id)
        vnf_provider = vnfd["vnf"]["properties"].get("provider", "")
        vnfd_ver = vnfd["vnf"]["properties"].get("descriptor_version", "")
        vnf_software_version = vnfd["vnf"]["properties"].get("software_version", "")
        vnfd_product_name = vnfd["vnf"]["properties"].get("product_name", "")
        vnf_pkg.update(
            vnfPackageId=vnf_pkg_id,
            vnfdId=vnfd_id,
            vnfdProductName=vnfd_product_name,
            vnfVendor=vnf_provider,
            vnfdVersion=vnfd_ver,
            vnfSoftwareVersion=vnf_software_version,
            vnfdModel=vnfd_json,
            onboardingState=PKG_STATUS.ONBOARDED,
            operationalState=PKG_STATUS.ENABLED,
            usageState=PKG_STATUS.NOT_IN_USE,
            localFilePath=vnf_pkg_path,
            vnfPackageUri=os.path.split(vnf_pkg_path)[-1]
        )
    else:
        # Fixed typo in the error message: "propeties" -> "properties".
        raise CatalogException("VNF properties and metadata in VNF Package(id=%s) are empty." % vnf_pkg_id)
    logger.info('VNF package(%s) has been processed(done).' % vnf_pkg_id)
+
+
def handle_upload_failed(vnf_pkg_id):
    """Roll the package back to the CREATED state after a failed upload."""
    VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id).update(onboardingState=PKG_STATUS.CREATED)
diff --git a/catalog/packages/biz/vnf_pkg_artifacts.py b/catalog/packages/biz/vnf_pkg_artifacts.py
new file mode 100644
index 0000000..37021eb
--- /dev/null
+++ b/catalog/packages/biz/vnf_pkg_artifacts.py
@@ -0,0 +1,43 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from catalog.pub.database.models import VnfPackageModel
+from catalog.pub.exceptions import ResourceNotFoundException, ArtifactNotFoundException
+from catalog.pub.utils import fileutil
+
+logger = logging.getLogger(__name__)
+
+
class FetchVnfPkgArtifact(object):
    """Fetch a single artifact file out of an on-boarded VNF package."""

    def fetch(self, vnfPkgId, artifactPath):
        """Return the artifact's content, read as text.

        Raises:
            ResourceNotFoundException: when the package does not exist.
            ArtifactNotFoundException: when the package is not a csar/zip
                archive or the artifact path cannot be resolved inside it.
        """
        logger.debug("FetchVnfPkgArtifact--get--single--artifact--biz::>"
                     "ID: %s path: %s" % (vnfPkgId, artifactPath))
        vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnfPkgId)
        if not vnf_pkg.exists():
            err_msg = "NF Package (%s) doesn't exists." % vnfPkgId
            raise ResourceNotFoundException(err_msg)
        vnf_pkg = vnf_pkg.get()
        local_path = vnf_pkg.localFilePath
        # str.endswith accepts a tuple of suffixes; guard clause keeps the
        # happy path unindented.
        if not local_path.endswith((".csar", ".zip")):
            raise ArtifactNotFoundException("NF Package format is not csar or zip")
        vnf_extract_path = fileutil.unzip_csar_to_tmp(local_path)
        artifact_path = fileutil.get_artifact_path(vnf_extract_path, artifactPath)
        if not artifact_path:
            # Fixed broken message: "Couldn't artifact %s" (missing verb).
            raise ArtifactNotFoundException("Couldn't find artifact %s" % artifactPath)
        # NOTE(review): content is read in text mode; binary artifacts would
        # need "rb" -- confirm expected artifact types before changing.
        with open(artifact_path, 'rt') as f:
            file_content = f.read()
        return file_content
diff --git a/catalog/packages/biz/vnf_pkg_subscription.py b/catalog/packages/biz/vnf_pkg_subscription.py
new file mode 100644
index 0000000..29ef92e
--- /dev/null
+++ b/catalog/packages/biz/vnf_pkg_subscription.py
@@ -0,0 +1,190 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ast
+import json
+import logging
+import os
+import requests
+import uuid
+
+from collections import Counter
+from rest_framework import status
+
+from catalog.packages import const
+from catalog.pub.database.models import VnfPkgSubscriptionModel
+from catalog.pub.exceptions import VnfPkgSubscriptionException,\
+ VnfPkgDuplicateSubscriptionException, SubscriptionDoesNotExistsException
+from catalog.pub.utils.values import ignore_case_get
+
+
+logger = logging.getLogger(__name__)
+
# Maps external (camelCase) filter names onto the model's field names.
ROOT_FILTERS = {
    "notificationTypes": "notification_types",
    "vnfdId": "vnfd_id",
    "vnfPkgId": "vnf_pkg_id",
    "operationalState": "operation_states",
    "usageState": "usage_states"
}


def is_filter_type_equal(new_filter, existing_filter):
    """Return True when both filter value lists hold the same items,
    ignoring order but respecting multiplicity."""
    return Counter(existing_filter) == Counter(new_filter)
+
+
class CreateSubscription(object):
    """Validate and persist a VNF package management subscription request."""

    def __init__(self, data):
        self.data = data
        self.filter = ignore_case_get(self.data, "filters", {})
        self.callback_uri = ignore_case_get(self.data, "callbackUri")
        self.authentication = ignore_case_get(self.data, "authentication", {})
        self.notification_types = ignore_case_get(self.filter, "notificationTypes", [])
        self.operation_states = ignore_case_get(self.filter, "operationalState", [])
        self.usage_states = ignore_case_get(self.filter, "usageState", [])
        self.vnfd_id = ignore_case_get(self.filter, "vnfdId", [])
        self.vnf_pkg_id = ignore_case_get(self.filter, "vnfPkgId", [])
        self.vnf_products_from_provider = \
            ignore_case_get(self.filter, "vnfProductsFromProviders", {})

    def check_callbackuri_connection(self):
        """Probe the callback URI; it must answer a GET with 204.

        Raises:
            VnfPkgSubscriptionException: when the URI is unreachable or
                answers with any status other than 204.
        """
        logger.debug("SubscribeNotification-post::> Sending GET request "
                     "to %s" % self.callback_uri)
        try:
            response = requests.get(self.callback_uri, timeout=2)
        except Exception:
            # Only the network call sits inside the try block; previously the
            # status-code exception raised below was caught by this handler
            # and re-raised with a misleading message.
            raise VnfPkgSubscriptionException(
                "callbackUri %s didn't return 204 status code." % self.callback_uri
            )
        if response.status_code != status.HTTP_204_NO_CONTENT:
            raise VnfPkgSubscriptionException(
                "callbackUri %s returns %s status code." % (
                    self.callback_uri,
                    response.status_code
                )
            )

    def do_biz(self):
        """Run the full subscription workflow and return the stored record."""
        self.subscription_id = str(uuid.uuid4())
        self.check_callbackuri_connection()
        self.check_valid_auth_info()
        self.check_valid()
        self.save_db()
        # .get() raises DoesNotExist if the insert failed, so reaching the
        # return implies a valid record.
        subscription = VnfPkgSubscriptionModel.objects.get(
            subscription_id=self.subscription_id
        )
        if subscription:
            return subscription.toDict()

    def check_valid_auth_info(self):
        """Reject auth parameter blocks that contradict the declared authType."""
        logger.debug("SubscribeNotification--post::> Validating Auth "
                     "details if provided")
        if self.authentication.get("paramsBasic", {}) and \
                const.BASIC not in self.authentication.get("authType"):
            raise VnfPkgSubscriptionException('Auth type should be ' + const.BASIC)
        if self.authentication.get("paramsOauth2ClientCredentials", {}) and \
                const.OAUTH2_CLIENT_CREDENTIALS not in self.authentication.get("authType"):
            raise VnfPkgSubscriptionException('Auth type should be ' + const.OAUTH2_CLIENT_CREDENTIALS)

    def check_filter_exists(self, sub):
        """Return True when an existing subscription has identical filters."""
        # Check the usage states, operationStates
        for filter_type in ["operation_states", "usage_states"]:
            if not is_filter_type_equal(getattr(self, filter_type),
                                        ast.literal_eval(getattr(sub, filter_type))):
                return False
        # If all the above types are same then check id filters
        for id_filter in ["vnfd_id", "vnf_pkg_id"]:
            if not is_filter_type_equal(getattr(self, id_filter),
                                        ast.literal_eval(getattr(sub, id_filter))):
                return False
        return True

    def check_valid(self):
        """Fail when a subscription with the same callbackUri and filters exists."""
        logger.debug("SubscribeNotification--post::> Checking DB if "
                     "callbackUri already exists")
        subscriptions = VnfPkgSubscriptionModel.objects.filter(callback_uri=self.callback_uri)
        if not subscriptions.exists():
            return True
        for subscription in subscriptions:
            if self.check_filter_exists(subscription):
                raise VnfPkgDuplicateSubscriptionException(
                    "Already Subscription (%s) exists with the "
                    "same callbackUri and filter" % subscription.subscription_id)
        return True

    def save_db(self):
        """Persist the subscription; filters are stored JSON-encoded."""
        logger.debug("SubscribeNotification--post::> Saving the subscription "
                     "%s to the database" % self.subscription_id)
        links = {
            "self": {
                "href": os.path.join(const.VNFPKG_SUBSCRIPTION_ROOT_URI, self.subscription_id)
            }
        }
        VnfPkgSubscriptionModel.objects.create(
            subscription_id=self.subscription_id,
            callback_uri=self.callback_uri,
            notification_types=json.dumps(self.notification_types),
            auth_info=json.dumps(self.authentication),
            usage_states=json.dumps(self.usage_states),
            operation_states=json.dumps(self.operation_states),
            vnf_products_from_provider=json.dumps(self.vnf_products_from_provider),
            vnfd_id=json.dumps(self.vnfd_id),
            vnf_pkg_id=json.dumps(self.vnf_pkg_id),
            links=json.dumps(links))
        logger.debug('Create Subscription[%s] success', self.subscription_id)
+
+
class QuerySubscription(object):
    """Read-side operations over stored VNF package subscriptions."""

    def query_multi_subscriptions(self, params):
        """Return all subscriptions, narrowed by any recognised query filters."""
        logger.debug("QuerySubscription--get--multi--subscriptions--biz::> Check "
                     "for filters in query params %s" % params)
        query_data = {
            ROOT_FILTERS[param] + '__icontains': value
            for param, value in params.items()
            if param in ROOT_FILTERS
        }
        # Query the database with filters if the request has fields in request params, else fetch all records
        if query_data:
            subscriptions = VnfPkgSubscriptionModel.objects.filter(**query_data)
        else:
            subscriptions = VnfPkgSubscriptionModel.objects.all()
        if not subscriptions.exists():
            return []
        return [subscription.toDict() for subscription in subscriptions]

    def query_single_subscription(self, subscription_id):
        """Return one subscription as a dict; raise when it does not exist."""
        logger.debug("QuerySingleSubscriptions--get--single--subscription--biz::> "
                     "ID: %s" % subscription_id)

        matches = VnfPkgSubscriptionModel.objects.filter(
            subscription_id=subscription_id)
        if not matches.exists():
            raise SubscriptionDoesNotExistsException("Subscription with ID: %s "
                                                     "does not exists" % subscription_id)
        return matches[0].toDict()
+
+
class TerminateSubscription(object):
    """Removal of an existing VNF package subscription."""

    def terminate(self, subscription_id):
        """Delete the subscription with the given id; raise when unknown."""
        logger.debug("TerminateSubscriptions--delete--biz::> "
                     "ID: %s" % subscription_id)

        subscription = VnfPkgSubscriptionModel.objects.filter(
            subscription_id=subscription_id).first()
        if subscription is None:
            raise SubscriptionDoesNotExistsException("Subscription with ID: %s "
                                                     "does not exists" % subscription_id)
        subscription.delete()
diff --git a/catalog/packages/const.py b/catalog/packages/const.py
new file mode 100644
index 0000000..cd09b40
--- /dev/null
+++ b/catalog/packages/const.py
@@ -0,0 +1,78 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from catalog.pub.utils.jobutil import enum
+
# States a package moves through while being created, uploaded and parsed,
# plus its operational/usage states once on-boarded.
PKG_STATUS = enum(
    CREATED="CREATED",
    UPLOADING="UPLOADING",
    PROCESSING="PROCESSING",
    ONBOARDED="ONBOARDED",
    IN_USE="IN_USE",
    NOT_IN_USE="NOT_IN_USE",
    ENABLED="ENABLED",
    DISABLED="DISABLED"
)

# Authentication schemes accepted for subscription notification callbacks.
AUTH_TYPES = [
    "BASIC",
    "OAUTH2_CLIENT_CREDENTIALS",
    "TLS_CERT"
]

BASIC = "BASIC"

OAUTH2_CLIENT_CREDENTIALS = "OAUTH2_CLIENT_CREDENTIALS"

# Notification types emitted for VNF package subscriptions.
NOTIFICATION_TYPES = [
    "VnfPackageOnboardingNotification",
    "VnfPackageChangeNotification"
]

# Base URIs under which subscription resources are exposed.
VNFPKG_SUBSCRIPTION_ROOT_URI = "api/vnfpkgm/v1/subscriptions/"

NSDM_SUBSCRIPTION_ROOT_URI = "api/nsd/v1/subscriptions/"

# Attribute names accepted as filters in NSD-management subscription requests.
NSDM_NOTIFICATION_FILTERS = [
    "notificationTypes",
    "nsdInfoId",
    "nsdName",
    "nsdId",
    "nsdVersion",
    "nsdDesigner",
    "nsdInvariantId",
    "vnfPkgIds",
    "pnfdInfoIds",
    "nestedNsdInfoIds",
    "nsdOnboardingState",
    "nsdOperationalState",
    "nsdUsageState",
    "pnfdId",
    "pnfdName",
    "pnfdVersion",
    "pnfdProvider",
    "pnfdInvariantId",
    "pnfdOnboardingState",
    "pnfdUsageState"
]

# Notification types emitted for NSD/PNFD management subscriptions.
NSDM_NOTIFICATION_TYPES = [
    "NsdOnBoardingNotification",
    "NsdOnboardingFailureNotification",
    "NsdChangeNotification",
    "NsdDeletionNotification",
    "PnfdOnBoardingNotification",
    "PnfdOnBoardingFailureNotification",
    "PnfdDeletionNotification"
]
diff --git a/catalog/packages/serializers/__init__.py b/catalog/packages/serializers/__init__.py
new file mode 100644
index 0000000..342c2a8
--- /dev/null
+++ b/catalog/packages/serializers/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/packages/serializers/catalog_serializers.py b/catalog/packages/serializers/catalog_serializers.py
new file mode 100644
index 0000000..f53b06d
--- /dev/null
+++ b/catalog/packages/serializers/catalog_serializers.py
@@ -0,0 +1,442 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from catalog.pub.utils.toscaparser.nsdmodel import EtsiNsdInfoModel
+from catalog.pub.utils.toscaparser.vnfdmodel import EtsiVnfdInfoModel
+
+
class PostJobRequestSerializer(serializers.Serializer):
    """Request body for posting a job status update."""
    progress = serializers.CharField(
        help_text="Job Progress",
        required=False
    )
    desc = serializers.CharField(
        help_text="Description",
        required=False
    )
    errcode = serializers.CharField(
        help_text="Error Code",
        required=False
    )
+
+
class JobResponseHistoryListSerializer(serializers.Serializer):
    """One historical status entry of a job."""
    status = serializers.CharField(
        help_text="Status",
        required=False
    )
    progress = serializers.CharField(
        help_text="Job Progress",
        required=False
    )
    statusDescription = serializers.CharField(
        help_text="Status Description",
        required=False
    )
    errorCode = serializers.CharField(
        help_text="Error Code",
        required=False,
        allow_null=True
    )
    responseId = serializers.CharField(
        help_text="Response Id",
        required=False
    )
+
+
class JobResponseDescriptorSerializer(serializers.Serializer):
    """Current job status together with its status history."""
    status = serializers.CharField(
        help_text="Status",
        required=False
    )
    progress = serializers.CharField(
        help_text="Job Progress",
        required=False
    )
    statusDescription = serializers.CharField(
        help_text="Status Description",
        required=False
    )
    errorCode = serializers.CharField(
        help_text="Error Code",
        required=False,
        allow_null=True
    )
    responseId = serializers.CharField(
        help_text="Response Id",
        required=False
    )
    responseHistoryList = JobResponseHistoryListSerializer(
        help_text="Response History List",
        many=True,
        required=False
    )
+
+
class GetJobResponseSerializer(serializers.Serializer):
    """Response body for querying a job."""
    jobId = serializers.CharField(
        help_text="Job Id",
        required=False
    )
    responseDescriptor = JobResponseDescriptorSerializer(
        help_text="Job Response Descriptor",
        required=False
    )
+
+
class PostJobResponseResultSerializer(serializers.Serializer):
    """Result returned after posting a job status update."""
    result = serializers.CharField(
        help_text="Result",
        required=True
    )
    msg = serializers.CharField(
        help_text="Message",
        required=False
    )
+
+
class InternalErrorRequestSerializer(serializers.Serializer):
    """Generic internal-error payload."""
    error = serializers.CharField(
        help_text="Error",
        required=True
    )
    errorMessage = serializers.CharField(
        help_text="Error Message",
        required=False
    )
+
+
class NsPackageDistributeRequestSerializer(serializers.Serializer):
    """Request body for distributing an NS package from SDC."""
    csarId = serializers.CharField(
        help_text="csarId",
        required=True
    )
+
+
class NsPackageDistributeResponseSerializer(serializers.Serializer):
    """Result of an NS package distribution request."""
    status = serializers.CharField(
        help_text="status",
        required=True
    )
    statusDescription = serializers.CharField(
        help_text="statusDescription",
        required=True
    )
    errorCode = serializers.CharField(
        help_text="errorCode",
        required=True,
        allow_null=True
    )
+
+
class NsPackageInfoSerializer(serializers.Serializer):
    """Metadata describing one on-boarded NS package."""
    nsdId = serializers.CharField(
        help_text="NSD ID",
        required=False,
        allow_null=True
    )
    nsPackageId = serializers.CharField(
        help_text="NS Package ID",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    nsdProvider = serializers.CharField(
        help_text="NSD Provider",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    nsdVersion = serializers.CharField(
        help_text="NSD Version",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    csarName = serializers.CharField(
        help_text="CSAR name",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    nsdModel = serializers.CharField(
        help_text="NSD Model",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    downloadUrl = serializers.CharField(
        help_text="URL to download NSD Model",
        required=False,
        allow_null=True
    )
+
+
class NsPackageSerializer(serializers.Serializer):
    """One NS package: its CSAR id plus package metadata."""
    csarId = serializers.CharField(
        help_text="CSAR ID",
        required=False,
        allow_null=True
    )
    packageInfo = NsPackageInfoSerializer(
        help_text="NS Package Info",
        required=False,
        allow_null=True
    )
+
+
class NsPackagesSerializer(serializers.ListSerializer):
    """List of NS packages."""
    child = NsPackageSerializer()
+
+
class ServicePackageDistributeRequestSerializer(serializers.Serializer):
    """Request body for distributing a service package from SDC."""
    csarId = serializers.CharField(
        help_text="csarId",
        required=True
    )
+
+
class ServicePackageInfoSerializer(serializers.Serializer):
    """Metadata describing one on-boarded service package."""
    servicedId = serializers.CharField(
        help_text="ServiceD ID",
        required=False,
        allow_null=True
    )
    servicePackageId = serializers.CharField(
        help_text="Service Package ID",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    servicedProvider = serializers.CharField(
        help_text="ServiceD Provider",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    servicedVersion = serializers.CharField(
        help_text="ServiceD Version",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    csarName = serializers.CharField(
        help_text="CSAR name",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    servicedModel = serializers.CharField(
        help_text="ServiceD Model",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    downloadUrl = serializers.CharField(
        help_text="URL to download ServiceD Model",
        required=False,
        allow_null=True
    )
+
+
class ServicePackageSerializer(serializers.Serializer):
    """One service package entry: the CSAR identifier plus its metadata."""

    csarId = serializers.CharField(required=False, allow_null=True, help_text="CSAR ID")
    packageInfo = ServicePackageInfoSerializer(required=False, allow_null=True, help_text="Service Package Info")
+
+
class ServicePackagesSerializer(serializers.ListSerializer):
    """A list of service package entries."""

    child = ServicePackageSerializer()
+
+
class NfPackageDistributeRequestSerializer(serializers.Serializer):
    """Request body for distributing a VNF package to one or more VIMs."""

    csarId = serializers.CharField(
        help_text="CSAR ID",
        required=True
    )
    # BUG FIX: the original help texts were swapped — vimIds (a list) was
    # described as "A string for vimIds" and labVimId (a single string)
    # as "A list of VIM IDs.".
    vimIds = serializers.ListField(
        help_text="A list of VIM IDs.",
        child=serializers.CharField(),
        required=False
    )
    labVimId = serializers.CharField(
        help_text="A string for the lab VIM ID.",
        allow_blank=True,
        required=False
    )
+
+
class NfPackageInfoSerializer(serializers.Serializer):
    """Descriptive metadata of an on-boarded VNF package."""

    vnfdId = serializers.CharField(required=False, allow_null=True, allow_blank=True, help_text="VNFD ID")
    vnfPackageId = serializers.CharField(required=True, help_text="VNF Package ID")
    vnfdProvider = serializers.CharField(required=False, allow_null=True, allow_blank=True, help_text="VNFD Provider")
    vnfdVersion = serializers.CharField(required=False, allow_null=True, allow_blank=True, help_text="VNFD Version")
    vnfVersion = serializers.CharField(required=False, allow_null=True, allow_blank=True, help_text="VNF Version")
    csarName = serializers.CharField(required=False, allow_null=True, allow_blank=True, help_text="CSAR Name")
    vnfdModel = serializers.CharField(required=False, allow_null=True, allow_blank=True, help_text="VNFD Model")
    downloadUrl = serializers.CharField(required=False, allow_null=True, allow_blank=True, help_text="URL to download VNFD Model")
+
+
class NfImageInfoSerializer(serializers.Serializer):
    """Status of one VNF image on a particular VIM."""

    index = serializers.CharField(required=True, help_text="Index of VNF Image")
    fileName = serializers.CharField(required=True, help_text="Image file name")
    imageId = serializers.CharField(required=True, help_text="Image ID")
    vimId = serializers.CharField(required=True, help_text="VIM ID")
    vimUser = serializers.CharField(required=True, help_text="User of VIM")
    tenant = serializers.CharField(required=True, help_text="Tenant")
    status = serializers.CharField(required=True, help_text="Status")
+
+
class NfPackageSerializer(serializers.Serializer):
    """One VNF package entry: CSAR id, package metadata and image status."""

    csarId = serializers.CharField(required=True, help_text="CSAR ID")
    packageInfo = NfPackageInfoSerializer(required=True, help_text="VNF Package Info")
    imageInfo = NfImageInfoSerializer(many=True, required=False, allow_null=True, help_text="Image Info")
+
+
class NfPackagesSerializer(serializers.ListSerializer):
    """A list of VNF package entries."""

    child = NfPackageSerializer()
+
+
class PostJobResponseSerializer(serializers.Serializer):
    """Response carrying the identifier of a newly created job."""

    jobId = serializers.CharField(required=True, help_text="jobId")
+
+
class ParseModelRequestSerializer(serializers.Serializer):
    """Request body for parsing an on-boarded package into a model."""

    csarId = serializers.CharField(required=True, help_text="CSAR ID")
    packageType = serializers.CharField(required=False, help_text="Package type: VNF, PNF, NS, Service")
    inputs = serializers.JSONField(required=False, help_text="Inputs")
+
+
class ParseModelResponseSerializer(serializers.Serializer):
    """Response wrapping a parsed package model."""

    model = serializers.JSONField(required=True, help_text="Model")
+
+
class EtsiNsdInfoModelSerializer(serializers.ModelSerializer):
    """ModelSerializer exposing every field of EtsiNsdInfoModel."""

    class Meta:
        model = EtsiNsdInfoModel
        # BUG FIX: DRF (>= 3.3) requires an explicit 'fields' or 'exclude'
        # on ModelSerializer.Meta; without it, using the serializer raises
        # an AssertionError at runtime.
        fields = '__all__'
+
+
class EtsiVnfdInfoModelSerializer(serializers.ModelSerializer):
    """ModelSerializer exposing every field of EtsiVnfdInfoModel."""

    class Meta:
        model = EtsiVnfdInfoModel
        # BUG FIX: DRF (>= 3.3) requires an explicit 'fields' or 'exclude'
        # on ModelSerializer.Meta; without it, using the serializer raises
        # an AssertionError at runtime.
        fields = '__all__'
+
+
class ParseNSPackageResponseSerializer(serializers.Serializer):
    """Response wrapping a parsed NSD model."""

    model = EtsiNsdInfoModelSerializer(required=True, help_text="NSD Model")
+
+
class ParseNfPackageResponseSerializer(serializers.Serializer):
    """Response wrapping a parsed VNFD model."""

    model = EtsiVnfdInfoModelSerializer(required=True, help_text="VNFD Model")
diff --git a/catalog/packages/serializers/checksum.py b/catalog/packages/serializers/checksum.py
new file mode 100644
index 0000000..1296626
--- /dev/null
+++ b/catalog/packages/serializers/checksum.py
@@ -0,0 +1,30 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
class ChecksumSerializer(serializers.Serializer):
    """Checksum of an artifact: algorithm name plus its hex digest."""

    algorithm = serializers.CharField(
        required=True, allow_null=False, allow_blank=False,
        help_text="Name of the algorithm used to generate the checksum."
    )
    # NOTE(review): allow_null=True on a mandatory field is inconsistent
    # with 'algorithm' above — confirm against the SOL005 Checksum type.
    hash = serializers.CharField(
        required=True, allow_null=True, allow_blank=False,
        help_text="The hexadecimal value of the checksum."
    )
diff --git a/catalog/packages/serializers/create_nsd_info_request.py b/catalog/packages/serializers/create_nsd_info_request.py
new file mode 100644
index 0000000..24fe3b7
--- /dev/null
+++ b/catalog/packages/serializers/create_nsd_info_request.py
@@ -0,0 +1,29 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
class CreateNsdInfoRequestSerializer(serializers.Serializer):
    """Request body for creating an individual NS descriptor resource."""

    userDefinedData = serializers.DictField(
        required=False, allow_null=True,
        child=serializers.CharField(help_text='Key Value Pairs', allow_blank=True),
        help_text="User-defined data for the NS descriptor resource to be created."
                  "It shall be present when the user defined data is set for the individual NS "
                  "descriptor resource to be created."
    )
diff --git a/catalog/packages/serializers/create_pnfd_info_request.py b/catalog/packages/serializers/create_pnfd_info_request.py
new file mode 100644
index 0000000..01d8229
--- /dev/null
+++ b/catalog/packages/serializers/create_pnfd_info_request.py
@@ -0,0 +1,29 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
class CreatePnfdInfoRequestSerializer(serializers.Serializer):
    """Request body for creating an individual PNF descriptor resource."""

    userDefinedData = serializers.DictField(
        required=False, allow_null=True,
        child=serializers.CharField(help_text='Key Value Pairs', allow_blank=True),
        help_text="User-defined data for the PNF descriptor resource to be created."
                  "It shall be present when the user defined data is set for the individual "
                  "PNF descriptor resource to be created."
    )
diff --git a/catalog/packages/serializers/create_vnf_pkg_info_req.py b/catalog/packages/serializers/create_vnf_pkg_info_req.py
new file mode 100644
index 0000000..6da281d
--- /dev/null
+++ b/catalog/packages/serializers/create_vnf_pkg_info_req.py
@@ -0,0 +1,27 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
class CreateVnfPkgInfoRequestSerializer(serializers.Serializer):
    """Request body for creating an individual VNF package resource."""

    userDefinedData = serializers.DictField(
        required=False, allow_null=True,
        child=serializers.CharField(help_text="KeyValue Pairs", allow_blank=True),
        help_text="User defined data for the VNF package."
    )
diff --git a/catalog/packages/serializers/link.py b/catalog/packages/serializers/link.py
new file mode 100644
index 0000000..a6a503c
--- /dev/null
+++ b/catalog/packages/serializers/link.py
@@ -0,0 +1,24 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
class LinkSerializer(serializers.Serializer):
    """A hyperlink to a related resource."""

    href = serializers.CharField(
        required=True, allow_null=False, allow_blank=False,
        help_text='URI of the referenced resource'
    )
diff --git a/catalog/packages/serializers/nsd_info.py b/catalog/packages/serializers/nsd_info.py
new file mode 100644
index 0000000..9450582
--- /dev/null
+++ b/catalog/packages/serializers/nsd_info.py
@@ -0,0 +1,161 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .problem_details import ProblemDetailsSerializer
+from .link import LinkSerializer
+
+
class _LinkSerializer(serializers.Serializer):
    """Links exposed by an individual NS descriptor resource."""

    self = LinkSerializer(required=True, allow_null=False, help_text="URI of this resource.")
    nsd_content = LinkSerializer(required=True, allow_null=False, help_text="Link to the NSD content resource.")

    class Meta:
        # Unique swagger component name for drf-yasg schema generation.
        ref_name = "NSD_LinkSerializer"
+
+
class NsdInfoSerializer(serializers.Serializer):
    """NsdInfo: metadata of an individual NS descriptor resource (ETSI SOL005)."""

    id = serializers.CharField(
        required=True, allow_null=False, allow_blank=False,
        help_text="Identifier of the onboarded individual NS descriptor resource."
                  "This identifier is allocated by the NFVO."
    )
    nsdId = serializers.CharField(
        required=False, allow_null=True, allow_blank=True,
        help_text="This identifier, which is allocated by the NSD designer,"
                  "identifies the NSD in a globally unique way."
                  "It is copied from the NSD content and shall be present after the "
                  "NSD content is on-boarded."
    )
    nsdName = serializers.CharField(
        required=False, allow_null=True, allow_blank=True,
        help_text="Name of the onboarded NSD."
                  "This information is copied from the NSD content and shall be present "
                  "after the NSD content is on-boarded."
    )
    # TODO: data type is version
    nsdVersion = serializers.CharField(
        required=False, allow_null=True, allow_blank=True,
        help_text="Version of the on-boarded NSD."
                  "This information is copied from the NSD content and shall be "
                  "present after the NSD content is on-boarded."
    )
    nsdDesigner = serializers.CharField(
        required=False, allow_null=True, allow_blank=True,
        help_text="Designer of the on-boarded NSD."
                  "This information is copied from the NSD content and shall be "
                  "present after the NSD content is on-boarded."
    )
    nsdInvariantId = serializers.CharField(
        required=False, allow_null=True, allow_blank=True,
        help_text="This identifier, which is allocated by the NSD designer,"
                  "identifies an NSD in a version independent manner."
                  "This information is copied from the NSD content and shall be "
                  "present after the NSD content is on-boarded."
    )
    vnfPkgIds = serializers.ListSerializer(
        required=False, allow_null=True, allow_empty=True,
        child=serializers.CharField(help_text="Identifier of the VNF package", allow_blank=True),
        help_text="Identifies the VNF package for the VNFD referenced "
                  "by the on-boarded NS descriptor resource."
    )
    pnfdInfoIds = serializers.ListSerializer(
        required=False, allow_null=True, allow_empty=True,
        child=serializers.CharField(help_text="Identifier of the PnfdInfo element", allow_blank=True),
        help_text="Identifies the PnfdInfo element for the PNFD referenced "
                  "by the on-boarded NS descriptor resource."
    )
    nestedNsdInfoIds = serializers.ListSerializer(
        required=False, allow_null=True, allow_empty=True,
        child=serializers.CharField(help_text="Identifier of the NsdInfo element", allow_blank=True),
        help_text="Identifies the NsdInfo element for the nested NSD referenced "
                  "by the on-boarded NS descriptor resource."
    )
    nsdOnboardingState = serializers.ChoiceField(
        required=True, allow_null=False, allow_blank=False,
        choices=["CREATED", "UPLOADING", "PROCESSING", "ONBOARDED"],
        help_text="Onboarding state of the individual NS descriptor resource."
    )
    onboardingFailureDetails = ProblemDetailsSerializer(
        required=False, allow_null=True,
        help_text="Failure details of current onboarding procedure."
                  "It shall be present when the nsdOnboardingState attribute is CREATED "
                  "and the uploading or processing fails in NFVO."
    )
    nsdOperationalState = serializers.ChoiceField(
        required=True, allow_null=False, allow_blank=False,
        choices=["ENABLED", "DISABLED"],
        help_text="Operational state of the individual NS descriptor resource."
                  "This attribute can be modified with the PATCH method."
    )
    nsdUsageState = serializers.ChoiceField(
        required=True, allow_null=False,
        choices=["IN_USE", "NOT_IN_USE"],
        help_text="Usage state of the individual NS descriptor resource."
    )
    userDefinedData = serializers.DictField(
        required=False, allow_null=True,
        child=serializers.CharField(help_text="Key Value Pairs", allow_blank=True),
        help_text="User defined data for the individual NS descriptor resource."
                  "This attribute can be modified with the PATCH method."
    )
    _links = _LinkSerializer(
        required=True,
        allow_null=True,  # TODO: supposed to be False
        help_text="Links to resources related to this resource."
    )
diff --git a/catalog/packages/serializers/nsd_infos.py b/catalog/packages/serializers/nsd_infos.py
new file mode 100644
index 0000000..d63c332
--- /dev/null
+++ b/catalog/packages/serializers/nsd_infos.py
@@ -0,0 +1,20 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .nsd_info import NsdInfoSerializer
+
+
class NsdInfosSerializer(serializers.ListSerializer):
    """A list of NsdInfo entries."""

    child = NsdInfoSerializer()
diff --git a/catalog/packages/serializers/nsdm_filter_data.py b/catalog/packages/serializers/nsdm_filter_data.py
new file mode 100644
index 0000000..47d7680
--- /dev/null
+++ b/catalog/packages/serializers/nsdm_filter_data.py
@@ -0,0 +1,177 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from catalog.packages.const import NSDM_NOTIFICATION_TYPES
+
+
class NsdmNotificationsFilter(serializers.Serializer):
    """Filter criteria selecting which NSD management notifications a subscription receives.

    Every attribute is an optional list; a notification matches when it
    matches at least one value of each list that is present.
    """

    notificationTypes = serializers.ListField(
        required=False, allow_null=False,
        child=serializers.ChoiceField(required=True, choices=NSDM_NOTIFICATION_TYPES),
        help_text="Match particular notification types"
    )
    nsdInfoId = serializers.ListField(
        required=False, allow_null=False, child=serializers.UUIDField(),
        help_text="Match NS packages with particular nsdInfoIds"
    )
    nsdId = serializers.ListField(
        required=False, allow_null=False, child=serializers.UUIDField(),
        help_text="Match NS Packages with particular nsdIds"
    )
    nsdName = serializers.ListField(
        required=False, allow_null=False,
        child=serializers.CharField(required=True, max_length=255),
        help_text="Match NS Packages with particular nsdNames"
    )
    nsdVersion = serializers.ListField(
        required=False, allow_null=False,
        child=serializers.CharField(required=True, max_length=255),
        help_text="match NS packages that belong to certain nsdversion"
    )
    nsdInvariantId = serializers.ListField(
        required=False, allow_null=False, child=serializers.UUIDField(),
        help_text="Match NS Packages with particular nsdInvariantIds"
    )
    vnfPkgIds = serializers.ListField(
        required=False, allow_null=False, child=serializers.UUIDField(),
        help_text="Match NS Packages that has VNF PackageIds"
    )
    nestedNsdInfoIds = serializers.ListField(
        required=False, allow_null=False, child=serializers.UUIDField(),
        help_text="Match NS Packages with particular nsdInvariantIds"
    )
    nsdOnboardingState = serializers.ListField(
        required=False, allow_null=False,
        child=serializers.ChoiceField(
            required=True,
            choices=['CREATED', 'UPLOADING', 'PROCESSING', 'ONBOARDED']
        ),
        help_text="Match NS Packages with particular NS Onboarding State"
    )
    nsdOperationalState = serializers.ListField(
        required=False, allow_null=False,
        child=serializers.ChoiceField(required=True, choices=['ENABLED', 'DISABLED']),
        help_text="Match NS Packages with particular NS Operational State"
    )
    nsdUsageState = serializers.ListField(
        required=False, allow_null=False,
        child=serializers.ChoiceField(required=True, choices=['IN_USE', 'NOT_IN_USE']),
        help_text="Match NS Packages with particular NS Usage State"
    )
    pnfdInfoIds = serializers.ListField(
        required=False, allow_null=False, child=serializers.UUIDField(),
        help_text="Match PF packages with particular pnfdInfoIds"
    )
    pnfdId = serializers.ListField(
        required=False, allow_null=False, child=serializers.UUIDField(),
        help_text="Match PF packages with particular pnfdInfoIds"
    )
    pnfdName = serializers.ListField(
        required=False, allow_null=False,
        child=serializers.CharField(required=True, max_length=255),
        help_text="Match PF Packages with particular pnfdNames"
    )
    pnfdVersion = serializers.ListField(
        required=False, allow_null=False,
        child=serializers.CharField(required=True, max_length=255),
        help_text="match PF packages that belong to certain pnfd version"
    )
    pnfdProvider = serializers.ListField(
        required=False, allow_null=False,
        child=serializers.CharField(required=True, max_length=255),
        help_text="Match PF Packages with particular pnfdProvider"
    )
    pnfdInvariantId = serializers.ListField(
        required=False, allow_null=False, child=serializers.UUIDField(),
        help_text="Match PF Packages with particular pnfdInvariantIds"
    )
    pnfdOnboardingState = serializers.ListField(
        required=False, allow_null=False,
        child=serializers.ChoiceField(
            required=True,
            choices=['CREATED', 'UPLOADING', 'PROCESSING', 'ONBOARDED']
        ),
        help_text="Match PF Packages with particular PNF Onboarding State "
    )
    pnfdUsageState = serializers.ListField(
        required=False, allow_null=False,
        child=serializers.ChoiceField(required=True, choices=['IN_USE', 'NOT_IN_USE']),
        help_text="Match PF Packages with particular PNF usage State"
    )
diff --git a/catalog/packages/serializers/nsdm_subscription.py b/catalog/packages/serializers/nsdm_subscription.py
new file mode 100644
index 0000000..87aa48d
--- /dev/null
+++ b/catalog/packages/serializers/nsdm_subscription.py
@@ -0,0 +1,84 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from .link import LinkSerializer
+from .subscription_auth_data import SubscriptionAuthenticationSerializer
+from .nsdm_filter_data import NsdmNotificationsFilter
+
+
class NsdmSubscriptionLinkSerializer(serializers.Serializer):
    """Links related to an NSD management subscription resource."""

    self = LinkSerializer(required=True, help_text="Links to resources related to this resource.")
+
+
class NsdmSubscriptionSerializer(serializers.Serializer):
    """An individual NSD management subscription resource."""

    id = serializers.CharField(
        help_text="Identifier of this subscription resource.",
        max_length=255,
        required=True,
        allow_null=False
    )
    callbackUri = serializers.CharField(
        help_text="The URI of the endpoint to send the notification to.",
        max_length=255,
        required=True,
        allow_null=False
    )
    # BUG FIX: the help text read "to define the of all notifications",
    # dropping the word "subset" (cf. NsdmSubscriptionRequestSerializer).
    filter = NsdmNotificationsFilter(
        help_text="Filter settings for this subscription, to define the "
                  "subset of all notifications this subscription relates to.",
        required=False
    )
    _links = NsdmSubscriptionLinkSerializer(
        help_text="Links to resources related to this resource.",
        required=True
    )
+
+
class NsdmSubscriptionsSerializer(serializers.ListSerializer):
    """A list of NSD management subscription resources."""

    child = NsdmSubscriptionSerializer()
+
+
class NsdmSubscriptionIdSerializer(serializers.Serializer):
    """Wrapper holding the identifier of a subscription resource."""

    subscription_id = serializers.UUIDField(
        required=True, allow_null=False,
        help_text="Identifier of this subscription resource."
    )
+
+
class NsdmSubscriptionRequestSerializer(serializers.Serializer):
    """Request body for creating an NSD management subscription."""

    callbackUri = serializers.CharField(
        required=True, allow_null=False,
        help_text="The URI of the endpoint to send the notification to."
    )
    filter = NsdmNotificationsFilter(
        required=False, allow_null=True,
        help_text="Filter settings for the subscription,"
                  " to define the subset of all "
                  "notifications this subscription relates to."
    )
    authentication = SubscriptionAuthenticationSerializer(
        required=False, allow_null=True,
        help_text="Authentication parameters to configure"
                  " the use of Authorization when sending "
                  "notifications corresponding to this subscription."
    )
diff --git a/catalog/packages/serializers/pnfd_info.py b/catalog/packages/serializers/pnfd_info.py
new file mode 100644
index 0000000..f9f4b6b
--- /dev/null
+++ b/catalog/packages/serializers/pnfd_info.py
@@ -0,0 +1,107 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .problem_details import ProblemDetailsSerializer
+from .link import LinkSerializer
+
+
class _LinkSerializer(serializers.Serializer):
    """Links exposed by an individual PNF descriptor resource."""

    self = LinkSerializer(
        help_text='URI of this resource.',
        required=True,
        allow_null=False
    )
    pnfd_content = LinkSerializer(
        help_text='Link to the PNFD content resource.',
        required=True,
        allow_null=False
    )

    class Meta:
        # CONSISTENCY FIX: without an explicit ref_name this serializer
        # collides with the _LinkSerializer in nsd_info.py (which sets
        # ref_name = "NSD_LinkSerializer") when drf-yasg builds the
        # swagger schema, since both share the same class name.
        ref_name = 'PNFD_LinkSerializer'
+
+
class PnfdInfoSerializer(serializers.Serializer):
    """PnfdInfo: metadata of an individual PNF descriptor resource (ETSI SOL005)."""

    id = serializers.CharField(
        required=True, allow_null=False, allow_blank=False,
        help_text='Identifier of the onboarded individual PNF descriptor resource. \
        This identifier is allocated by the NFVO.'
    )
    pnfdId = serializers.CharField(
        required=False, allow_null=True, allow_blank=True,
        help_text='This identifier, which is allocated by the PNFD designer, \
        identifies the PNFD in a globally unique way. \
        It is copied from the PNFD content and shall be present after the PNFD content is on-boarded.'
    )
    pnfdName = serializers.CharField(
        required=False, allow_null=True, allow_blank=True,
        help_text='Name of the onboarded PNFD. \
        This information is copied from the PNFD content and shall be present after the PNFD content is on-boarded.'
    )
    # TODO: data type is version
    pnfdVersion = serializers.CharField(
        required=False, allow_null=True, allow_blank=True,
        help_text='Version of the on-boarded PNFD. \
        This information is copied from the PNFD content and shall be present after the PNFD content is on-boarded.'
    )
    pnfdProvider = serializers.CharField(
        required=False, allow_null=True, allow_blank=True,
        help_text='Provider of the on-boarded PNFD. \
        This information is copied from the PNFD content and shall be present after the PNFD content is on-boarded.'
    )
    pnfdInvariantId = serializers.CharField(
        required=False, allow_null=True, allow_blank=True,
        help_text='Identifies a PNFD in a version independent manner. \
        This attribute is invariant across versions of PNFD.'
    )
    pnfdOnboardingState = serializers.ChoiceField(
        required=True, allow_null=False, allow_blank=False,
        choices=['CREATED', 'UPLOADING', 'PROCESSING', 'ONBOARDED'],
        help_text='Onboarding state of the individual PNF descriptor resource.'
    )
    onboardingFailureDetails = ProblemDetailsSerializer(
        required=False, allow_null=True,
        help_text='Failure details of current onboarding procedure. \
        It shall be present when the "pnfdOnboardingState" attribute is CREATED and the uploading or processing fails in NFVO.'
    )
    pnfdUsageState = serializers.ChoiceField(
        required=True, allow_null=False,
        choices=['IN_USE', 'NOT_IN_USE'],
        help_text='Usage state of the individual PNF descriptor resource.'
    )
    userDefinedData = serializers.DictField(
        required=False, allow_null=True,
        child=serializers.CharField(help_text='Key Value Pairs', allow_blank=True),
        help_text='User defined data for the individual PNF descriptor resource. \
        This attribute can be modified with the PATCH method.'
    )
    _links = _LinkSerializer(
        required=True,
        allow_null=True,  # TODO: supposed to be False
        help_text='Links to resources related to this resource.'
    )
diff --git a/catalog/packages/serializers/pnfd_infos.py b/catalog/packages/serializers/pnfd_infos.py
new file mode 100644
index 0000000..0874c9e
--- /dev/null
+++ b/catalog/packages/serializers/pnfd_infos.py
@@ -0,0 +1,20 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .pnfd_info import PnfdInfoSerializer
+
+
class PnfdInfosSerializer(serializers.ListSerializer):
    """A list of PnfdInfo entries."""

    child = PnfdInfoSerializer()
diff --git a/catalog/packages/serializers/problem_details.py b/catalog/packages/serializers/problem_details.py
new file mode 100644
index 0000000..68d4500
--- /dev/null
+++ b/catalog/packages/serializers/problem_details.py
@@ -0,0 +1,58 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
class ProblemDetailsSerializer(serializers.Serializer):
    """RFC 7807 ProblemDetails structure as used by ETSI NFV SOL005 APIs."""

    type = serializers.CharField(
        help_text='A URI reference according to IETF RFC 3986 [10] that identifies the problem type. \
        It is encouraged that the URI provides human-readable documentation for the problem (e.g. using HTML) when dereferenced. \
        When this member is not present, its value is assumed to be "about:blank".',
        required=False,
        allow_null=True,
        allow_blank=True
    )
    title = serializers.CharField(
        help_text='A short, human-readable summary of the problem type. \
        It should not change from occurrence to occurrence of the problem, except for purposes of localization. \
        If type is given and other than "about:blank", this attribute shall also be provided.',
        required=False,
        allow_null=True,
        allow_blank=True
    )
    # BUG FIX: this field was also named 'title', silently shadowing the
    # CharField above and losing it from the serializer; per RFC 7807 the
    # HTTP status code member is named 'status'.
    status = serializers.IntegerField(
        help_text='The HTTP status code for this occurrence of the problem.',
        required=True,
        allow_null=False
    )
    detail = serializers.CharField(
        help_text='A human-readable explanation specific to this occurrence of the problem.',
        required=True,
        allow_null=False,
        allow_blank=False
    )
    instance = serializers.CharField(
        help_text='A URI reference that identifies the specific occurrence of the problem. \
        It may yield further information if dereferenced.',
        required=False,
        allow_null=True,
        allow_blank=True
    )
    additional_attributes = serializers.DictField(
        help_text='Any number of additional attributes, as defined in a specification or by an implementation.',
        child=serializers.CharField(help_text='Additional attribute', allow_blank=True),
        required=False,
        allow_null=True,
    )
diff --git a/catalog/packages/serializers/response.py b/catalog/packages/serializers/response.py
new file mode 100644
index 0000000..e2cca92
--- /dev/null
+++ b/catalog/packages/serializers/response.py
@@ -0,0 +1,51 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
class ProblemDetailsSerializer(serializers.Serializer):
    """ProblemDetails structure returned on subscription API errors."""

    # URI reference identifying the problem type.
    type = serializers.CharField(help_text="Type", required=False, allow_null=True)
    # Short, human-readable summary of the problem type.
    title = serializers.CharField(help_text="Title", required=False, allow_null=True)
    # HTTP status code for this occurrence of the problem.
    status = serializers.IntegerField(help_text="Status", required=True)
    # Human-readable explanation specific to this occurrence.
    detail = serializers.CharField(help_text="Detail", required=True, allow_null=True)
    # URI reference identifying the specific occurrence of the problem.
    instance = serializers.CharField(help_text="Instance", required=False, allow_null=True)
    additional_details = serializers.ListField(
        help_text="Any number of additional attributes, as defined in a "
                  "specification or by an implementation.",
        required=False,
        allow_null=True
    )

    class Meta:
        # Distinct swagger ref_name so this does not clash with the
        # ProblemDetailsSerializer defined in problem_details.py.
        ref_name = 'SUBSCRIPTION_ProblemDetailsSerializer'
diff --git a/catalog/packages/serializers/subscription_auth_data.py b/catalog/packages/serializers/subscription_auth_data.py
new file mode 100644
index 0000000..bf512d6
--- /dev/null
+++ b/catalog/packages/serializers/subscription_auth_data.py
@@ -0,0 +1,77 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from catalog.packages import const
+
+
class OAuthCredentialsSerializer(serializers.Serializer):
    """OAuth 2.0 client-credentials parameters used when delivering notifications."""

    clientId = serializers.CharField(
        help_text="Client identifier to be used in the access token request of the OAuth 2.0 client credentials grant type.",
        max_length=255,
        required=False,
        allow_null=False
    )
    clientPassword = serializers.CharField(
        help_text="Client password to be used in the access token request of the OAuth 2.0 client credentials grant type.",
        max_length=255,
        required=False,
        allow_null=False
    )
    tokenEndpoint = serializers.CharField(
        help_text="The token endpoint from which the access token can be obtained.",
        max_length=255,
        required=False,
        allow_null=False
    )
+
+
class BasicAuthSerializer(serializers.Serializer):
    """HTTP Basic authentication parameters used when delivering notifications."""

    userName = serializers.CharField(
        help_text="Username to be used in HTTP Basic authentication.",
        required=False,
        max_length=255,
        allow_null=False)
    password = serializers.CharField(
        help_text="Password to be used in HTTP Basic authentication.",
        required=False,
        max_length=255,
        allow_null=False)
+
+
class SubscriptionAuthenticationSerializer(serializers.Serializer):
    """Authentication/authorization settings for notification delivery."""

    # One or more accepted auth mechanisms, restricted to const.AUTH_TYPES.
    authType = serializers.ListField(
        help_text="Defines the types of Authentication / Authorization which the API consumer is willing to accept when receiving a notification.",
        child=serializers.ChoiceField(required=True, choices=const.AUTH_TYPES),
        required=True
    )
    paramsBasic = BasicAuthSerializer(
        help_text="Parameters for authentication/authorization using BASIC.",
        required=False,
        allow_null=False
    )
    paramsOauth2ClientCredentials = OAuthCredentialsSerializer(
        help_text="Parameters for authentication/authorization using OAUTH2_CLIENT_CREDENTIALS.",
        required=False,
        allow_null=False
    )
diff --git a/catalog/packages/serializers/upload_vnf_pkg_from_uri_req.py b/catalog/packages/serializers/upload_vnf_pkg_from_uri_req.py
new file mode 100644
index 0000000..b847484
--- /dev/null
+++ b/catalog/packages/serializers/upload_vnf_pkg_from_uri_req.py
@@ -0,0 +1,36 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
class UploadVnfPackageFromUriRequestSerializer(serializers.Serializer):
    """Request body for uploading VNF package content from a remote URI."""

    # Location the package content is fetched from.
    addressInformation = serializers.CharField(
        help_text="Address information of the VNF package content.",
        required=True, allow_null=False, allow_blank=False)
    # Optional credentials for accessing the remote location.
    userName = serializers.CharField(
        help_text="User name to be used for authentication.",
        required=False, allow_null=False, allow_blank=False)
    password = serializers.CharField(
        help_text="Password to be used for authentication.",
        required=False, allow_null=False, allow_blank=False)
diff --git a/catalog/packages/serializers/vnf_pkg_artifact_info.py b/catalog/packages/serializers/vnf_pkg_artifact_info.py
new file mode 100644
index 0000000..c63b3c2
--- /dev/null
+++ b/catalog/packages/serializers/vnf_pkg_artifact_info.py
@@ -0,0 +1,39 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .checksum import ChecksumSerializer
+
+
class VnfPackageArtifactInfoSerializer(serializers.Serializer):
    """Metadata of one non-software-image artifact contained in a VNF package."""

    artifactPath = serializers.CharField(
        help_text="Path in the VNF package.",
        required=True, allow_null=False, allow_blank=False)
    checksum = ChecksumSerializer(
        help_text="Checksum of the artifact file.",
        required=True, allow_null=False)
    # Free-form key/value pairs describing the artifact.
    metadata = serializers.DictField(
        help_text="The metadata of the artifact that are available in the VNF package",
        child=serializers.CharField(help_text="KeyValue Pairs", allow_blank=True),
        required=False, allow_null=True)
diff --git a/catalog/packages/serializers/vnf_pkg_info.py b/catalog/packages/serializers/vnf_pkg_info.py
new file mode 100644
index 0000000..3fa4b17
--- /dev/null
+++ b/catalog/packages/serializers/vnf_pkg_info.py
@@ -0,0 +1,127 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .checksum import ChecksumSerializer
+from .vnf_pkg_software_image_info import VnfPackageSoftwareImageInfoSerializer
+from .vnf_pkg_artifact_info import VnfPackageArtifactInfoSerializer
+from .link import LinkSerializer
+
+
class _LinkSerializer(serializers.Serializer):
    """Links exposed by a VnfPkgInfo resource."""
    self = LinkSerializer(
        help_text='URI of this resource.',
        required=True,
        allow_null=False
    )
    vnfd = LinkSerializer(
        help_text='Link to the VNFD resource.',
        required=False,
        allow_null=False
    )
    # Fix: help text previously contained an unbalanced quote
    # ('Link to the "VNF package content resource.').
    packageContent = LinkSerializer(
        help_text='Link to the "VNF package content" resource.',
        required=True,
        allow_null=False
    )

    class Meta:
        ref_name = 'VNF_PKGM_Link_Serializer'
+
+
class VnfPkgInfoSerializer(serializers.Serializer):
    """Representation of an individual VNF package resource (VnfPkgInfo)."""
    id = serializers.CharField(
        help_text="Identifier of the on-boarded VNF package.",
        required=True,
        allow_null=False,
        allow_blank=False
    )
    vnfdId = serializers.CharField(
        help_text="This identifier, which is managed by the VNF provider, "
                  "identifies the VNF package and the VNFD in a globally unique way.",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    vnfProvider = serializers.CharField(
        help_text="Provider of the VNF package and the VNFD.",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    vnfProductName = serializers.CharField(
        help_text="Name to identify the VNF product.",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    vnfSoftwareVersion = serializers.CharField(
        help_text="Software version of the VNF.",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    # Fix: help text previously read "The version of the VNvFD."
    vnfdVersion = serializers.CharField(
        help_text="The version of the VNFD.",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    checksum = ChecksumSerializer(
        help_text="Checksum of the on-boarded VNF package.",
        required=False,
        allow_null=True
    )
    softwareImages = VnfPackageSoftwareImageInfoSerializer(
        help_text="Information about VNF package artifacts that are software images.",
        required=False,
        allow_null=True,
        many=True
    )
    additionalArtifacts = VnfPackageArtifactInfoSerializer(
        help_text="Information about VNF package artifacts contained in "
                  "the VNF package that are not software images.",
        required=False,
        allow_null=True,
        many=True
    )
    onboardingState = serializers.ChoiceField(
        help_text="On-boarding state of the VNF package.",
        choices=["CREATED", "UPLOADING", "PROCESSING", "ONBOARDED"],
        required=True,
        allow_null=True
    )
    operationalState = serializers.ChoiceField(
        help_text="Operational state of the VNF package.",
        choices=["ENABLED", "DISABLED"],
        required=True,
        allow_null=True
    )
    usageState = serializers.ChoiceField(
        help_text="Usage state of the VNF package.",
        choices=["IN_USE", "NOT_IN_USE"],
        required=True,
        allow_null=True
    )
    userDefinedData = serializers.DictField(
        help_text="User defined data for the VNF package.",
        child=serializers.CharField(help_text="KeyValue Pairs", allow_blank=True),
        required=False,
        allow_null=True
    )
    _links = _LinkSerializer(
        help_text='Links to resources related to this resource.',
        required=True,
        allow_null=True  # TODO supposed to be False (mandatory attribute)
    )
diff --git a/catalog/packages/serializers/vnf_pkg_infos.py b/catalog/packages/serializers/vnf_pkg_infos.py
new file mode 100644
index 0000000..9ffd6f0
--- /dev/null
+++ b/catalog/packages/serializers/vnf_pkg_infos.py
@@ -0,0 +1,20 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .vnf_pkg_info import VnfPkgInfoSerializer
+
+
class VnfPkgInfosSerializer(serializers.ListSerializer):
    """List of VnfPkgInfo representations (response body of a GET on the collection)."""

    child = VnfPkgInfoSerializer()
diff --git a/catalog/packages/serializers/vnf_pkg_notifications.py b/catalog/packages/serializers/vnf_pkg_notifications.py
new file mode 100644
index 0000000..5e023af
--- /dev/null
+++ b/catalog/packages/serializers/vnf_pkg_notifications.py
@@ -0,0 +1,117 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from catalog.packages.const import NOTIFICATION_TYPES
+
# Permitted values for a VNF package's operational and usage state,
# used as ChoiceField choices by the notification-filter serializers below.
PackageOperationalStateType = ["ENABLED", "DISABLED"]
PackageUsageStateType = ["IN_USE", "NOT_IN_USE"]
+
+
class VersionSerializer(serializers.Serializer):
    """Version filter criteria for VNF products in a package-management subscription."""

    vnfSoftwareVersion = serializers.CharField(
        help_text="VNF software version to match.",
        max_length=255,
        required=True,
        allow_null=False)
    vnfdVersions = serializers.ListField(
        help_text="Match VNF packages that contain VNF products with certain VNFD versions",
        child=serializers.CharField(),
        required=False,
        allow_null=False)
+
+
class vnfProductsSerializer(serializers.Serializer):
    """Product-name filter criteria for a package-management subscription.

    NOTE(review): class name is lower-camel-case by project convention here;
    kept unchanged because sibling serializers reference it by this name.
    """

    vnfProductName = serializers.CharField(
        help_text="Name of the VNF product to match.",
        max_length=255,
        required=True,
        allow_null=False)
    versions = VersionSerializer(
        help_text="match VNF packages that contain VNF products with certain versions",
        required=False,
        allow_null=False)
+
+
class vnfProductsProvidersSerializer(serializers.Serializer):
    """Provider filter criteria for a package-management subscription."""
    # Fix: help text previously read "VNFprovider" (missing space).
    vnfProvider = serializers.CharField(
        help_text="Name of the VNF provider to match.",
        max_length=255,
        required=True,
        allow_null=False
    )
    vnfProducts = vnfProductsSerializer(
        help_text="match VNF packages that contain "
                  "VNF products with certain product names, "
                  "from one particular provider",
        required=False,
        allow_null=False
    )
+
+
class PkgmNotificationsFilter(serializers.Serializer):
    """Filter criteria selecting which package-management notifications a subscription receives."""
    notificationTypes = serializers.ListField(
        help_text="Match particular notification types",
        child=serializers.ChoiceField(
            required=True,
            choices=NOTIFICATION_TYPES
        ),
        allow_null=False,
        required=False
    )
    vnfProductsFromProviders = vnfProductsProvidersSerializer(
        help_text="Match VNF packages that contain "
                  "VNF products from certain providers.",
        allow_null=False,
        required=False
    )
    # Fix: concatenated help text previously rendered as
    # "...VNFD identifierlisted in the attribute" (missing space).
    vnfdId = serializers.ListField(
        help_text="Match VNF packages with a VNFD identifier "
                  "listed in the attribute",
        child=serializers.UUIDField(),
        required=False,
        allow_null=False
    )
    # Fix: help text was copy-pasted from vnfdId; this field matches
    # VNF *package* identifiers, and also had the missing-space defect.
    vnfPkgId = serializers.ListField(
        help_text="Match VNF packages with a package identifier "
                  "listed in the attribute",
        child=serializers.UUIDField(),
        required=False,
        allow_null=False
    )
    operationalState = serializers.ListField(
        help_text="Operational state of the VNF package.",
        child=serializers.ChoiceField(
            required=True,
            choices=PackageOperationalStateType
        ),
        allow_null=False,
        required=False
    )
    # Fix: help text previously said "Operational state" (copy-paste).
    usageState = serializers.ListField(
        help_text="Usage state of the VNF package.",
        child=serializers.ChoiceField(
            required=True,
            choices=PackageUsageStateType
        ),
        allow_null=False,
        required=False
    )
diff --git a/catalog/packages/serializers/vnf_pkg_software_image_info.py b/catalog/packages/serializers/vnf_pkg_software_image_info.py
new file mode 100644
index 0000000..790c61e
--- /dev/null
+++ b/catalog/packages/serializers/vnf_pkg_software_image_info.py
@@ -0,0 +1,96 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from .checksum import ChecksumSerializer
+
+
class VnfPackageSoftwareImageInfoSerializer(serializers.Serializer):
    """Metadata of one software-image artifact contained in a VNF package."""
    id = serializers.CharField(
        help_text="Identifier of the software image.",
        required=True,
        allow_null=False,
        allow_blank=False
    )
    name = serializers.CharField(
        help_text="Name of the software image.",
        required=True,
        allow_null=True,
        allow_blank=False
    )
    provider = serializers.CharField(
        help_text="Provider of the software image.",
        required=True,
        allow_null=True,
        allow_blank=False
    )
    version = serializers.CharField(
        help_text="Version of the software image.",
        required=True,
        allow_null=True,
        allow_blank=False
    )
    checksum = ChecksumSerializer(
        help_text="Checksum of the software image file.",
        required=True,
        allow_null=False
    )
    # Fix: help text was copy-pasted from a termination-type field
    # ("terminationType: Indicates whether forceful or graceful...").
    containerFormat = serializers.ChoiceField(
        help_text="Container format of the software image.",
        choices=["AKI", "AMI", "ARI", "BARE", "DOCKER", "OVA", "OVF"],
        required=True,
        allow_null=True
    )
    diskFormat = serializers.ChoiceField(
        help_text="Disk format of a software image is the format of the underlying disk image.",
        choices=["AKI", "AMI", "ARI", "ISO", "QCOW2", "RAW", "VDI", "VHD", "VHDX", "VMDK"],
        required=True,
        allow_null=True
    )
    createdAt = serializers.DateTimeField(
        help_text="Time when this software image was created.",
        required=True,
        format=None,
        input_formats=None
    )
    minDisk = serializers.IntegerField(
        help_text="The minimal disk for this software image in bytes.",
        required=True,
        allow_null=True
    )
    minRam = serializers.IntegerField(
        help_text="The minimal RAM for this software image in bytes.",
        required=True,
        allow_null=True
    )
    size = serializers.IntegerField(
        help_text="Size of this software image in bytes.",
        required=True,
        allow_null=True
    )
    userMetadata = serializers.DictField(
        help_text="User-defined data.",
        child=serializers.CharField(
            help_text="KeyValue Pairs",
            allow_blank=True
        ),
        required=False,
        allow_null=True
    )
    imagePath = serializers.CharField(
        help_text="Path in the VNF package.",
        required=True,
        allow_null=True,
        allow_blank=False
    )
diff --git a/catalog/packages/serializers/vnf_pkg_subscription.py b/catalog/packages/serializers/vnf_pkg_subscription.py
new file mode 100644
index 0000000..edcd6fe
--- /dev/null
+++ b/catalog/packages/serializers/vnf_pkg_subscription.py
@@ -0,0 +1,93 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from catalog.packages.serializers import subscription_auth_data
+from catalog.packages.serializers import vnf_pkg_notifications
+
+
class LinkSerializer(serializers.Serializer):
    """A hyperlink to a referenced resource."""

    href = serializers.CharField(
        help_text="URI of the referenced resource.",
        required=True, allow_null=False, allow_blank=False)

    class Meta:
        # Distinct swagger ref_name to avoid clashing with other Link serializers.
        ref_name = 'VNF_SUBSCRIPTION_LINKSERIALIZER'
+
+
class LinkSelfSerializer(serializers.Serializer):
    """Links holder exposing only the 'self' link of a resource."""

    self = LinkSerializer(
        help_text="URI of this resource.",
        required=True, allow_null=False)
+
+
class PkgmSubscriptionRequestSerializer(serializers.Serializer):
    """Request body for creating a VNF package management subscription."""
    filters = vnf_pkg_notifications.PkgmNotificationsFilter(
        help_text="Filter settings for this subscription, "
                  "to define the subset of all notifications "
                  "this subscription relates to",
        required=False,
        allow_null=False
    )
    # Fix: concatenated help text previously rendered as
    # "Callback URI to sendthe notification" (missing space).
    callbackUri = serializers.URLField(
        help_text="Callback URI to send the notification",
        required=True,
        allow_null=False
    )
    # Fix: concatenated help text previously rendered as
    # "...corresponding tothis subscription" (missing space).
    authentication = subscription_auth_data.SubscriptionAuthenticationSerializer(
        help_text="Authentication parameters to configure the use of "
                  "authorization when sending notifications corresponding "
                  "to this subscription",
        required=False,
        allow_null=False
    )
+
+
class PkgmSubscriptionSerializer(serializers.Serializer):
    """Representation of an individual package-management subscription resource."""

    id = serializers.UUIDField(
        help_text="Identifier of this subscription resource.",
        required=True, allow_null=False)
    callbackUri = serializers.URLField(
        help_text="The URI of the endpoint to send the notification to.",
        required=True, allow_null=False)
    _links = LinkSelfSerializer(
        help_text="Links to resources related to this resource.",
        required=True, allow_null=False)
    filter = vnf_pkg_notifications.PkgmNotificationsFilter(
        help_text="Filter settings for this subscription, to define the subset of all notifications this subscription relates to",
        required=False, allow_null=False)
+
+
class PkgmSubscriptionsSerializer(serializers.ListSerializer):
    """List of package-management subscription representations."""

    child = PkgmSubscriptionSerializer()
    # An empty subscription list is a valid response.
    allow_empty = True
diff --git a/catalog/packages/tests/__init__.py b/catalog/packages/tests/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/catalog/packages/tests/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/packages/tests/const.py b/catalog/packages/tests/const.py
new file mode 100644
index 0000000..991c87c
--- /dev/null
+++ b/catalog/packages/tests/const.py
@@ -0,0 +1,596 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Parsed-VNFD model fixture shared by the catalog package test cases.
vnfd_data = {
    "volume_storages": [
        {
            "properties": {
                "size_of_storage": {
                    "factor": 10,
                    "value": 10000000000,
                    "unit": "GB",
                    "unit_size": 1000000000
                },
                "type_of_storage": "volume",
                "rdma_enabled": False,
                "size": "10 GB"
            },
            "volume_storage_id": "vNAT_Storage_6wdgwzedlb6sq18uzrr41sof7",
            "description": ""
        }
    ],
    "inputs": {},
    "vdus": [
        {
            "volume_storages": [
                "vNAT_Storage_6wdgwzedlb6sq18uzrr41sof7"
            ],
            "description": "",
            "dependencies": [],
            "vls": [],
            "properties": {
                "name": "vNat",
                "configurable_properties": {
                    "test": {
                        "additional_vnfc_configurable_properties": {
                            "aaa": "1",
                            "bbb": "2",
                            "ccc": "3"
                        }
                    }
                },
                "description": "the virtual machine of vNat",
                "nfvi_constraints": [
                    "test"
                ],
                "boot_order": [
                    "vNAT_Storage"
                ]
            },
            "vdu_id": "vdu_vNat",
            "artifacts": [
                {
                    "artifact_name": "vNatVNFImage",
                    "type": "tosca.artifacts.nfv.SwImage",
                    "properties": {
                        "operating_system": "linux",
                        "sw_image": "/swimages/vRouterVNF_ControlPlane.qcow2",
                        "name": "vNatVNFImage",
                        "container_format": "bare",
                        "min_ram": "1 GB",
                        "disk_format": "qcow2",
                        "supported_virtualisation_environments": [
                            "test_0"
                        ],
                        "version": "1.0",
                        "checksum": "5000",
                        "min_disk": "10 GB",
                        "size": "10 GB"
                    },
                    "file": "/swimages/vRouterVNF_ControlPlane.qcow2"
                }
            ],
            "nfv_compute": {
                "flavor_extra_specs": {
                    "hw:cpu_sockets": "2",
                    "sw:ovs_dpdk": "true",
                    "hw:cpu_threads": "2",
                    "hw:numa_mem.1": "3072",
                    "hw:numa_mem.0": "1024",
                    "hw:numa_nodes": "2",
                    "hw:numa_cpus.0": "0,1",
                    "hw:numa_cpus.1": "2,3,4,5",
                    "hw:cpu_cores": "2",
                    "hw:cpu_threads_policy": "isolate"
                },
                "cpu_frequency": "2.4 GHz",
                "num_cpus": 2,
                "mem_size": "10 GB"
            },
            "local_storages": [],
            "image_file": "vNatVNFImage",
            "cps": []
        }
    ],
    "image_files": [
        {
            "properties": {
                "operating_system": "linux",
                "sw_image": "/swimages/vRouterVNF_ControlPlane.qcow2",
                "name": "vNatVNFImage",
                "container_format": "bare",
                "min_ram": "1 GB",
                "disk_format": "qcow2",
                "supported_virtualisation_environments": [
                    "test_0"
                ],
                "version": "1.0",
                "checksum": "5000",
                "min_disk": "10 GB",
                "size": "10 GB"
            },
            "image_file_id": "vNatVNFImage",
            "description": ""
        }
    ],
    "routers": [],
    "local_storages": [],
    "vnf_exposed": {
        "external_cps": [
            {
                "key_name": "sriov_plane",
                "cp_id": "SRIOV_Port"
            }
        ],
        "forward_cps": []
    },
    "vls": [
        {
            "route_id": "",
            "vl_id": "sriov_link",
            "route_external": False,
            "description": "",
            "properties": {
                "vl_flavours": {
                    "vl_id": "aaaa"
                },
                "connectivity_type": {
                    "layer_protocol": "ipv4",
                    "flow_pattern": "flat"
                },
                "description": "sriov_link",
                "test_access": [
                    "test"
                ]
            }
        }
    ],
    "cps": [
        {
            "vl_id": "sriov_link",
            "vdu_id": "vdu_vNat",
            "description": "",
            "cp_id": "SRIOV_Port",
            "properties": {
                "address_data": [
                    {
                        "address_type": "ip_address",
                        "l3_address_data": {
                            "ip_address_type": "ipv4",
                            "floating_ip_activated": False,
                            "number_of_ip_address": 1,
                            "ip_address_assignment": True
                        }
                    }
                ],
                "description": "sriov port",
                "layer_protocol": "ipv4",
                "virtual_network_interface_requirements": [
                    {
                        "requirement": {
                            "SRIOV": "true"
                        },
                        "support_mandatory": False,
                        "name": "sriov",
                        "description": "sriov"
                    },
                    {
                        "requirement": {
                            "SRIOV": "False"
                        },
                        "support_mandatory": False,
                        "name": "normal",
                        "description": "normal"
                    }
                ],
                "role": "root",
                "bitrate_requirement": 10
            }
        }
    ],
    "metadata": {
        "vnfSoftwareVersion": "1.0.0",
        "vnfProductName": "zte",
        "localizationLanguage": [
            "english",
            "chinese"
        ],
        "vnfProvider": "zte",
        "vnfmInfo": "zte",
        "defaultLocalizationLanguage": "english",
        "vnfdId": "zte-hss-1.0",
        "id": "zte-hss-1.0",
        "vnfProductInfoDescription": "hss",
        "vnfdVersion": "1.0.0",
        "vnfProductInfoName": "hss"
    },
    "vnf": {
        "properties": {
            "descriptor_id": "zte-hss-1.0",
            "descriptor_version": "1.0.0",
            "software_version": "1.0.0",
            "provider": "zte"
        },
        "metadata": {
        }
    }
}
+
# Parsed-NSD model fixture shared by the catalog package test cases.
nsd_data = {
    "vnffgs": [
        {
            "vnffg_id": "vnffg1",
            "description": "",
            "members": ["path1", "path2"],
            "properties": {
                "vendor": "zte",
                "connection_point": ["m6000_data_in", "m600_tunnel_cp", "m6000_data_out"],
                "version": "1.0",
                "constituent_vnfs": ["VFW", "VNAT"],
                "number_of_endpoints": 3,
                "dependent_virtual_link": ["sfc_data_network", "ext_datanet_net", "ext_mnet_net"]
            }
        }
    ],
    "inputs": {
        "sfc_data_network": {"type": "string", "value": "sfc_data_network"},
        "externalDataNetworkName": {"type": "string", "value": "vlan_4004_tunnel_net"},
        "externalManageNetworkName": {"type": "string", "value": "vlan_4008_mng_net"},
        "NatIpRange": {"type": "string", "value": "192.167.0.10-192.168.0.20"},
        "externalPluginManageNetworkName": {"type": "string", "value": "vlan_4007_plugin_net"}
    },
    "pnfs": [
        {
            "pnf_id": "m6000_s",
            "cps": [],
            "description": "",
            "properties": {
                "vendor": "zte",
                "request_reclassification": False,
                "pnf_type": "m6000s",
                "version": "1.0",
                "management_address": "111111",
                "id": "m6000_s",
                "nsh_aware": False
            }
        }
    ],
    "fps": [
        {
            "properties": {
                "symmetric": False,
                "policy": {
                    "type": "ACL",
                    "criteria": {
                        "dest_port_range": "1-100",
                        "ip_protocol": "tcp",
                        "source_ip_range": ["119.1.1.1-119.1.1.10"],
                        "dest_ip_range": [{"get_input": "NatIpRange"}],
                        "dscp": 0,
                        "source_port_range": "1-100"
                    }
                }
            },
            "forwarder_list": [
                {"capability": "", "type": "cp", "node_name": "m6000_data_out"},
                {"capability": "", "type": "cp", "node_name": "m600_tunnel_cp"},
                {"capability": "vnat_fw_inout", "type": "vnf", "node_name": "VNAT"}
            ],
            "description": "",
            "fp_id": "path2"
        },
        {
            "properties": {
                "symmetric": True,
                "policy": {
                    "type": "ACL",
                    "criteria": {
                        "dest_port_range": "1-100",
                        "ip_protocol": "tcp",
                        "source_ip_range": ["1-100"],
                        "dest_ip_range": ["1-100"],
                        "dscp": 4,
                        "source_port_range": "1-100"
                    }
                }
            },
            "forwarder_list": [
                {"capability": "", "type": "cp", "node_name": "m6000_data_in"},
                {"capability": "", "type": "cp", "node_name": "m600_tunnel_cp"},
                {"capability": "vfw_fw_inout", "type": "vnf", "node_name": "VFW"},
                {"capability": "vnat_fw_inout", "type": "vnf", "node_name": "VNAT"},
                {"capability": "", "type": "cp", "node_name": "m600_tunnel_cp"},
                {"capability": "", "type": "cp", "node_name": "m6000_data_out"}
            ],
            "description": "",
            "fp_id": "path1"
        }
    ],
    "routers": [],
    "vnfs": [
        {
            "vnf_id": "VFW",
            "description": "",
            "properties": {
                "plugin_info": "vbrasplugin_1.0",
                "vendor": "zte",
                "is_shared": False,
                "adjust_vnf_capacity": True,
                "name": "VFW",
                "vnf_extend_type": "driver",
                "csarVersion": "v1.0",
                "csarType": "NFAR",
                "csarProvider": "ZTE",
                "version": "1.0",
                "nsh_aware": True,
                "cross_dc": False,
                "vnf_type": "VFW",
                "vmnumber_overquota_alarm": True,
                "vnfd_version": "1.0.0",
                "externalPluginManageNetworkName": "vlan_4007_plugin_net",
                "id": "vcpe_vfw_zte_1_0",
                "request_reclassification": False
            },
            "dependencies": [
                {"key_name": "vfw_ctrl_by_manager_cp", "vl_id": "ext_mnet_net"},
                {"key_name": "vfw_data_cp", "vl_id": "sfc_data_network"}
            ],
            "type": "tosca.nodes.nfv.ext.zte.VNF.VFW",
            "networks": []
        }
    ],
    "ns_exposed": {
        "external_cps": [],
        "forward_cps": []
    },
    "policies": [
        {"file_url": "policies/abc.drl", "name": "aaa"}
    ],
    "vls": [
        {
            "route_id": "",
            "vl_id": "ext_mnet_net",
            "route_external": False,
            "description": "",
            "properties": {
                "name": "vlan_4008_mng_net",
                "mtu": 1500,
                "location_info": {"tenant": "admin", "vimid": 2, "availability_zone": "nova"},
                "ip_version": 4,
                "dhcp_enabled": True,
                "network_name": "vlan_4008_mng_net",
                "network_type": "vlan"
            }
        },
        {
            "route_id": "",
            "vl_id": "ext_datanet_net",
            "route_external": False,
            "description": "",
            "properties": {
                "name": "vlan_4004_tunnel_net",
                "mtu": 1500,
                "location_info": {"tenant": "admin", "vimid": 2, "availability_zone": "nova"},
                "ip_version": 4,
                "dhcp_enabled": True,
                "network_name": "vlan_4004_tunnel_net",
                "network_type": "vlan"
            }
        },
        {
            "route_id": "",
            "vl_id": "sfc_data_network",
            "route_external": False,
            "description": "",
            "properties": {
                "name": "sfc_data_network",
                "dhcp_enabled": True,
                "is_predefined": False,
                "location_info": {"tenant": "admin", "vimid": 2, "availability_zone": "nova"},
                "ip_version": 4,
                "mtu": 1500,
                "network_name": "sfc_data_network",
                "network_type": "vlan"
            }
        }
    ],
    "cps": [
        {
            "pnf_id": "m6000_s",
            "vl_id": "path2",
            "description": "",
            "cp_id": "m6000_data_out",
            "properties": {
                "direction": "bidirectional",
                "vnic_type": "normal",
                "bandwidth": 0,
                "mac_address": "11-22-33-22-11-44",
                "interface_name": "xgei-0/4/1/5",
                "ip_address": "176.1.1.2",
                "order": 0,
                "sfc_encapsulation": "mac"
            }
        },
        {
            "pnf_id": "m6000_s",
            "vl_id": "ext_datanet_net",
            "description": "",
            "cp_id": "m600_tunnel_cp",
            "properties": {
                "direction": "bidirectional",
                "vnic_type": "normal",
                "bandwidth": 0,
                "mac_address": "00-11-00-22-33-00",
                "interface_name": "gei-0/4/0/13",
                "ip_address": "191.167.100.5",
                "order": 0,
                "sfc_encapsulation": "mac"
            }
        },
        {
            "pnf_id": "m6000_s",
            "vl_id": "path2",
            "description": "",
            "cp_id": "m6000_data_in",
            "properties": {
                "direction": "bidirectional",
                "vnic_type": "normal",
                "bandwidth": 0,
                "mac_address": "11-22-33-22-11-41",
                "interface_name": "gei-0/4/0/7",
                "ip_address": "1.1.1.1",
                "order": 0,
                "sfc_encapsulation": "mac",
                "bond": "none"
            }
        },
        {
            "pnf_id": "m6000_s",
            "vl_id": "ext_mnet_net",
            "description": "",
            "cp_id": "m600_mnt_cp",
            "properties": {
                "direction": "bidirectional",
                "vnic_type": "normal",
                "bandwidth": 0,
                "mac_address": "00-11-00-22-33-11",
                "interface_name": "gei-0/4/0/1",
                "ip_address": "10.46.244.51",
                "order": 0,
                "sfc_encapsulation": "mac",
                "bond": "none"
            }
        }
    ],
    "metadata": {
        "invariant_id": "vcpe_ns_sff_1",
        "name": "VCPE_NS",
        "csarVersion": "v1.0",
        "csarType": "NSAR",
        "csarProvider": "ZTE",
        "version": 1,
        "vendor": "ZTE",
        "id": "VCPE_NS",
        "description": "vcpe_ns"
    },
    "ns": {
        "properties": {
            "descriptor_id": "VCPE_NS",
            "version": 1,
            "name": "VCPE_NS",
            # NOTE(review): "desginer" typo preserved — consumers key on it.
            "desginer": "ZTE",
            "invariant_id": "vcpe_ns_sff_1"
        }
    }
}
+
# Minimal parsed-PNFD fixture: only the metadata id is needed by the tests
# that exercise PNF descriptor onboarding.
pnfd_data = {
    "metadata": {
        "id": "zte-1.0",
    }
}
+
# Parsed service-descriptor fixture for an "Enhance_Service" E2E service,
# shaped like the output of the tosca parser (inputs / pnfs / vnfs / service
# / metadata sections).
# NOTE(review): "verison" in service.properties looks like a typo for
# "version", but if it mirrors the key actually emitted by the parser it must
# stay as-is -- confirm against toscaparser output before changing it.
sd_data = {
    "inputs": {
        "sdwanvpnresource_list": [
            {
                "sdwanvpn_topology": "",
                "required": True,
                "type": "string"
            }
        ]
    },
    # One physical network function (an m6000s device) with no connection points.
    "pnfs": [
        {
            "pnf_id": "m6000_s",
            "cps": [],
            "description": "",
            "properties": {
                "vendor": "zte",
                "request_reclassification": False,
                "pnf_type": "m6000s",
                "version": "1.0",
                "management_address": "111111",
                "id": "m6000_s",
                "nsh_aware": False
            }
        }
    ],
    "description": "",
    # One SD-WAN site resource VNF; most site properties are intentionally blank.
    "vnfs": [
        {
            "vnf_id": "sdwansiteresource",
            "description": "",
            "properties": {
                "sdwandevice_type": "",
                "sdwandevice_class": "PNF",
                "multi_stage_design": "false",
                "min_instances": "1",
                "sdwansite_controlPoint": "",
                "id": "cd557883-ac4b-462d-aa01-421b5fa606b1",
                "sdwansite_longitude": "",
                "sdwansite_latitude": "",
                "sdwansite_postcode": "",
                "sdwansite_type": "",
                "nf_naming": {
                    "ecomp_generated_naming": True
                },
                "sdwansite_emails": "",
                "sdwansite_role": "",
                "vnfm_info": "",
                "sdwansite_address": "",
                "sdwansite_description": "",
                "availability_zone_max_count": "1",
                "sdwansite_name": ""
            }
        }
    ],
    "service": {
        "type": "org.openecomp.service.EnhanceService",
        "properties": {
            "descriptor_id": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
            "designer": "",
            "invariant_id": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
            "name": "Enhance_Service",
            "verison": ""
        },
        # SDC-style service metadata; duplicated verbatim at the top level below.
        "metadata": {
            "category": "E2E Service",
            "serviceType": "",
            "description": "Enhance_Service",
            "instantiationType": "A-la-carte",
            "type": "Service",
            "environmentContext": "General_Revenue-Bearing",
            "serviceEcompNaming": True,
            "UUID": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
            "ecompGeneratedNaming": True,
            "serviceRole": "",
            "invariantUUID": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
            "namingPolicy": "",
            "name": "Enhance_Service"
        }
    },
    "metadata": {
        "category": "E2E Service",
        "serviceType": "",
        "description": "Enhance_Service",
        "instantiationType": "A-la-carte",
        "type": "Service",
        "environmentContext": "General_Revenue-Bearing",
        "serviceEcompNaming": True,
        "UUID": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
        "ecompGeneratedNaming": True,
        "serviceRole": "",
        "invariantUUID": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
        "namingPolicy": "",
        "name": "Enhance_Service"
    }
}
+
# Sample VNF-package-management subscription request body (ETSI SOL005 style):
# a notification filter, the callback endpoint, and BASIC-auth credentials
# used when the server calls the subscriber back.
vnf_subscription_data = {
    "filters": {
        "notificationTypes": [
            "VnfPackageOnboardingNotification"
        ],
        "vnfProductsFromProviders": {
            "vnfProvider": "string",
            "vnfProducts": {
                "vnfProductName": "string",
                "versions": {
                    "vnfSoftwareVersion": "string",
                    "vnfdVersions": [
                        "string"
                    ]
                }
            }
        },
        "vnfdId": [
            "3fa85f64-5717-4562-b3fc-2c963f66afa6"
        ],
        "vnfPkgId": [
            "3fa85f64-5717-4562-b3fc-2c963f66afa6"
        ],
        "operationalState": [
            "ENABLED"
        ],
        "usageState": [
            "IN_USE"
        ]
    },
    "callbackUri": "http://www.vnf1.com/notification",
    "authentication": {
        "authType": [
            "BASIC"
        ],
        "paramsBasic": {
            "userName": "string",
            "password": "string"
        }
    }
}
diff --git a/catalog/packages/tests/test_health_check.py b/catalog/packages/tests/test_health_check.py
new file mode 100644
index 0000000..f8c3b56
--- /dev/null
+++ b/catalog/packages/tests/test_health_check.py
@@ -0,0 +1,50 @@
+# Copyright (c) 2019, CMCC Technologies Co., Ltd.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+
+from django.test import TestCase, Client
+from rest_framework import status
+
+
class TestHealthCheck(TestCase):
    """Smoke tests for the health_check endpoint of each northbound API.

    Every API root (vnfpkgm, nsd, catalog, parser) is expected to answer
    GET .../health_check with HTTP 200 and the JSON body {"status": "active"}.
    """

    def setUp(self):
        # Plain Django test client; the health endpoints need no authentication.
        self.client = Client()

    def tearDown(self):
        pass

    def test_vnfpkgm_health_check(self):
        """VNF package management API reports active."""
        response = self.client.get("/api/vnfpkgm/v1/health_check")
        self.assertEqual(status.HTTP_200_OK, response.status_code, response.content)
        resp_data = json.loads(response.content)
        self.assertEqual({"status": "active"}, resp_data)

    def test_nsd_health_check(self):
        """NSD management API reports active."""
        response = self.client.get("/api/nsd/v1/health_check")
        self.assertEqual(status.HTTP_200_OK, response.status_code, response.content)
        resp_data = json.loads(response.content)
        self.assertEqual({"status": "active"}, resp_data)

    def test_catalog_health_check(self):
        """Catalog API reports active."""
        response = self.client.get("/api/catalog/v1/health_check")
        self.assertEqual(status.HTTP_200_OK, response.status_code, response.content)
        resp_data = json.loads(response.content)
        self.assertEqual({"status": "active"}, resp_data)

    def test_parser_health_check(self):
        """Parser API reports active."""
        response = self.client.get("/api/parser/v1/health_check")
        self.assertEqual(status.HTTP_200_OK, response.status_code, response.content)
        resp_data = json.loads(response.content)
        self.assertEqual({"status": "active"}, resp_data)
diff --git a/catalog/packages/tests/test_ns_descriptor.py b/catalog/packages/tests/test_ns_descriptor.py
new file mode 100644
index 0000000..473786e
--- /dev/null
+++ b/catalog/packages/tests/test_ns_descriptor.py
@@ -0,0 +1,300 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+import json
+import mock
+import os
+
+from django.test import TestCase
+from rest_framework import status
+from rest_framework.test import APIClient
+from catalog.packages.biz.ns_descriptor import NsDescriptor
+from catalog.packages.const import PKG_STATUS
+from catalog.packages.tests.const import nsd_data
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import NSPackageModel, VnfPackageModel, PnfPackageModel
+from catalog.pub.utils import toscaparser
+
+
class TestNsDescriptor(TestCase):
    """Tests for the SOL005 NSD management API (/api/nsd/v1/ns_descriptors).

    Covers the CRUD lifecycle of NS descriptor resources plus upload and
    (partial) download of NSD content, and the 500-path when the business
    layer raises.
    """

    def setUp(self):
        self.client = APIClient()
        # Opaque caller metadata; the API must echo it back in userDefinedData.
        self.user_defined_data = {
            'key1': 'value1',
            'key2': 'value2',
            'key3': 'value3',
        }
        # Baseline NsdInfo representation for a freshly created descriptor;
        # individual tests patch 'id'/'nsdId' before comparing.
        self.expected_nsd_info = {
            'id': None,
            'nsdId': None,
            'nsdName': None,
            'nsdVersion': None,
            'nsdDesigner': None,
            'nsdInvariantId': None,
            'vnfPkgIds': [],
            'pnfdInfoIds': [],
            'nestedNsdInfoIds': [],
            'nsdOnboardingState': 'CREATED',
            'onboardingFailureDetails': None,
            'nsdOperationalState': 'DISABLED',
            'nsdUsageState': 'NOT_IN_USE',
            'userDefinedData': self.user_defined_data,
            '_links': None
        }
        # Minimal parsed NSD model referencing one PNF and one VNF.
        self.nsdModel = {
            "pnfs": [{"properties": {"id": "m6000_s"}}],
            "vnfs": [{"properties": {"id": "123"}}]
        }

    def tearDown(self):
        pass

    def test_nsd_create_normal(self):
        """POST creates a descriptor in CREATED/DISABLED/NOT_IN_USE state."""
        reqest_data = {'userDefinedData': self.user_defined_data}
        expected_reponse_data = {
            'nsdOnboardingState': 'CREATED',
            'nsdOperationalState': 'DISABLED',
            'nsdUsageState': 'NOT_IN_USE',
            'userDefinedData': self.user_defined_data,
            '_links': None
        }

        response = self.client.post(
            '/api/nsd/v1/ns_descriptors',
            data=reqest_data,
            format='json'
        )
        # The server generates the id; drop it before comparing the rest.
        response.data.pop('id')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(expected_reponse_data, response.data)

    def test_query_multiple_nsds_normal(self):
        """GET without filter returns all descriptors; ?nsdId= filters to one."""
        expected_reponse_data = [
            copy.deepcopy(self.expected_nsd_info),
            copy.deepcopy(self.expected_nsd_info)
        ]
        expected_reponse_data[0]['id'] = '0'
        expected_reponse_data[0]['nsdId'] = '0'
        expected_reponse_data[1]['id'] = '1'
        expected_reponse_data[1]['nsdId'] = '1'

        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        for i in range(2):
            NSPackageModel(
                nsPackageId=str(i),
                onboardingState='CREATED',
                operationalState='DISABLED',
                usageState='NOT_IN_USE',
                userDefinedData=user_defined_data,
                nsdId=str(i)
            ).save()

        response = self.client.get('/api/nsd/v1/ns_descriptors', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(expected_reponse_data, response.data)

        # Filtering by nsdId must return only the matching descriptor.
        expected_reponse_data = [
            copy.deepcopy(self.expected_nsd_info)
        ]
        expected_reponse_data[0]['id'] = '1'
        expected_reponse_data[0]['nsdId'] = '1'
        response = self.client.get('/api/nsd/v1/ns_descriptors?nsdId=1', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(expected_reponse_data, response.data)

    def test_query_single_nsd_normal(self):
        """GET by id returns the stored descriptor's NsdInfo."""
        expected_reponse_data = copy.deepcopy(self.expected_nsd_info)
        expected_reponse_data['id'] = '22'

        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        NSPackageModel(
            nsPackageId='22',
            onboardingState='CREATED',
            operationalState='DISABLED',
            usageState='NOT_IN_USE',
            userDefinedData=user_defined_data,
            nsdModel=json.JSONEncoder().encode(self.nsdModel)
        ).save()

        response = self.client.get('/api/nsd/v1/ns_descriptors/22', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(expected_reponse_data, response.data)

    def test_query_single_when_ns_not_exist(self):
        """GET of an unknown descriptor id yields 404."""
        response = self.client.get('/api/nsd/v1/ns_descriptors/22', format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_delete_single_nsd_normal(self):
        """DELETE of an existing descriptor yields 204 with empty body."""
        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        NSPackageModel(
            nsPackageId='21',
            operationalState='DISABLED',
            usageState='NOT_IN_USE',
            userDefinedData=user_defined_data,
            nsdModel='test'
        ).save()

        response = self.client.delete("/api/nsd/v1/ns_descriptors/21", format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(None, response.data)

    def test_delete_when_ns_not_exist(self):
        # Deleting a missing descriptor is idempotent: still 204.
        response = self.client.delete("/api/nsd/v1/ns_descriptors/21", format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    @mock.patch.object(toscaparser, 'parse_nsd')
    def test_nsd_content_upload_normal(self, mock_parse_nsd):
        """PUT nsd_content stores the file, parses it and onboards the package.

        The parser is mocked to return the canned nsd_data; the referenced
        VNF/PNF packages are pre-created so descriptor resolution succeeds.
        """
        user_defined_data_json = json.JSONEncoder().encode(self.user_defined_data)
        mock_parse_nsd.return_value = json.JSONEncoder().encode(nsd_data)
        VnfPackageModel(
            vnfPackageId="111",
            vnfdId="vcpe_vfw_zte_1_0"
        ).save()

        PnfPackageModel(
            pnfPackageId="112",
            pnfdId="m6000_s"
        ).save()

        NSPackageModel(
            nsPackageId='22',
            operationalState='DISABLED',
            usageState='NOT_IN_USE',
            userDefinedData=user_defined_data_json,
        ).save()

        with open('nsd_content.txt', 'wt') as fp:
            fp.write('test')
        with open('nsd_content.txt', 'rt') as fp:
            resp = self.client.put(
                "/api/nsd/v1/ns_descriptors/22/nsd_content",
                {'file': fp},
            )
        file_content = ''
        # The uploaded file must land under CATALOG_ROOT_PATH/<package id>/.
        with open(os.path.join(CATALOG_ROOT_PATH, '22/nsd_content.txt')) as fp:
            data = fp.read()
            file_content = '%s%s' % (file_content, data)
        ns_pkg = NSPackageModel.objects.filter(nsPackageId="22")
        self.assertEqual("VCPE_NS", ns_pkg[0].nsdId)
        self.assertEqual(PKG_STATUS.ONBOARDED, ns_pkg[0].onboardingState)
        self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(None, resp.data)
        self.assertEqual(file_content, 'test')
        os.remove('nsd_content.txt')

    def test_nsd_content_upload_failure(self):
        # Uploading to a descriptor that was never created fails with 500.
        with open('nsd_content.txt', 'wt') as fp:
            fp.write('test')
        with open('nsd_content.txt', 'rt') as fp:
            response = self.client.put(
                "/api/nsd/v1/ns_descriptors/22/nsd_content",
                {'file': fp},
            )
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    def test_nsd_content_download_normal(self):
        """GET nsd_content streams the stored file back unchanged."""
        with open('nsd_content.txt', 'wt') as fp:
            fp.writelines('test1')
            fp.writelines('test2')
        NSPackageModel.objects.create(
            nsPackageId='23',
            onboardingState='ONBOARDED',
            localFilePath='nsd_content.txt'
        )
        response = self.client.get(
            "/api/nsd/v1/ns_descriptors/23/nsd_content", format='json'
        )
        file_content = ""
        for data in response.streaming_content:
            file_content = '%s%s' % (file_content, data.decode())
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual("test1test2", file_content)
        os.remove('nsd_content.txt')

    def test_nsd_content_download_when_ns_not_exist(self):
        """Downloading content of an unknown descriptor yields 404."""
        response = self.client.get("/api/nsd/v1/ns_descriptors/23/nsd_content", format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_nsd_content_download_failed(self):
        # Content of a package still in CREATED state is not downloadable.
        NSPackageModel.objects.create(
            nsPackageId='23',
            onboardingState='CREATED',
            localFilePath='nsd_content.txt'
        )
        response = self.client.get("/api/nsd/v1/ns_descriptors/23/nsd_content", format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    def test_nsd_content_partial_download_normal(self):
        """A Range request returns only the requested byte span.

        NOTE(review): the view answers a ranged request with 200 rather than
        206 Partial Content -- confirm this matches the intended SOL005
        behavior of the views implementation.
        """
        with open('nsd_content.txt', 'wt') as fp:
            fp.writelines('test1')
            fp.writelines('test2')
        NSPackageModel(
            nsPackageId='23',
            onboardingState='ONBOARDED',
            localFilePath='nsd_content.txt'
        ).save()

        response = self.client.get(
            "/api/nsd/v1/ns_descriptors/23/nsd_content",
            HTTP_RANGE='5-10',
            format='json'
        )
        partial_file_content = ''
        for data in response.streaming_content:
            partial_file_content = '%s%s' % (partial_file_content, data.decode())
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual("test2", partial_file_content)
        os.remove('nsd_content.txt')

    @mock.patch.object(NsDescriptor, 'create')
    def test_nsd_create_when_catch_exception(self, mock_create):
        """Any exception from the business layer maps to HTTP 500."""
        reqest_data = {'userDefinedData': self.user_defined_data}
        mock_create.side_effect = TypeError("integer type")
        response = self.client.post('/api/nsd/v1/ns_descriptors', data=reqest_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(NsDescriptor, 'query_single')
    def test_query_single_when_catch_exception(self, mock_query_single):
        mock_query_single.side_effect = TypeError("integer type")
        response = self.client.get('/api/nsd/v1/ns_descriptors/22', format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(NsDescriptor, 'query_multiple')
    def test_query_multiple_when_catch_exception(self, mock_query_multipe):
        mock_query_multipe.side_effect = TypeError("integer type")
        response = self.client.get('/api/nsd/v1/ns_descriptors', format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(NsDescriptor, 'delete_single')
    def test_delete_when_catch_exception(self, mock_delete_single):
        mock_delete_single.side_effect = TypeError("integer type")
        response = self.client.delete("/api/nsd/v1/ns_descriptors/21", format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(NsDescriptor, 'upload')
    def test_upload_when_catch_exception(self, mock_upload):
        mock_upload.side_effect = TypeError("integer type")
        with open('nsd_content.txt', 'wt') as fp:
            fp.write('test')
        with open('nsd_content.txt', 'rt') as fp:
            response = self.client.put("/api/nsd/v1/ns_descriptors/22/nsd_content", {'file': fp})
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
        os.remove('nsd_content.txt')

    @mock.patch.object(NsDescriptor, 'download')
    def test_download_when_catch_exception(self, mock_download):
        mock_download.side_effect = TypeError("integer type")
        response = self.client.get("/api/nsd/v1/ns_descriptors/23/nsd_content", format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
diff --git a/catalog/packages/tests/test_nsdm_subscription.py b/catalog/packages/tests/test_nsdm_subscription.py
new file mode 100644
index 0000000..f73c416
--- /dev/null
+++ b/catalog/packages/tests/test_nsdm_subscription.py
@@ -0,0 +1,521 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import mock
+import uuid
+from django.test import TestCase
+from rest_framework.test import APIClient
+from rest_framework import status
+
+from catalog.packages.biz.nsdm_subscription import NsdmSubscription
+from catalog.pub.database.models import NsdmSubscriptionModel
+
+
class TestNsdmSubscription(TestCase):
    """Tests for the SOL005 NSD-management subscription API
    (/api/nsd/v1/subscriptions).

    Covers subscription creation (including callback verification and the
    various 400-validation paths), querying with and without filters, fetch
    and delete of a single subscription, and the 500-path when the business
    layer raises.
    """

    def setUp(self):
        self.client = APIClient()
        NsdmSubscriptionModel.objects.all().delete()
        self.subscription_id = str(uuid.uuid4())
        # Valid subscription request body used by the happy-path tests.
        self.subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["BASIC"],
                "paramsBasic": {
                    "userName": "username",
                    "password": "password"
                }
            },
            "filter": {
                "nsdId": ["b632bddc-abcd-4180-bd8d-4e8a9578eff7"],
            }
        }
        self.links = {
            "self": {
                "href": "/api/v1/subscriptions/" + self.subscription_id
            }
        }
        # API representation expected for the row persisted by
        # _create_subscription_in_db().
        self.test_subscription = {
            "callbackUri": "http://callbackuri.com",
            "id": self.subscription_id,
            "filter": {
                "notificationTypes": [
                    "NsdOnBoardingNotification"
                ],
                "nsdInfoId": [],
                "nsdId": [],
                "nsdName": [],
                "nsdVersion": [],
                "nsdInvariantId": [],
                "vnfPkgIds": [],
                "nestedNsdInfoIds": [],
                "nsdOnboardingState": [],
                "nsdOperationalState": [],
                "nsdUsageState": [],
                "pnfdInfoIds": [],
                "pnfdId": [],
                "pnfdName": [],
                "pnfdVersion": [],
                "pnfdProvider": [],
                "pnfdInvariantId": [],
                "pnfdOnboardingState": [],
                "pnfdUsageState": []
            },
            "_links": self.links,
        }

    def tearDown(self):
        pass

    def _create_subscription_in_db(self):
        """Persist one subscription row matching self.test_subscription.

        Extracted helper: the identical model constructor was previously
        copy-pasted into seven test methods.
        """
        NsdmSubscriptionModel(
            subscriptionid=self.subscription_id,
            callback_uri="http://callbackuri.com",
            auth_info={},
            notificationTypes=json.dumps(["NsdOnBoardingNotification"]),
            nsdId=[], nsdVersion=[],
            nsdInfoId=[], nsdDesigner=[],
            nsdName=[], nsdInvariantId=[],
            vnfPkgIds=[], pnfdInfoIds=[],
            nestedNsdInfoIds=[], nsdOnboardingState=[],
            nsdOperationalState=[], nsdUsageState=[],
            pnfdId=[], pnfdVersion=[], pnfdProvider=[],
            pnfdName=[], pnfdInvariantId=[],
            pnfdOnboardingState=[], pnfdUsageState=[],
            links=json.dumps(self.links)
        ).save()

    @mock.patch("requests.get")
    @mock.patch.object(uuid, 'uuid4')
    def test_nsdm_subscribe_notification(self, mock_uuid4, mock_requests):
        """Successful subscription: callback answers 204, server returns 201."""
        temp_uuid = str(uuid.uuid4())
        mock_requests.return_value.status_code = 204
        mock_requests.get.return_value.status_code = 204
        mock_uuid4.return_value = temp_uuid
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=self.subscription, format='json')
        self.assertEqual(201, response.status_code)
        self.assertEqual(self.subscription["callbackUri"],
                         response.data["callbackUri"])
        self.assertEqual(temp_uuid, response.data["id"])

    @mock.patch("requests.get")
    @mock.patch.object(uuid, 'uuid4')
    def test_nsdm_subscribe_callbackFailure(self, mock_uuid4, mock_requests):
        """Callback URI not answering 204 makes the subscription fail with 500."""
        temp_uuid = str(uuid.uuid4())
        mock_requests.return_value.status_code = 500
        mock_requests.get.return_value.status_code = 500
        mock_uuid4.return_value = temp_uuid
        expected_data = {
            'status': 500,
            'detail': "callbackUri http://callbackuri.com didn't"
                      " return 204 statuscode."
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=self.subscription, format='json')
        self.assertEqual(500, response.status_code)
        self.assertEqual(expected_data, response.data)

    @mock.patch("requests.get")
    def test_nsdm_second_subscription(self, mock_requests):
        """A second subscription with a different filter is accepted."""
        mock_requests.return_value.status_code = 204
        mock_requests.get.return_value.status_code = 204
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=self.subscription, format='json')
        self.assertEqual(201, response.status_code)
        self.assertEqual(self.subscription["callbackUri"],
                         response.data["callbackUri"])
        dummy_subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["BASIC"],
                "paramsBasic": {
                    "userName": "username",
                    "password": "password"
                }
            },
            "filter": {
                "nsdId": ["b632bddc-bccd-4180-bd8d-4e8a9578eff7"],
            }
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=dummy_subscription, format='json')
        self.assertEqual(201, response.status_code)
        self.assertEqual(dummy_subscription["callbackUri"],
                         response.data["callbackUri"])

    @mock.patch("requests.get")
    def test_nsdm_duplicate_subscription(self, mock_requests):
        """Same callbackUri + filter twice is rejected with 303."""
        mock_requests.return_value.status_code = 204
        mock_requests.get.return_value.status_code = 204
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=self.subscription, format='json')
        self.assertEqual(201, response.status_code)
        self.assertEqual(self.subscription["callbackUri"],
                         response.data["callbackUri"])
        expected_data = {
            'status': 303,
            'detail': 'Already Subscription exists with'
                      ' the same callbackUri and filter'
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=self.subscription, format='json')
        self.assertEqual(303, response.status_code)
        self.assertEqual(expected_data, response.data)

    @mock.patch("requests.get")
    def test_nsdm_bad_request(self, mock_requests):
        """nsdId must be a list; a bare string is a 400."""
        dummy_subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["BASIC"],
                "paramsBasic": {
                    "userName": "username",
                    "password": "password"
                }
            },
            "filter": {
                "nsdId": "b632bddc-bccd-4180-bd8d-4e8a9578eff7",
            }
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=dummy_subscription, format='json')
        self.assertEqual(400, response.status_code)

    @mock.patch("requests.get")
    def test_nsdm_invalid_authtype_subscription(self, mock_requests):
        """paramsBasic with a non-BASIC authType is a 400."""
        dummy_subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["OAUTH2_CLIENT_CREDENTIALS"],
                "paramsBasic": {
                    "userName": "username",
                    "password": "password"
                }
            }
        }
        mock_requests.return_value.status_code = 204
        mock_requests.get.return_value.status_code = 204
        expected_data = {
            'status': 400,
            'detail': 'Auth type should be BASIC'
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=dummy_subscription, format='json')
        self.assertEqual(400, response.status_code)
        self.assertEqual(expected_data, response.data)

    @mock.patch("requests.get")
    def test_nsdm_invalid_authtype_oauthclient_subscription(
            self, mock_requests):
        """OAuth params with a BASIC authType is a 400."""
        dummy_subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["BASIC"],
                "paramsOauth2ClientCredentials": {
                    "clientId": "clientId",
                    "clientPassword": "password",
                    "tokenEndpoint": "http://tokenEndpoint"
                }
            }
        }
        mock_requests.return_value.status_code = 204
        mock_requests.get.return_value.status_code = 204
        expected_data = {
            'status': 400,
            'detail': 'Auth type should be OAUTH2_CLIENT_CREDENTIALS'
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=dummy_subscription, format='json')
        self.assertEqual(400, response.status_code)
        self.assertEqual(expected_data, response.data)

    @mock.patch("requests.get")
    def test_nsdm_invalid_authparams_subscription(self, mock_requests):
        """BASIC auth missing the password is a 400."""
        dummy_subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["BASIC"],
                "paramsBasic": {
                    "userName": "username"
                }
            }
        }
        mock_requests.return_value.status_code = 204
        mock_requests.get.return_value.status_code = 204
        expected_data = {
            'status': 400,
            'detail': 'userName and password needed for BASIC'
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=dummy_subscription, format='json')
        self.assertEqual(400, response.status_code)
        self.assertEqual(expected_data, response.data)

    @mock.patch("requests.get")
    def test_nsdm_invalid_authparams_oauthclient_subscription(
            self, mock_requests):
        """OAuth credentials missing the clientId are a 400."""
        dummy_subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["OAUTH2_CLIENT_CREDENTIALS"],
                "paramsOauth2ClientCredentials": {
                    "clientPassword": "password",
                    "tokenEndpoint": "http://tokenEndpoint"
                }
            }
        }
        mock_requests.return_value.status_code = 204
        mock_requests.get.return_value.status_code = 204
        expected_data = {
            'status': 400,
            'detail': 'clientId, clientPassword and tokenEndpoint'
                      ' required for OAUTH2_CLIENT_CREDENTIALS'
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=dummy_subscription, format='json')
        self.assertEqual(400, response.status_code)
        self.assertEqual(expected_data, response.data)

    @mock.patch("requests.get")
    def test_nsdm_invalid_filter_subscription(self, mock_requests):
        """nsdId and nsdInfoId are mutually exclusive filter attributes."""
        dummy_subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["BASIC"],
                "paramsBasic": {
                    "userName": "username",
                    "password": "password"
                }
            },
            "filter": {
                "nsdId": ["b632bddc-bccd-4180-bd8d-4e8a9578eff7"],
                "nsdInfoId": ["d0ea5ec3-0b98-438a-9bea-488230cff174"]
            }
        }
        mock_requests.return_value.status_code = 204
        mock_requests.get.return_value.status_code = 204
        expected_data = {
            'status': 400,
            'detail': 'Notification Filter should contain'
                      ' either nsdId or nsdInfoId'
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=dummy_subscription, format='json')
        self.assertEqual(400, response.status_code)
        self.assertEqual(expected_data, response.data)

    @mock.patch("requests.get")
    def test_nsdm_invalid_filter_pnfd_subscription(self, mock_requests):
        """pnfdId and pnfdInfoIds are mutually exclusive filter attributes."""
        dummy_subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["BASIC"],
                "paramsBasic": {
                    "userName": "username",
                    "password": "password"
                }
            },
            "filter": {
                "pnfdId": ["b632bddc-bccd-4180-bd8d-4e8a9578eff7"],
                "pnfdInfoIds": ["d0ea5ec3-0b98-438a-9bea-488230cff174"]
            }
        }
        mock_requests.return_value.status_code = 204
        mock_requests.get.return_value.status_code = 204
        expected_data = {
            'status': 400,
            'detail': 'Notification Filter should contain'
                      ' either pnfdId or pnfdInfoIds'
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=dummy_subscription, format='json')
        self.assertEqual(400, response.status_code)
        self.assertEqual(expected_data, response.data)

    @mock.patch.object(NsdmSubscription, 'create')
    def test_nsdmsubscription_create_when_catch_exception(self, mock_create):
        """Any exception from the business layer maps to HTTP 500."""
        mock_create.side_effect = TypeError("Unicode type")
        response = self.client.post('/api/nsd/v1/subscriptions',
                                    data=self.subscription, format='json')
        self.assertEqual(response.status_code,
                         status.HTTP_500_INTERNAL_SERVER_ERROR)

    def test_nsdm_get_subscriptions(self):
        """GET without filters lists all stored subscriptions."""
        self._create_subscription_in_db()
        response = self.client.get("/api/nsd/v1/subscriptions",
                                   format='json')
        self.assertEqual(status.HTTP_200_OK, response.status_code)
        self.assertEqual([self.test_subscription], response.data)

    def test_nsdm_get_subscriptions_filter(self):
        """GET with a matching notificationTypes filter returns the row."""
        self._create_subscription_in_db()
        response = self.client.get("/api/nsd/v1/subscriptions"
                                   "?notificationTypes"
                                   "=NsdOnBoardingNotification",
                                   format='json')
        self.assertEqual(status.HTTP_200_OK, response.status_code)
        self.assertEqual([self.test_subscription], response.data)

    def test_nsdm_get_subscriptions_filter_failure(self):
        """A valid but non-matching filter yields 404."""
        self._create_subscription_in_db()
        response = self.client.get("/api/nsd/v1/subscriptions"
                                   "?notificationTypes="
                                   "PnfdOnBoardingFailureNotification",
                                   format='json')
        self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code)

    def test_nsdm_get_subscriptions_invalid_filter(self):
        """An unknown notificationTypes value yields 400."""
        self._create_subscription_in_db()
        response = self.client.get("/api/nsd/v1/subscriptions"
                                   "?notificationTypes="
                                   "PnfdOnBoardingFailureNotificati",
                                   format='json')
        self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code)

    @mock.patch.object(NsdmSubscription, 'query_multi_subscriptions')
    def test_nsdmsubscription_get_when_catch_exception(self, mock_create):
        mock_create.side_effect = TypeError("Unicode type")
        response = self.client.get('/api/nsd/v1/subscriptions', format='json')
        self.assertEqual(response.status_code,
                         status.HTTP_500_INTERNAL_SERVER_ERROR)

    def test_nsdm_get_subscription(self):
        """GET by id returns the stored subscription representation."""
        self._create_subscription_in_db()
        response = self.client.get('/api/nsd/v1/'
                                   'subscriptions/' + self.subscription_id,
                                   format='json')
        self.assertEqual(status.HTTP_200_OK, response.status_code)
        self.assertEqual(self.test_subscription, response.data)

    def test_nsdm_get_subscription_failure(self):
        """GET of an unknown subscription id yields 404 with a problem body."""
        expected_data = {
            "status": 404,
            "detail": "Subscription(" + self.subscription_id + ") "
                      "doesn't exists"
        }
        response = self.client.get('/api/nsd/v1/'
                                   'subscriptions/' + self.subscription_id,
                                   format='json')
        self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code)
        self.assertEqual(expected_data, response.data)

    def test_nsdm_get_subscription_failure_bad_request(self):
        # A non-UUID subscription id is rejected before lookup.
        response = self.client.get("/api/nsd/v1/subscriptions/123",
                                   format='json')
        self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code)

    @mock.patch.object(NsdmSubscription, 'query_single_subscription')
    def test_nsdmsubscription_getsingle_when_catch_exception(
            self, mock_create):
        mock_create.side_effect = TypeError("Unicode type")
        response = self.client.get('/api/nsd/v1/'
                                   'subscriptions/' + self.subscription_id,
                                   format='json')
        self.assertEqual(response.status_code,
                         status.HTTP_500_INTERNAL_SERVER_ERROR)

    def test_ndsm_delete_subscription(self):
        """DELETE of an existing subscription yields 204."""
        self._create_subscription_in_db()
        response = self.client.delete('/api/nsd/v1/'
                                      'subscriptions/' + self.subscription_id,
                                      format='json')
        self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code)

    def test_ndsm_delete_subscription_failure(self):
        """DELETE of an unknown subscription id yields 404."""
        response = self.client.delete('/api/nsd/v1/'
                                      'subscriptions/' + self.subscription_id,
                                      format='json')
        self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code)

    def test_nsdm_delete_subscription_failure_bad_request(self):
        # A non-UUID subscription id is rejected before lookup.
        response = self.client.delete("/api/nsd/v1/subscriptions/123",
                                      format='json')
        self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code)

    @mock.patch.object(NsdmSubscription, 'delete_single_subscription')
    def test_nsdmsubscription_delete_when_catch_exception(self, mock_create):
        mock_create.side_effect = TypeError("Unicode type")
        response = self.client.delete('/api/nsd/v1/'
                                      'subscriptions/' + self.subscription_id,
                                      format='json')
        self.assertEqual(response.status_code,
                         status.HTTP_500_INTERNAL_SERVER_ERROR)
diff --git a/catalog/packages/tests/test_nspackage.py b/catalog/packages/tests/test_nspackage.py
new file mode 100644
index 0000000..91f3503
--- /dev/null
+++ b/catalog/packages/tests/test_nspackage.py
@@ -0,0 +1,246 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import mock
+from rest_framework import status
+from django.test import TestCase
+from django.test import Client
+
+from catalog.pub.utils import restcall, toscaparser
+from catalog.pub.database.models import NSPackageModel, VnfPackageModel, PnfPackageModel
+from catalog.pub.msapi import sdc
+from .const import nsd_data
+
+
class TestNsPackage(TestCase):
    """Tests for NS package distribution, query, delete and parse APIs."""

    def setUp(self):
        # Start from a clean database so each test controls its own fixtures.
        self.client = Client()
        NSPackageModel.objects.filter().delete()
        VnfPackageModel.objects.filter().delete()
        self.nsd_data = nsd_data

    def tearDown(self):
        pass

    def test_ns_pkg_distribute_when_ns_exists(self):
        """Distributing a CSAR id that is already on-boarded reports failure."""
        NSPackageModel(nsPackageId="1", nsdId="2").save()
        resp = self.client.post(
            "/api/catalog/v1/nspackages", {"csarId": "1"}, format='json')
        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
        self.assertEqual("failed", resp.data["status"])
        self.assertEqual(
            "NS CSAR(1) already exists.",
            resp.data["statusDescription"])

    @mock.patch.object(restcall, 'call_req')
    def test_ns_pkg_distribute_when_csar_not_exist(self, mock_call_req):
        """Distribution fails when SDC reports no matching artifact."""
        # Empty artifact list from SDC.
        mock_call_req.return_value = [0, "[]", '200']
        resp = self.client.post(
            "/api/catalog/v1/nspackages", {"csarId": "1"}, format='json')
        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
        self.assertEqual("failed", resp.data["status"])
        self.assertEqual(
            "Failed to query artifact(services,1) from sdc.",
            resp.data["statusDescription"])

    @mock.patch.object(restcall, 'call_req')
    @mock.patch.object(sdc, 'download_artifacts')
    @mock.patch.object(toscaparser, 'parse_nsd')
    def test_ns_pkg_distribute_when_nsd_already_exists(
            self, mock_parse_nsd, mock_download_artifacts, mock_call_req):
        """Distribution fails when the parsed nsdId is already on-boarded."""
        mock_parse_nsd.return_value = json.JSONEncoder().encode(self.nsd_data)
        mock_download_artifacts.return_value = "/home/vcpe.csar"
        mock_call_req.return_value = [0, json.JSONEncoder().encode([{
            "uuid": "1",
            "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/vcpe.csar",
            "distributionStatus": "DISTRIBUTED"
        }]), '200']
        # Pre-existing package with the same nsdId as the parsed NSD.
        NSPackageModel(nsPackageId="2", nsdId="VCPE_NS").save()
        resp = self.client.post(
            "/api/catalog/v1/nspackages", {"csarId": "1"}, format='json')
        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
        self.assertEqual("failed", resp.data["status"])
        self.assertEqual(
            "NSD(VCPE_NS) already exists.",
            resp.data["statusDescription"])

    @mock.patch.object(restcall, 'call_req')
    @mock.patch.object(sdc, 'download_artifacts')
    @mock.patch.object(toscaparser, 'parse_nsd')
    def test_ns_pkg_distribute_when_nf_not_distributed(
            self, mock_parse_nsd, mock_download_artifacts, mock_call_req):
        """Distribution fails when a referenced VNF package is missing."""
        mock_parse_nsd.return_value = json.JSONEncoder().encode(self.nsd_data)
        mock_download_artifacts.return_value = "/home/vcpe.csar"
        mock_call_req.return_value = [0, json.JSONEncoder().encode([{
            "uuid": "1",
            "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/vcpe.csar",
            "distributionStatus": "DISTRIBUTED",
        }]), '200']
        # No VnfPackageModel rows exist, so the referenced VNF is missing.
        resp = self.client.post(
            "/api/catalog/v1/nspackages", {"csarId": "1"}, format='json')
        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
        self.assertEqual("failed", resp.data["status"])
        self.assertEqual(
            "VNF package(vcpe_vfw_zte_1_0) is not distributed.",
            resp.data["statusDescription"])

    @mock.patch.object(restcall, 'call_req')
    @mock.patch.object(sdc, 'download_artifacts')
    @mock.patch.object(toscaparser, 'parse_nsd')
    def test_ns_pkg_distribute_when_successfully(
            self, mock_parse_nsd, mock_download_artifacts, mock_call_req):
        """Happy path: all referenced VNF/PNF packages exist, so it succeeds."""
        mock_parse_nsd.return_value = json.JSONEncoder().encode(self.nsd_data)
        mock_download_artifacts.return_value = "/home/vcpe.csar"
        mock_call_req.return_value = [0, json.JSONEncoder().encode([{
            "uuid": "1",
            "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/vcpe.csar",
            "distributionStatus": "DISTRIBUTED"
        }]), '200']
        VnfPackageModel(vnfPackageId="1", vnfdId="vcpe_vfw_zte_1_0").save()
        PnfPackageModel(pnfPackageId="1", pnfdId="m6000_s").save()
        resp = self.client.post(
            "/api/catalog/v1/nspackages", {"csarId": "1"}, format='json')
        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
        self.assertEqual("success", resp.data["status"])
        self.assertEqual(
            "CSAR(1) distributed successfully.",
            resp.data["statusDescription"])

    @mock.patch.object(sdc, 'get_artifacts')
    def test_ns_when_not_distributed_by_sdc(self, mock_get_artifacts):
        """Distribution fails when the SDC artifact is not yet DISTRIBUTED."""
        mock_get_artifacts.return_value = [{
            "uuid": "1",
            "invariantUUID": "63eaec39-ffbe-411c-a838-448f2c73f7eb",
            "name": "underlayvpn",
            "version": "2.0",
            "toscaModelURL": "/sdc/v1/catalog/resources/c94490a0-f7ef-48be-b3f8-8d8662a37236/toscaModel",
            "category": "Volte",
            "subCategory": "VolteVNF",
            "resourceType": "VF",
            "lifecycleState": "CERTIFIED",
            "distributionStatus": "DISTRIBUTION_APPROVED",
            "lastUpdaterUserId": "jh0003"
        }]
        resp = self.client.post(
            "/api/catalog/v1/nspackages", {"csarId": "1"}, format='json')
        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
        self.assertEqual("failed", resp.data["status"])
        self.assertEqual(
            "The artifact (services,1) is not distributed from sdc.",
            resp.data["statusDescription"])

    ##########################################################################

    def test_ns_pkg_normal_delete(self):
        """Deleting an existing NS package succeeds."""
        NSPackageModel(nsPackageId="8", nsdId="2").save()
        resp = self.client.delete("/api/catalog/v1/nspackages/8")
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual("success", resp.data["status"])
        self.assertEqual(
            "Delete CSAR(8) successfully.",
            resp.data["statusDescription"])

    def test_ns_pkg_get_all(self):
        """Listing NS packages returns one entry per stored package."""
        NSPackageModel(
            nsPackageId="13",
            nsdId="2",
            nsdDesginer="2",
            nsdVersion="2",
            nsPackageUri="13.csar",
            nsdModel="").save()
        NSPackageModel(
            nsPackageId="14",
            nsdId="3",
            nsdDesginer="3",
            nsdVersion="3",
            nsPackageUri="14.csar",
            nsdModel="").save()
        resp = self.client.get("/api/catalog/v1/nspackages")
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        expect_data = [{"csarId": "13",
                        "packageInfo": {"csarName": "13.csar",
                                        "nsdProvider": "2",
                                        "nsdId": "2",
                                        "nsPackageId": "13",
                                        "downloadUrl": "http://127.0.0.1:8806/static/catalog/13/13.csar",
                                        "nsdModel": "",
                                        "nsdVersion": "2",
                                        "nsdInvariantId": None
                                        }},
                       {"csarId": "14",
                        "packageInfo": {"csarName": "14.csar",
                                        "nsdProvider": "3",
                                        "nsdId": "3",
                                        "nsPackageId": "14",
                                        "downloadUrl": "http://127.0.0.1:8806/static/catalog/14/14.csar",
                                        "nsdModel": "",
                                        "nsdVersion": "3",
                                        "nsdInvariantId": None}}]
        self.assertEqual(expect_data, resp.data)

    def test_ns_pkg_get_one(self):
        """Fetching a single NS package returns its full packageInfo."""
        NSPackageModel(
            nsPackageId="14",
            nsdId="2",
            nsdDesginer="3",
            nsdVersion="4",
            nsPackageUri="14.csar",
            nsdModel="").save()
        resp = self.client.get("/api/catalog/v1/nspackages/14")
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        expect_data = {
            "csarId": "14",
            "packageInfo": {
                "nsdId": "2",
                "nsPackageId": "14",
                "nsdProvider": "3",
                "nsdVersion": "4",
                "csarName": "14.csar",
                "nsdModel": "",
                "downloadUrl": "http://127.0.0.1:8806/static/catalog/14/14.csar",
                "nsdInvariantId": None}}
        self.assertEqual(expect_data, resp.data)

    def test_ns_pkg_get_one_not_found(self):
        """Fetching a missing NS package yields a 500 with an error body."""
        resp = self.client.get("/api/catalog/v1/nspackages/22")
        self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
        self.assertEqual(
            {"error": "Ns package[22] not Found."},
            resp.data)

    ##########################################################################

    @mock.patch.object(toscaparser, 'parse_nsd')
    def test_nsd_parse_normal(self, mock_parse_nsd):
        """Parsing an on-boarded NSD returns the parser's model string."""
        NSPackageModel(nsPackageId="18", nsdId="12").save()
        mock_parse_nsd.return_value = json.JSONEncoder().encode({"a": "b"})
        req_data = {"csarId": "18", "inputs": []}
        resp = self.client.post(
            "/api/catalog/v1/parsernsd",
            req_data,
            format='json')
        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
        self.assertEqual({"model": '{"a": "b"}'}, resp.data)

    def test_nsd_parse_when_csar_not_exist(self):
        """Parsing an unknown CSAR id yields a 500 with an error body."""
        req_data = {"csarId": "1", "inputs": []}
        resp = self.client.post(
            "/api/catalog/v1/parsernsd",
            req_data,
            format='json')
        self.assertEqual(
            resp.status_code,
            status.HTTP_500_INTERNAL_SERVER_ERROR)
        self.assertEqual(resp.data, {"error": "NS CSAR(1) does not exist."})
diff --git a/catalog/packages/tests/test_pnf_descriptor.py b/catalog/packages/tests/test_pnf_descriptor.py
new file mode 100644
index 0000000..8af8614
--- /dev/null
+++ b/catalog/packages/tests/test_pnf_descriptor.py
@@ -0,0 +1,286 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import copy
+import json
+import mock
+import os
+import shutil
+
+
+from django.test import TestCase
+from rest_framework import status
+from rest_framework.test import APIClient
+from catalog.packages.biz.pnf_descriptor import PnfDescriptor
+from catalog.packages.const import PKG_STATUS
+from catalog.packages.tests.const import pnfd_data
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import PnfPackageModel, NSPackageModel
+from catalog.pub.utils import toscaparser
+
+
class TestPnfDescriptor(TestCase):
    """Tests for the PNF descriptor CRUD and content upload/download APIs.

    Fixes over the original version:
    * ``test_pnfd_content_upload_when_pnf_not_exist`` and
      ``test_pnfd_content_upload_when_pnfd_exist`` created
      ``pnfd_content.txt`` but never removed it, leaking a file into the
      working directory (sibling tests clean up after themselves).
    * Typo in the mock parameter name ``mock_query_muitiple``.
    """

    def setUp(self):
        self.client = APIClient()
        self.user_defined_data = {
            'key1': 'value1',
            'key2': 'value2',
            'key3': 'value3',
        }
        # Template of the serialized PNFD info returned by the API.
        self.expected_pnfd_info = {
            'id': None,
            'pnfdId': None,
            'pnfdName': None,
            'pnfdVersion': None,
            'pnfdProvider': None,
            'pnfdInvariantId': None,
            'pnfdOnboardingState': 'CREATED',
            'onboardingFailureDetails': None,
            'pnfdUsageState': 'NOT_IN_USE',
            'userDefinedData': self.user_defined_data,
            '_links': None
        }
        self.nsdModel = {
            "pnfs": [{"properties": {"id": "m6000_s"}}]
        }

    def tearDown(self):
        # Remove any package directory created under the catalog root.
        file_path = os.path.join(CATALOG_ROOT_PATH, "22")
        if os.path.exists(file_path):
            shutil.rmtree(file_path)

    def test_pnfd_create_normal(self):
        """Creating a PNF descriptor returns 201 and the new resource."""
        request_data = {'userDefinedData': self.user_defined_data}
        expected_reponse_data = {
            'pnfdOnboardingState': 'CREATED',
            'pnfdUsageState': 'NOT_IN_USE',
            'userDefinedData': self.user_defined_data,
            '_links': None
        }

        response = self.client.post(
            '/api/nsd/v1/pnf_descriptors',
            data=request_data,
            format='json'
        )
        # The generated id is random; drop it before comparing.
        response.data.pop('id')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(expected_reponse_data, response.data)

    def test_query_multiple_pnfds_normal(self):
        """Listing PNF descriptors returns one entry per stored package."""
        expected_reponse_data = [
            copy.deepcopy(self.expected_pnfd_info),
            copy.deepcopy(self.expected_pnfd_info)
        ]
        expected_reponse_data[0]['id'] = '0'
        expected_reponse_data[1]['id'] = '1'

        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        for i in range(2):
            PnfPackageModel(
                pnfPackageId=str(i),
                onboardingState='CREATED',
                usageState='NOT_IN_USE',
                userDefinedData=user_defined_data
            ).save()
        response = self.client.get('/api/nsd/v1/pnf_descriptors', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(expected_reponse_data, response.data)

    def test_query_single_pnfd_normal(self):
        """Fetching a single PNF descriptor returns its serialized form."""
        expected_reponse_data = copy.deepcopy(self.expected_pnfd_info)
        expected_reponse_data['id'] = '22'

        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        PnfPackageModel(
            pnfPackageId='22',
            onboardingState='CREATED',
            usageState='NOT_IN_USE',
            userDefinedData=user_defined_data
        ).save()

        response = self.client.get('/api/nsd/v1/pnf_descriptors/22', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(expected_reponse_data, response.data)

    def test_query_single_pnfd_failed(self):
        """Fetching a missing PNF descriptor yields 404."""
        response = self.client.get('/api/nsd/v1/pnf_descriptors/22', format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_delete_single_pnfd_normal(self):
        """Deleting an existing, unreferenced PNF descriptor yields 204."""
        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        PnfPackageModel(
            pnfPackageId='22',
            usageState=PKG_STATUS.NOT_IN_USE,
            userDefinedData=user_defined_data,
            pnfdModel='test'
        ).save()
        NSPackageModel.objects.create(
            nsPackageId="111",
            nsdModel=json.JSONEncoder().encode(self.nsdModel)
        )
        resp = self.client.delete("/api/nsd/v1/pnf_descriptors/22", format='json')
        self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(None, resp.data)

    def test_delete_single_pnfd_when_not_exist(self):
        """Deleting a non-existent descriptor is idempotent (still 204)."""
        resp = self.client.delete("/api/nsd/v1/pnf_descriptors/22", format='json')
        self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(None, resp.data)

    @mock.patch.object(toscaparser, "parse_pnfd")
    def test_pnfd_content_upload_normal(self, mock_parse_pnfd):
        """Uploading content on-boards the package and stores the pnfdId."""
        user_defined_data_json = json.JSONEncoder().encode(self.user_defined_data)
        PnfPackageModel(
            pnfPackageId='22',
            usageState=PKG_STATUS.NOT_IN_USE,
            userDefinedData=user_defined_data_json,
        ).save()
        mock_parse_pnfd.return_value = json.JSONEncoder().encode(pnfd_data)
        with open('pnfd_content.txt', 'wt') as fp:
            fp.write('test')

        with open('pnfd_content.txt', 'rt') as fp:
            resp = self.client.put(
                "/api/nsd/v1/pnf_descriptors/22/pnfd_content",
                {'file': fp},
            )
        pnf_pkg = PnfPackageModel.objects.filter(pnfPackageId="22")
        self.assertEqual(pnf_pkg[0].pnfdId, "zte-1.0")
        self.assertEqual(pnf_pkg[0].onboardingState, PKG_STATUS.ONBOARDED)
        self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(None, resp.data)
        os.remove('pnfd_content.txt')

    def test_pnfd_content_upload_when_pnf_not_exist(self):
        """Uploading content for an unknown package id yields 500."""
        with open('pnfd_content.txt', 'wt') as fp:
            fp.write('test')

        with open('pnfd_content.txt', 'rt') as fp:
            resp = self.client.put(
                "/api/nsd/v1/pnf_descriptors/22/pnfd_content",
                {'file': fp},
            )
        self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
        # Clean up the scratch file (was previously leaked).
        os.remove('pnfd_content.txt')

    @mock.patch.object(toscaparser, "parse_pnfd")
    def test_pnfd_content_upload_when_pnfd_exist(self, mock_parse_pnfd):
        """Uploading a PNFD whose id is already on-boarded yields 500."""
        with open('pnfd_content.txt', 'wt') as fp:
            fp.write('test')
        PnfPackageModel(
            pnfPackageId='22',
            usageState=PKG_STATUS.NOT_IN_USE,
            pnfdId="zte-1.1"
        ).save()
        # A different package already holds the pnfdId the parser reports.
        PnfPackageModel(
            pnfPackageId='23',
            usageState=PKG_STATUS.NOT_IN_USE,
            pnfdId="zte-1.0"
        ).save()
        mock_parse_pnfd.return_value = json.JSONEncoder().encode(pnfd_data)
        with open('pnfd_content.txt', 'rt') as fp:
            resp = self.client.put(
                "/api/nsd/v1/pnf_descriptors/22/pnfd_content",
                {'file': fp},
            )
        self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
        # Clean up the scratch file (was previously leaked).
        os.remove('pnfd_content.txt')

    def test_pnfd_download_normal(self):
        """Downloading an on-boarded PNFD streams the stored file content."""
        with open('pnfd_content.txt', 'wt') as fp:
            fp.writelines('test1')
            fp.writelines('test2')
        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        PnfPackageModel(
            pnfPackageId='22',
            usageState=PKG_STATUS.NOT_IN_USE,
            onboardingState=PKG_STATUS.ONBOARDED,
            userDefinedData=user_defined_data,
            localFilePath="pnfd_content.txt",
            pnfdModel='test'
        ).save()
        resp = self.client.get("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
        file_content = ""
        for data in resp.streaming_content:
            file_content = '%s%s' % (file_content, data.decode())
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual("test1test2", file_content)
        os.remove('pnfd_content.txt')

    def test_pnfd_download_failed(self):
        """Downloading a missing PNFD yields 404."""
        response = self.client.get("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_pnfd_download_when_not_on_boarded(self):
        """Downloading a PNFD that is not yet on-boarded yields 500."""
        with open('pnfd_content.txt', 'wt') as fp:
            fp.writelines('test1')
            fp.writelines('test2')
        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        PnfPackageModel(
            pnfPackageId='22',
            usageState=PKG_STATUS.NOT_IN_USE,
            onboardingState=PKG_STATUS.CREATED,
            userDefinedData=user_defined_data,
            localFilePath="pnfd_content.txt",
            pnfdModel='test'
        ).save()
        response = self.client.get("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
        os.remove('pnfd_content.txt')

    @mock.patch.object(PnfDescriptor, "create")
    def test_pnfd_create_when_catch_exception(self, mock_create):
        """Unexpected create errors map to 500."""
        request_data = {'userDefinedData': self.user_defined_data}
        mock_create.side_effect = TypeError('integer type')
        response = self.client.post('/api/nsd/v1/pnf_descriptors', data=request_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(PnfDescriptor, "delete_single")
    def test_delete_single_when_catch_exception(self, mock_delete_single):
        """Unexpected delete errors map to 500."""
        mock_delete_single.side_effect = TypeError("integer type")
        response = self.client.delete("/api/nsd/v1/pnf_descriptors/22", format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(PnfDescriptor, "query_single")
    def test_query_single_when_catch_exception(self, mock_query_single):
        """Unexpected single-query errors map to 500."""
        mock_query_single.side_effect = TypeError("integer type")
        response = self.client.get('/api/nsd/v1/pnf_descriptors/22', format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(PnfDescriptor, "query_multiple")
    def test_query_multiple_when_catch_exception(self, mock_query_multiple):
        """Unexpected list-query errors map to 500."""
        mock_query_multiple.side_effect = TypeError("integer type")
        response = self.client.get('/api/nsd/v1/pnf_descriptors', format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(PnfDescriptor, "upload")
    def test_upload_when_catch_exception(self, mock_upload):
        """Unexpected upload errors map to 500."""
        mock_upload.side_effect = TypeError("integer type")
        response = self.client.put("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(PnfDescriptor, "download")
    def test_download_when_catch_exception(self, mock_download):
        """Unexpected download errors map to 500."""
        mock_download.side_effect = TypeError("integer type")
        response = self.client.get("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(toscaparser, 'parse_pnfd')
    def test_pnfd_parse_normal(self, mock_parse_pnfd):
        """Parsing an on-boarded PNFD returns the parser's model string."""
        PnfPackageModel(pnfPackageId="8", pnfdId="10").save()
        mock_parse_pnfd.return_value = json.JSONEncoder().encode({"c": "d"})
        req_data = {"csarId": "8", "inputs": []}
        resp = self.client.post("/api/catalog/v1/parserpnfd", req_data, format='json')
        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
        self.assertEqual({"model": '{"c": "d"}'}, resp.data)
diff --git a/catalog/packages/tests/test_service_descriptor.py b/catalog/packages/tests/test_service_descriptor.py
new file mode 100644
index 0000000..08a6f03
--- /dev/null
+++ b/catalog/packages/tests/test_service_descriptor.py
@@ -0,0 +1,95 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+import logging
+
+from django.test import TestCase
+from mock import mock
+
+from catalog.packages.biz.service_descriptor import ServiceDescriptor
+from catalog.packages.const import PKG_STATUS
+from catalog.pub.database.models import ServicePackageModel, VnfPackageModel, PnfPackageModel
+from catalog.pub.exceptions import PackageNotFoundException
+from catalog.pub.utils import toscaparser
+from .const import sd_data
+
+logger = logging.getLogger(__name__)
+
+
class TestServiceDescription(TestCase):
    """Tests for the ServiceDescriptor business logic.

    Fixes over the original version:
    * ``test_delete_single`` asserted emptiness twice (``len(...) == 0``
      and ``.exists()``); the redundant length check is removed.
    * ``test_delete_single_not_exists`` used try/except, which silently
      passed when no exception was raised; replaced with ``assertRaises``.
    """

    def setUp(self):
        self.user_defined_data = {
            'key1': 'value1',
            'key2': 'value2',
            'key3': 'value3',
        }
        self.data = {
            'userDefinedData': self.user_defined_data,
        }
        self.sd_data = sd_data
        ServicePackageModel.objects.filter().delete()

    def tearDown(self):
        pass

    def test_create(self):
        """Creating a descriptor assigns an id and initial lifecycle states."""
        result_data = ServiceDescriptor().create(self.data)
        self.assertIsNotNone(result_data['id'])
        service_package = ServicePackageModel.objects.filter(servicePackageId=result_data['id'])[0]
        self.assertIsNotNone(service_package)
        self.assertEqual(PKG_STATUS.DISABLED, service_package.operationalState)
        self.assertEqual(PKG_STATUS.CREATED, service_package.onboardingState)
        self.assertEqual(PKG_STATUS.NOT_IN_USE, service_package.usageState)

    def test_create_with_csarid(self):
        """A caller-supplied CSAR id is used as the package id verbatim."""
        csar_id = '0b667470-e6b3-4ee8-8f08-186317a04dc2'
        result_data = ServiceDescriptor().create(self.data, csar_id)
        self.assertEqual(csar_id, result_data['id'])
        service_package = ServicePackageModel.objects.filter(servicePackageId=csar_id)[0]
        self.assertIsNotNone(service_package)
        self.assertEqual(PKG_STATUS.DISABLED, service_package.operationalState)
        self.assertEqual(PKG_STATUS.CREATED, service_package.onboardingState)
        self.assertEqual(PKG_STATUS.NOT_IN_USE, service_package.usageState)

    @mock.patch.object(toscaparser, 'parse_sd')
    def test_parse_serviced_and_save(self, mock_parse_sd):
        """Parsing a service CSAR updates the stored package record."""
        mock_parse_sd.return_value = json.JSONEncoder().encode(self.sd_data)
        servcie_desc = ServiceDescriptor()
        csar_id = '0b667470-e6b3-4ee8-8f08-186317a04dc2'
        servcie_desc.create(self.data, csar_id)
        # Referenced VNF/PNF packages must exist for parsing to succeed.
        VnfPackageModel(vnfPackageId="1", vnfdId="cd557883-ac4b-462d-aa01-421b5fa606b1").save()
        PnfPackageModel(pnfPackageId="1", pnfdId="m6000_s").save()
        local_file_name = "/test.csar"
        servcie_desc.parse_serviced_and_save(csar_id, local_file_name)

        service_package = ServicePackageModel.objects.filter(servicePackageId=csar_id)[0]
        self.assertIsNotNone(service_package)

    def test_delete_single(self):
        """Deleting an existing descriptor removes its database row."""
        servcie_desc = ServiceDescriptor()
        csar_id = '0b667470-e6b3-4ee8-8f08-186317a04dc2'
        servcie_desc.create(self.data, csar_id)

        servcie_desc.delete_single(csar_id)
        self.assertFalse(ServicePackageModel.objects.filter(servicePackageId=csar_id).exists())

    def test_delete_single_not_exists(self):
        """Deleting an unknown id raises PackageNotFoundException."""
        csar_id = "8000"
        with self.assertRaises(PackageNotFoundException) as ctx:
            ServiceDescriptor().delete_single(csar_id)
        self.assertEqual("Service package[8000] not Found.", ctx.exception.args[0])
diff --git a/catalog/packages/tests/test_servicepackage.py b/catalog/packages/tests/test_servicepackage.py
new file mode 100644
index 0000000..241d80d
--- /dev/null
+++ b/catalog/packages/tests/test_servicepackage.py
@@ -0,0 +1,481 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+
+from django.test import TestCase, Client
+from mock import mock
+from rest_framework import status
+
+from catalog.packages.biz.sdc_service_package import ServicePackage
+from catalog.packages.const import PKG_STATUS
+from catalog.pub.database.models import ServicePackageModel, VnfPackageModel, PnfPackageModel
+from catalog.pub.exceptions import PackageNotFoundException, PackageHasExistsException, CatalogException
+from catalog.pub.msapi import sdc
+from catalog.pub.utils import toscaparser
+
# Base URL for the parser API endpoints.
PARSER_BASE_URL = "/api/parser/v1"
+
+
class TestServicePackage(TestCase):
    """ Test case for Service Package operations"""

    def setUp(self):
        """Build the parsed-service-descriptor and SDC asset fixtures."""
        self.client = Client()
        ServicePackageModel.objects.filter().delete()
        # Canned output of toscaparser.parse_sd for a sample SD-WAN service.
        self.sd_data = {
            "inputs": {
                "sdwanvpnresource_list": [
                    {
                        "sdwanvpn_topology": "",
                        "required": True,
                        "type": "string"
                    },
                    {
                        "sdwansitelan_list": [
                            {
                                "deviceName": "",
                                "required": True,
                                "type": "string",
                                "description": "The device name in the site"
                            }
                        ]
                    }
                ],
                "sdwansiteresource_list": [
                    {
                        "sdwansite_controlPoint": "",
                        "required": False,
                        "type": "string",
                        "description": "The control point of the site,only for sd-wan-edge"
                    },
                    {
                        "sdwandevice_list": [
                            {
                                "systemIp": "",
                                "required": False,
                                "type": "string",
                                "description": "The system ip of the device"
                            }
                        ]
                    }
                ]
            },
            "pnfs": [
                {
                    "pnf_id": "m6000_s",
                    "cps": [],
                    "description": "",
                    "properties": {
                        "vendor": "zte",
                        "request_reclassification": False,
                        "pnf_type": "m6000s",
                        "version": "1.0",
                        "management_address": "111111",
                        "id": "m6000_s",
                        "nsh_aware": False
                    }
                }
            ],
            "description": "",
            "graph": {
                "sdwansiteresource": [
                    "sdwanvpnresource"
                ],
                "sdwanvpnresource": []
            },
            "basepath": "c:\\users\\cmcc\\appdata\\local\\temp\\tmpn79jwc\\Definitions",
            "vnfs": [
                {
                    "vnf_id": "sdwansiteresource",
                    "description": "",
                    "properties": {
                        "sdwandevice_type": "",
                        "sdwandevice_class": "PNF",
                        "multi_stage_design": "False",
                        "min_instances": "1",
                        "sdwansite_controlPoint": "",
                        "id": "cd557883-ac4b-462d-aa01-421b5fa606b1",
                        "sdwansite_longitude": "",
                        "sdwansite_latitude": "",
                        "sdwansite_postcode": "",
                        "sdwansite_type": "",
                        "nf_naming": {
                            "ecomp_generated_naming": True
                        },
                        "sdwansite_emails": "",
                        "sdwansite_role": "",
                        "vnfm_info": "",
                        "sdwansite_address": "",
                        "sdwansite_description": "",
                        "availability_zone_max_count": "1",
                        "sdwansite_name": ""
                    },
                    "dependencies": [],
                    "networks": [],
                    "metadata": {
                        "category": "Configuration",
                        "subcategory": "Configuration",
                        "UUID": "cd557883-ac4b-462d-aa01-421b5fa606b1",
                        "invariantUUID": "c83b621e-e267-4910-a75a-a2a5957296e4",
                        "name": "sdwansiteresource",
                        "customizationUUID": "673dd6b3-3a06-4ef0-8ad0-8c26224b08f7",
                        "resourceVendorRelease": "1.0",
                        "version": "1.0",
                        "resourceVendor": "onap",
                        "resourceVendorModelNumber": "",
                        "type": "VF",
                        "description": "sdwansiteresource"
                    }
                }
            ],
            "vls": [],
            "service": {
                "type": "org.openecomp.service.EnhanceService",
                "requirements": {
                    "sdwanvpnresource.sdwanvpn.dependency": [
                        "sdwanvpnresource",
                        "sdwanvpn.dependency"
                    ],
                    "sdwansiteresource.sdwansitewan.dependency": [
                        "sdwansiteresource",
                        "sdwansitewan.dependency"
                    ],
                    "sdwansiteresource.sdwandevice.dependency": [
                        "sdwansiteresource",
                        "sdwandevice.dependency"
                    ],
                    "sdwanvpnresource.sdwansitelan.dependency": [
                        "sdwanvpnresource",
                        "sdwansitelan.dependency"
                    ],
                    "sdwanvpnresource.sdwanvpn.device": [
                        "sdwanvpnresource",
                        "sdwanvpn.device"
                    ],
                    "sdwansiteresource.sdwansite.device": [
                        "sdwansiteresource",
                        "sdwansite.device"
                    ],
                    "sdwansiteresource.sdwansite.dependency": [
                        "sdwansiteresource",
                        "sdwansite.dependency"
                    ],
                    "sdwanvpnresource.sdwansitelan.device": [
                        "sdwanvpnresource",
                        "sdwansitelan.device"
                    ],
                    "sdwansiteresource.sdwansitewan.device": [
                        "sdwansiteresource",
                        "sdwansitewan.device"
                    ],
                    "sdwansiteresource.sdwandevice.device": [
                        "sdwansiteresource",
                        "sdwandevice.device"
                    ]
                },
                "properties": {
                    "descriptor_id": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
                    "designer": "",
                    "invariant_id": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
                    "name": "Enhance_Service",
                    # NOTE(review): "verison" reproduces the parser output's
                    # key spelling — confirm upstream before renaming.
                    "verison": ""
                },
                "capabilities": {
                    "sdwansiteresource.sdwandevice.feature": [
                        "sdwansiteresource",
                        "sdwandevice.feature"
                    ],
                    "sdwanvpnresource.sdwanvpn.feature": [
                        "sdwanvpnresource",
                        "sdwanvpn.feature"
                    ],
                    "sdwanvpnresource.sdwanvpn.link": [
                        "sdwanvpnresource",
                        "sdwanvpn.link"
                    ],
                    "sdwansiteresource.sdwansite.feature": [
                        "sdwansiteresource",
                        "sdwansite.feature"
                    ],
                    "sdwansiteresource.sdwansitewan.feature": [
                        "sdwansiteresource",
                        "sdwansitewan.feature"
                    ],
                    "sdwanvpnresource.sdwansitelan.feature": [
                        "sdwanvpnresource",
                        "sdwansitelan.feature"
                    ]
                },
                "metadata": {
                    "category": "E2E Service",
                    "serviceType": "",
                    "description": "Enhance_Service",
                    "instantiationType": "A-la-carte",
                    "type": "Service",
                    "environmentContext": "General_Revenue-Bearing",
                    "serviceEcompNaming": True,
                    "UUID": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
                    "ecompGeneratedNaming": True,
                    "serviceRole": "",
                    "invariantUUID": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
                    "namingPolicy": "",
                    "name": "Enhance_Service"
                }
            },
            "metadata": {
                "category": "E2E Service",
                "serviceType": "",
                "description": "Enhance_Service",
                "instantiationType": "A-la-carte",
                "type": "Service",
                "environmentContext": "General_Revenue-Bearing",
                "serviceEcompNaming": True,
                "UUID": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
                "ecompGeneratedNaming": True,
                "serviceRole": "",
                "invariantUUID": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
                "namingPolicy": "",
                "name": "Enhance_Service"
            }
        }
        # Canned SDC asset metadata; its "resources" entries reference the
        # VNF/PNF package ids the tests pre-create (or deliberately omit).
        self.asset_data = {
            "uuid": "1",
            "invariantUUID": "63eaec39-ffbe-411c-a838-448f2c73f7eb",
            "name": "underlayvpn",
            "version": "2.0",
            "toscaModelURL": "/sdc/v1/catalog/resources/c94490a0-f7ef-48be-b3f8-8d8662a37236/toscaModel",
            "category": "Volte",
            "subCategory": "VolteVNF",
            "resourceType": "VF",
            "lifecycleState": "CERTIFIED",
            "distributionStatus": "DISTRIBUTION_APPROVED",
            "lastUpdaterUserId": "jh0003",
            "resources": [
                {
                    "resourceInstanceName": "contrailV2VLANSubInterface 0",
                    "resourceName": "contrailV2VLANSubInterface",
                    "resourceInvariantUUID": "4d31b775-af63-491d-89f1-254e218e7140",
                    "resourceVersion": "1.0",
                    "resoucreType": "CP",
                    "resourceUUID": "cd557883-ac4b-462d-aa01-421b5fa606b1"
                },
                {
                    "resourceInstanceName": "Network 0",
                    "resourceName": "Network",
                    "resourceInvariantUUID": "f90f567e-7d7d-4216-af38-6bca0637c59f",
                    "resourceVersion": "1.0",
                    "resoucreType": "VL",
                    "resourceUUID": "m6000_s"
                }
            ]
        }

    def tearDown(self):
        pass
+
+ ###############################################################
+
+ def test_service_pkg_distribute_when_pkg_exists(self):
+ ServicePackageModel(servicePackageId="1", servicedId="2").save()
+ csar_id = "1"
+ try:
+ ServicePackage().on_distribute(csar_id)
+ except PackageHasExistsException as e:
+ self.assertEqual("Service CSAR(1) already exists.", e.args[0])
+
+ @mock.patch.object(sdc, 'get_asset')
+ def test_service_pkg_distribute_when_fail_get_artifacts(self, mock_get_asset):
+ mock_get_asset.side_effect = CatalogException("Failed to query artifact(services,1) from sdc.")
+ csar_id = "1"
+ try:
+ ServicePackage().on_distribute(csar_id)
+ except Exception as e:
+ self.assertTrue(isinstance(e, CatalogException))
+ self.assertEqual("Failed to query artifact(services,1) from sdc.", e.args[0])
+
+ @mock.patch.object(sdc, 'get_asset')
+ def test_service_pkg_distribute_when_resource_not_distribute(self, mock_get_asset):
+ mock_get_asset.return_value = self.asset_data
+ csar_id = "1"
+ try:
+ ServicePackage().on_distribute(csar_id)
+ except Exception as e:
+ self.assertTrue(isinstance(e, CatalogException))
+ self.assertEqual("Resource (cd557883-ac4b-462d-aa01-421b5fa606b1) is not distributed.", e.args[0])
+
+ @mock.patch.object(sdc, 'get_asset')
+ @mock.patch.object(sdc, 'download_artifacts')
+ def test_service_pkg_distribute_when_fail_download_artifacts(self, mock_get_asset, mock_download_artifacts):
+ mock_get_asset.return_value = self.asset_data
+ mock_download_artifacts.side_effect = CatalogException("Failed to download 1 from sdc.")
+ csar_id = "1"
+ VnfPackageModel(vnfPackageId="cd557883-ac4b-462d-aa01-421b5fa606b1",
+ vnfdId="cd557883-ac4b-462d-aa01-421b5fa606b1").save()
+ PnfPackageModel(pnfPackageId="m6000_s", pnfdId="m6000_s").save()
+
+ try:
+ ServicePackage().on_distribute(csar_id)
+ except Exception as e:
+ self.assertTrue(isinstance(e, CatalogException))
+ self.assertEqual("Failed to download 1 from sdc.", e.args[0])
+
+ @mock.patch.object(sdc, 'get_asset')
+ @mock.patch.object(sdc, 'download_artifacts')
+ @mock.patch.object(toscaparser, 'parse_sd')
+ def test_service_pkg_distribute(self, mock_parse_sd, mock_download_artifacts, mock_get_asset):
+ mock_parse_sd.return_value = json.JSONEncoder().encode(self.sd_data)
+ mock_download_artifacts.return_value = "/test.csar"
+ mock_get_asset.return_value = self.asset_data
+ VnfPackageModel(vnfPackageId="cd557883-ac4b-462d-aa01-421b5fa606b1",
+ vnfdId="cd557883-ac4b-462d-aa01-421b5fa606b1").save()
+ PnfPackageModel(pnfPackageId="m6000_s", pnfdId="m6000_s").save()
+ ServicePackage().on_distribute(csar_id="1")
+
+ service_package = ServicePackageModel.objects.filter(servicePackageId="1").first()
+ self.assertEqual("5de07996-7ff0-4ec1-b93c-e3a00bb3f207", service_package.invariantId)
+ self.assertEqual("Enhance_Service", service_package.servicedName)
+ self.assertEqual(PKG_STATUS.ONBOARDED, service_package.onboardingState)
+ self.assertEqual(PKG_STATUS.ENABLED, service_package.operationalState)
+ self.assertEqual(PKG_STATUS.NOT_IN_USE, service_package.usageState)
+
+ def test_api_service_pkg_distribute_when_pkg_exists(self):
+ ServicePackageModel(servicePackageId="1", servicedId="2").save()
+ resp = self.client.post(
+ PARSER_BASE_URL + "/service_packages", {"csarId": "1"}, format='json')
+ self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
+ self.assertEqual("Service CSAR(1) already exists.", resp.data["errorMessage"])
+
+ ###############################################################
+
+ def test_service_pkg_get_all(self):
+ ServicePackageModel(
+ servicePackageId="13",
+ servicedId="2",
+ servicedDesigner="2",
+ servicedVersion="2",
+ servicePackageUri="13.csar",
+ servicedModel="").save()
+ ServicePackageModel(
+ servicePackageId="14",
+ servicedId="3",
+ servicedDesigner="3",
+ servicedVersion="3",
+ servicePackageUri="14.csar",
+ servicedModel="").save()
+ csars = ServicePackage().get_csars()
+ self.assertEqual(2, len(csars))
+
+ def test_api_service_pkg_get_all(self):
+ ServicePackageModel(
+ servicePackageId="13",
+ servicedId="2",
+ servicedDesigner="2",
+ servicedVersion="2",
+ servicePackageUri="13.csar",
+ servicedModel="").save()
+ ServicePackageModel(
+ servicePackageId="14",
+ servicedId="3",
+ servicedDesigner="3",
+ servicedVersion="3",
+ servicePackageUri="14.csar",
+ servicedModel="").save()
+ resp = self.client.get(PARSER_BASE_URL + "/service_packages")
+ self.assertEqual(resp.status_code, status.HTTP_200_OK)
+
+ ###############################################################
+
+ def test_service_pkg_get_one(self):
+ ServicePackageModel(
+ servicePackageId="14",
+ servicedId="2",
+ servicedDesigner="3",
+ servicedVersion="4",
+ servicePackageUri="14.csar",
+ servicedModel="").save()
+ csar = ServicePackage().get_csar(14)
+ self.assertEqual(14, csar['csarId'])
+
+ def test_service_pkg_get_one_not_found(self):
+ try:
+ ServicePackage().get_csar(1000)
+ except PackageNotFoundException as e:
+ self.assertEqual("Service package[1000] not Found.", e.args[0])
+
+ def test_api_service_pkg_get_one(self):
+ ServicePackageModel(
+ servicePackageId="14",
+ servicedId="2",
+ servicedDesigner="3",
+ servicedVersion="4",
+ servicePackageUri="14.csar",
+ servicedModel="").save()
+ resp = self.client.get(PARSER_BASE_URL + "/service_packages/14")
+ self.assertEqual(resp.status_code, status.HTTP_200_OK)
+
+ def test_api_service_pkg_get_one_not_found(self):
+ resp = self.client.get(PARSER_BASE_URL + "/service_packages/22")
+ self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
+ self.assertEqual(
+ {"errorMessage": "Service package[22] not Found.", 'error': 404},
+ resp.data)
+
+ ###############################################################
+
+ def test_service_pkg_normal_delete(self):
+ ServicePackageModel(servicePackageId="8", servicedId="2").save()
+ sp = ServicePackageModel.objects.filter(servicePackageId=8)
+ self.assertEqual(1, len(sp))
+ ServicePackage().delete_csar("8")
+ sp = ServicePackageModel.objects.filter(servicePackageId=8)
+ self.assertEqual(0, len(sp))
+
+ def test_service_pkg_normal_delete_not_found(self):
+ try:
+ ServicePackage().delete_csar("8000")
+ except PackageNotFoundException as e:
+ self.assertEqual("Service package[8000] not Found.", e.args[0])
+
+ def test_api_service_pkg_normal_delete(self):
+ ServicePackageModel(servicePackageId="8", servicedId="2").save()
+ resp = self.client.delete(PARSER_BASE_URL + "/service_packages/8")
+ self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
+
+ ###############################################################
+
+ @mock.patch.object(toscaparser, 'parse_sd')
+ def test_service_pkg_parser(self, mock_parse_sd):
+ ServicePackageModel(servicePackageId="8", servicedId="2").save()
+ mock_parse_sd.return_value = json.JSONEncoder().encode({"a": "b"})
+
+ inputs = []
+ ret = ServicePackage().parse_serviced("8", inputs)
+ self.assertTrue({"model": '{"c": "d"}'}, ret)
+
+ def test_service_pkg_parser_not_found(self):
+ try:
+ csar_id = "8000"
+ inputs = []
+ ServicePackage().parse_serviced(csar_id, inputs)
+ except PackageNotFoundException as e:
+ self.assertEqual("Service CSAR(8000) does not exist.", e.args[0])
+
+ def test_api_service_pkg_parser_not_found(self):
+ query_data = {
+ "csarId": "1",
+ "packageType": "Service",
+ "inputs": "string"
+ }
+ resp = self.client.post(PARSER_BASE_URL + "/parser", query_data, format='json')
+ self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
diff --git a/catalog/packages/tests/test_vnf_package.py b/catalog/packages/tests/test_vnf_package.py
new file mode 100644
index 0000000..b83268a
--- /dev/null
+++ b/catalog/packages/tests/test_vnf_package.py
@@ -0,0 +1,382 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+import urllib
+import mock
+import shutil
+
+from django.test import TestCase
+from rest_framework import status
+from rest_framework.test import APIClient
+
+from catalog.packages.biz.vnf_package import VnfPackage, VnfPkgUploadThread
+from catalog.packages.const import PKG_STATUS
+from catalog.packages.tests.const import vnfd_data
+from catalog.pub.config.config import CATALOG_ROOT_PATH
+from catalog.pub.database.models import VnfPackageModel
+from catalog.pub.utils import toscaparser
+
+
+class MockReq():
+ def read(self):
+ return "1"
+
+ def close(self):
+ pass
+
+
+class TestVnfPackage(TestCase):
+ def setUp(self):
+ self.client = APIClient()
+
+ def tearDown(self):
+ file_path = os.path.join(CATALOG_ROOT_PATH, "222")
+ if os.path.exists(file_path):
+ shutil.rmtree(file_path)
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ def test_upload_vnf_pkg(self, mock_parse_vnfd):
+ data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "empty.txt"), "rt")}
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED"
+ )
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+ response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+ vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId="222")
+ self.assertEqual("zte-hss-1.0", vnf_pkg[0].vnfdId)
+ self.assertEqual(PKG_STATUS.ONBOARDED, vnf_pkg[0].onboardingState)
+ self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
+
+ def test_upload_vnf_pkg_failed(self):
+ data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "empty.txt"), "rb")}
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ )
+ response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ @mock.patch.object(urllib.request, 'urlopen')
+ def test_upload_nf_pkg_from_uri(self, mock_urlopen, mock_parse_vnfd):
+ vnf_pkg = VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED"
+ )
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+ req_data = {"addressInformation": "https://127.0.0.1:1234/sdc/v1/hss.csar"}
+ mock_urlopen.return_value = MockReq()
+ vnf_pkg_id = vnf_pkg.vnfPackageId
+ VnfPkgUploadThread(req_data, vnf_pkg_id).run()
+ vnf_pkg1 = VnfPackageModel.objects.filter(vnfPackageId="222")
+ self.assertEqual("zte-hss-1.0", vnf_pkg1[0].vnfdId)
+
+ def test_upload_from_uri_failed(self):
+ req_data = {"username": "123"}
+ response = self.client.post("/api/vnfpkgm/v1/vnf_packages/111/package_content/upload_from_uri", data=req_data)
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ def test_create_vnf_pkg(self):
+ req_data = {
+ "userDefinedData": {"a": "A"}
+ }
+ response = self.client.post("/api/vnfpkgm/v1/vnf_packages", data=req_data, format="json")
+ resp_data = json.loads(response.content)
+ expect_resp_data = {
+ "id": resp_data.get("id"),
+ "onboardingState": "CREATED",
+ "operationalState": "DISABLED",
+ "usageState": "NOT_IN_USE",
+ "userDefinedData": {"a": "A"},
+ "_links": None # TODO
+ }
+ self.assertEqual(expect_resp_data, resp_data)
+ self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+
+ def test_query_single_vnf(self):
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ vnfdId="zte-hss-1.0",
+ vnfVendor="zte",
+ vnfdProductName="hss",
+ vnfSoftwareVersion="1.0.0",
+ vnfdVersion="1.0.0",
+ checksum='{"algorithm":"111", "hash": "11"}',
+ onboardingState="CREATED",
+ operationalState="DISABLED",
+ usageState="NOT_IN_USE",
+ userDefinedData='{"a": "A"}'
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222")
+ expect_data = {
+ "id": "222",
+ "vnfdId": "zte-hss-1.0",
+ "vnfProductName": "hss",
+ "vnfSoftwareVersion": "1.0.0",
+ "vnfdVersion": "1.0.0",
+ "checksum": {"algorithm": "111", "hash": "11"},
+ "softwareImages": None,
+ "additionalArtifacts": None,
+ "onboardingState": "CREATED",
+ "operationalState": "DISABLED",
+ "usageState": "NOT_IN_USE",
+ "userDefinedData": {"a": "A"},
+ "_links": None
+ }
+ self.assertEqual(response.data, expect_data)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ def test_query_single_vnf_failed(self):
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222")
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ def test_query_multiple_vnf(self):
+ VnfPackageModel.objects.create(
+ vnfPackageId="111",
+ vnfdId="zte-hss-1.0",
+ vnfVendor="zte",
+ vnfdProductName="hss",
+ vnfSoftwareVersion="1.0.0",
+ vnfdVersion="1.0.0",
+ checksum='{"algorithm":"111", "hash": "11"}',
+ onboardingState="CREATED",
+ operationalState="DISABLED",
+ usageState="NOT_IN_USE",
+ userDefinedData='{"a": "A"}'
+ )
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ vnfdId="zte-hss-1.0",
+ vnfVendor="zte",
+ vnfdProductName="hss",
+ vnfSoftwareVersion="1.0.0",
+ vnfdVersion="1.0.0",
+ checksum='{"algorithm":"111", "hash": "11"}',
+ onboardingState="CREATED",
+ operationalState="DISABLED",
+ usageState="NOT_IN_USE",
+ userDefinedData='{"a": "A"}'
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages")
+ expect_data = [
+ {
+ "id": "111",
+ "vnfdId": "zte-hss-1.0",
+ "vnfProductName": "hss",
+ "vnfSoftwareVersion": "1.0.0",
+ "vnfdVersion": "1.0.0",
+ "checksum": {"algorithm": "111", "hash": "11"},
+ "softwareImages": None,
+ "additionalArtifacts": None,
+ "onboardingState": "CREATED",
+ "operationalState": "DISABLED",
+ "usageState": "NOT_IN_USE",
+ "userDefinedData": {"a": "A"},
+ "_links": None
+ },
+ {
+ "id": "222",
+ "vnfdId": "zte-hss-1.0",
+ "vnfProductName": "hss",
+ "vnfSoftwareVersion": "1.0.0",
+ "vnfdVersion": "1.0.0",
+ "checksum": {"algorithm": "111", "hash": "11"},
+ "softwareImages": None,
+ "additionalArtifacts": None,
+ "onboardingState": "CREATED",
+ "operationalState": "DISABLED",
+ "usageState": "NOT_IN_USE",
+ "userDefinedData": {"a": "A"},
+ "_links": None
+ }
+ ]
+ self.assertEqual(response.data, expect_data)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ def test_delete_single_vnf_pkg(self):
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ vnfdId="zte-hss-1.0",
+ vnfVendor="zte",
+ vnfdProductName="hss",
+ vnfSoftwareVersion="1.0.0",
+ vnfdVersion="1.0.0",
+ checksum='{"algorithm":"111", "hash": "11"}',
+ onboardingState="CREATED",
+ operationalState="DISABLED",
+ usageState="NOT_IN_USE",
+ userDefinedData='{"a": "A"}'
+ )
+ response = self.client.delete("/api/vnfpkgm/v1/vnf_packages/222")
+ self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+ self.assertEqual(response.data, None)
+
+ def test_delete_when_vnf_pkg_not_exist(self):
+ response = self.client.delete("/api/vnfpkgm/v1/vnf_packages/222")
+ self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+ self.assertEqual(response.data, None)
+
+ def test_fetch_vnf_pkg(self):
+ with open("vnfPackage.csar", "wt") as fp:
+ fp.writelines("AAAABBBBCCCCDDDD")
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="ONBOARDED",
+ localFilePath="vnfPackage.csar"
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content")
+ file_content = ''
+ for data in response.streaming_content:
+ file_content = file_content + data.decode()
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual('AAAABBBBCCCCDDDD', file_content)
+ os.remove("vnfPackage.csar")
+
+ def test_fetch_partical_vnf_pkg(self):
+ with open("vnfPackage.csar", "wt") as fp:
+ fp.writelines("AAAABBBBCCCCDDDD")
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="ONBOARDED",
+ localFilePath="vnfPackage.csar"
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content", HTTP_RANGE="4-7")
+ partial_file_content = ''
+ for data in response.streaming_content:
+ partial_file_content = partial_file_content + data.decode()
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual('BBB', partial_file_content)
+ os.remove("vnfPackage.csar")
+
+ def test_fetch_last_partical_vnf_pkg(self):
+ with open("vnfPackage.csar", "wt") as fp:
+ fp.writelines("AAAABBBBCCCCDDDD")
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="ONBOARDED",
+ localFilePath="vnfPackage.csar"
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content", HTTP_RANGE=" 4-")
+ partial_file_content = ''
+ for data in response.streaming_content:
+ partial_file_content = partial_file_content + data.decode()
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual('BBBBCCCCDDDD', partial_file_content)
+ os.remove("vnfPackage.csar")
+
+ def test_fetch_vnf_pkg_when_pkg_not_exist(self):
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content")
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ def test_fetch_vnf_pkg_when_catch_cataloge_exception(self):
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED",
+ localFilePath="vnfPackage.csar"
+ )
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(VnfPackage, "create_vnf_pkg")
+ def test_create_vnf_pkg_when_catch_exception(self, mock_create_vnf_pkg):
+ mock_create_vnf_pkg.side_effect = TypeError('integer type')
+ req_data = {
+ "userDefinedData": {"a": "A"}
+ }
+ response = self.client.post("/api/vnfpkgm/v1/vnf_packages", data=req_data, format="json")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(VnfPackage, "delete_vnf_pkg")
+ def test_delete_single_when_catch_exception(self, mock_delete_vnf_pkg):
+ mock_delete_vnf_pkg.side_effect = TypeError("integer type")
+ response = self.client.delete("/api/vnfpkgm/v1/vnf_packages/222")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(VnfPackage, "query_single")
+ def test_query_single_when_catch_exception(self, mock_query_single):
+ mock_query_single.side_effect = TypeError("integer type")
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(VnfPackage, "query_multiple")
+ def test_query_multiple_when_catch_exception(self, mock_query_muitiple):
+ mock_query_muitiple.side_effect = TypeError("integer type")
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ def test_upload_when_catch_exception(self, mock_parse_vnfd):
+ data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "empty.txt"), "rb")}
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED"
+ )
+ mock_parse_vnfd.side_effect = TypeError("integer type")
+ response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(VnfPkgUploadThread, 'start')
+ def test_upload_from_uri_when_catch_exception(self, mock_start):
+ req_data = {"addressInformation": "https://127.0.0.1:1234/sdc/v1/hss.csar"}
+ mock_start.side_effect = TypeError("integer type")
+ response = self.client.post("/api/vnfpkgm/v1/vnf_packages/111/package_content/upload_from_uri", data=req_data)
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(VnfPackage, 'download')
+ def test_fetch_vnf_pkg_when_catch_exception(self, mock_download):
+ mock_download.side_effect = TypeError("integer type")
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content")
+ self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ def test_fetch_vnf_artifact(self, mock_parse_vnfd):
+ data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "resource_test.csar"), "rb")}
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED"
+ )
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+ response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+ self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/artifacts/image")
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(response.getvalue(), b"ubuntu_16.04\n")
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ def test_fetch_vnf_artifact_not_exists(self, mock_parse_vnfd):
+ data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "resource_test.csar"), "rb")}
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED"
+ )
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+ response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+ self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/1451/artifacts/image")
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ @mock.patch.object(toscaparser, 'parse_vnfd')
+ def test_fetch_vnf_artifact_vnf_not_exists(self, mock_parse_vnfd):
+ data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "resource_test.csar"), "rb")}
+ VnfPackageModel.objects.create(
+ vnfPackageId="222",
+ onboardingState="CREATED"
+ )
+ mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+ response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+ self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
+ response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/artifacts/image1")
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
diff --git a/catalog/packages/tests/test_vnf_pkg_subscription.py b/catalog/packages/tests/test_vnf_pkg_subscription.py
new file mode 100644
index 0000000..635b137
--- /dev/null
+++ b/catalog/packages/tests/test_vnf_pkg_subscription.py
@@ -0,0 +1,183 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import uuid
+import mock
+
+from rest_framework.test import APIClient
+from django.test import TestCase
+
+from catalog.pub.database.models import VnfPkgSubscriptionModel
+from .const import vnf_subscription_data
+
+
+class TestNfPackageSubscription(TestCase):
+ def setUp(self):
+ self.client = APIClient()
+ VnfPkgSubscriptionModel.objects.filter().delete()
+ self.vnf_subscription_data = vnf_subscription_data
+
+ def tearDown(self):
+ pass
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_create_vnf_subscription(self, mock_uuid4, mock_requests):
+ temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ response = self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ self.assertEqual(201, response.status_code)
+ self.assertEqual(
+ self.vnf_subscription_data["callbackUri"],
+ response.data["callbackUri"]
+ )
+ self.assertEqual(temp_uuid, response.data["id"])
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_duplicate_subscriptions(self, mock_uuid4, mock_requests):
+ temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+ temp1_uuid = "00342b18-a5c7-11e8-998c-bf1755941f12"
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.side_effect = [temp_uuid, temp1_uuid]
+ response = self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ self.assertEqual(201, response.status_code)
+ self.assertEqual(
+ self.vnf_subscription_data["callbackUri"],
+ response.data["callbackUri"]
+ )
+ self.assertEqual(temp_uuid, response.data["id"])
+ temp_uuid = "00442b18-a5c7-11e8-998c-bf1755941f12"
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ response = self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ self.assertEqual(303, response.status_code)
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_get_subscriptions(self, mock_uuid4, mock_requests):
+ temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ response = self.client.get(
+ "/api/vnfpkgm/v1/subscriptions?usageState=IN_USE",
+ format='json'
+ )
+ self.assertEqual(200, response.status_code)
+ self.assertEqual(1, len(response.data))
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_get_subscriptions_with_invalid_params(self, mock_uuid4, mock_requests):
+ temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ response = self.client.get(
+ "/api/vnfpkgm/v1/subscriptions?dummy=dummy",
+ format='json'
+ )
+ self.assertEqual(400, response.status_code)
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_get_subscription_with_id(self, mock_uuid4, mock_requests):
+ temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ response = self.client.get(
+ "/api/vnfpkgm/v1/subscriptions/%s" % temp_uuid,
+ format='json'
+ )
+ self.assertEqual(200, response.status_code)
+ self.assertEqual(temp_uuid, response.data["id"])
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_get_subscription_with_id_not_exists(self, mock_uuid4, mock_requests):
+ temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+ dummy_uuid = str(uuid.uuid4())
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ response = self.client.get(
+ "/api/vnfpkgm/v1/subscriptions/%s" % dummy_uuid,
+ format='json'
+ )
+ self.assertEqual(404, response.status_code)
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_delete_subscription_with_id(self, mock_uuid4, mock_requests):
+ temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+ dummy_uuid = str(uuid.uuid4())
+ mock_requests.return_value.status_code = 204
+ mock_requests.get.status_code = 204
+ mock_uuid4.return_value = temp_uuid
+ self.client.post(
+ "/api/vnfpkgm/v1/subscriptions",
+ data=self.vnf_subscription_data,
+ format='json'
+ )
+ self.client.get(
+ "/api/vnfpkgm/v1/subscriptions/%s" % dummy_uuid,
+ format='json'
+ )
+ response = self.client.delete("/api/vnfpkgm/v1/subscriptions/%s" % temp_uuid)
+ self.assertEqual(204, response.status_code)
+
+ @mock.patch("requests.get")
+ @mock.patch.object(uuid, 'uuid4')
+ def test_delete_subscription_with_id_not_exists(self, mock_uuid4, mock_requests):
+ dummy_uuid = str(uuid.uuid4())
+ response = self.client.delete("/api/vnfpkgm/v1/subscriptions/%s" % dummy_uuid)
+ self.assertEqual(404, response.status_code)
diff --git a/catalog/packages/tests/test_vnfpackage.py b/catalog/packages/tests/test_vnfpackage.py
new file mode 100644
index 0000000..0d8cbad
--- /dev/null
+++ b/catalog/packages/tests/test_vnfpackage.py
@@ -0,0 +1,258 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import mock
+from rest_framework.test import APIClient
+from django.test import TestCase
+from rest_framework import status
+from catalog.packages.biz.sdc_vnf_package import NfDistributeThread, NfPkgDeleteThread
+from catalog.pub.database.models import JobStatusModel, JobModel
+from catalog.pub.database.models import VnfPackageModel
+from catalog.pub.msapi import sdc
+from catalog.pub.utils import restcall, toscaparser
+from .const import vnfd_data
+
+
class TestNfPackage(TestCase):
    """Tests for the NF (VNF) package distribute/delete/query/parse APIs."""

    def setUp(self):
        # Fresh API client and empty tables so each test asserts exact counts.
        self.client = APIClient()
        VnfPackageModel.objects.filter().delete()
        JobModel.objects.filter().delete()
        JobStatusModel.objects.filter().delete()
        self.vnfd_data = vnfd_data

    def tearDown(self):
        pass

    def assert_job_result(self, job_id, job_progress, job_detail):
        """Assert exactly one job-status row matches (id, progress, detail)."""
        jobs = JobStatusModel.objects.filter(
            jobid=job_id,
            progress=job_progress,
            descp=job_detail)
        self.assertEqual(1, len(jobs))

    @mock.patch.object(NfDistributeThread, 'run')
    def test_nf_pkg_distribute_normal(self, mock_run):
        """Distribute request is accepted (202); the worker thread is mocked out."""
        resp = self.client.post(
            "/api/catalog/v1/vnfpackages",
            {
                "csarId": "1",
                "vimIds": ["1"]
            },
            format='json'
        )
        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)

    def test_nf_pkg_distribute_when_csar_already_exist(self):
        """Distributing a csarId already stored ends the job at progress 255."""
        VnfPackageModel(
            vnfPackageId="1",
            vnfdId="vcpe_vfw_zte_1_0"
        ).save()
        NfDistributeThread(
            csar_id="1",
            vim_ids=["1"],
            lab_vim_id="",
            job_id="2"
        ).run()
        # 255 is the job's failure progress value in these tests.
        self.assert_job_result("2", 255, "NF CSAR(1) already exists.")

    @mock.patch.object(restcall, 'call_req')
    @mock.patch.object(sdc, 'download_artifacts')
    @mock.patch.object(toscaparser, 'parse_vnfd')
    def test_nf_pkg_distribute_when_vnfd_already_exist(self,
                                                       mock_parse_vnfd,
                                                       mock_download_artifacts,
                                                       mock_call_req):
        """A second package carrying an already-stored vnfdId fails the job (255)."""
        mock_parse_vnfd.return_value = json.JSONEncoder().encode(self.vnfd_data)
        mock_download_artifacts.return_value = "/home/hss.csar"
        mock_call_req.return_value = [0, json.JSONEncoder().encode([{
            "uuid": "1",
            "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/hss.csar"
        }]), '200']
        # Pre-existing row with the same vnfdId the parsed csar will report.
        VnfPackageModel(vnfPackageId="2", vnfdId="zte-hss-1.0").save()
        NfDistributeThread(
            csar_id="1",
            vim_ids=["1"],
            lab_vim_id="",
            job_id="2"
        ).run()
        self.assert_job_result("2", 255, "VNF package(zte-hss-1.0) already exists.")

    @mock.patch.object(restcall, 'call_req')
    @mock.patch.object(sdc, 'download_artifacts')
    @mock.patch.object(toscaparser, 'parse_vnfd')
    def test_nf_pkg_distribute_successfully(self,
                                            mock_parse_vnfd,
                                            mock_download_artifacts,
                                            mock_call_req):
        """Happy path: distribution job finishes at progress 100."""
        mock_parse_vnfd.return_value = json.JSONEncoder().encode(self.vnfd_data)
        mock_download_artifacts.return_value = "/home/hss.csar"
        mock_call_req.return_value = [0, json.JSONEncoder().encode([{
            "uuid": "1",
            "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/hss.csar"
        }]), '200']
        NfDistributeThread(
            csar_id="1",
            vim_ids=["1"],
            lab_vim_id="",
            job_id="4"
        ).run()
        self.assert_job_result("4", 100, "CSAR(1) distribute successfully.")

    ###############################################################################################################

    @mock.patch.object(NfPkgDeleteThread, 'run')
    def test_nf_pkg_delete_normal(self, mock_run):
        """Delete request is accepted (202); the worker thread is mocked out."""
        resp = self.client.delete("/api/catalog/v1/vnfpackages/1")
        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)

    def test_nf_pkg_normal_delete(self):
        """Running the delete thread synchronously completes the job at 100."""
        VnfPackageModel(
            vnfPackageId="2",
            vnfdId="vcpe_vfw_zte_1_0"
        ).save()
        NfPkgDeleteThread(
            csar_id="2",
            job_id="2"
        ).run()
        self.assert_job_result("2", 100, "Delete CSAR(2) successfully.")

    def test_nf_pkg_get_all(self):
        """GET /vnfpackages lists every stored package in serialized form."""
        VnfPackageModel(
            vnfPackageId="3",
            vnfdId="3",
            vnfVendor='3',
            vnfdVersion='3',
            vnfSoftwareVersion='',
            vnfPackageUri='',
            vnfdModel=''
        ).save()
        VnfPackageModel(
            vnfPackageId="4",
            vnfdId="4",
            vnfVendor='4',
            vnfdVersion='4',
            vnfSoftwareVersion='',
            vnfPackageUri='',
            vnfdModel=''
        ).save()
        resp = self.client.get("/api/catalog/v1/vnfpackages")
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        expect_data = [
            {
                "imageInfo": [],
                "csarId": "3",
                "packageInfo": {
                    "csarName": "",
                    "vnfdModel": "",
                    "vnfdProvider": "3",
                    "vnfdId": "3",
                    "downloadUrl": "http://127.0.0.1:8806/static/catalog/3/",
                    "vnfVersion": "",
                    "vnfdVersion": "3",
                    "vnfPackageId": "3"
                }
            },
            {
                "imageInfo": [],
                "csarId": "4",
                "packageInfo": {
                    "csarName": "",
                    "vnfdModel": "",
                    "vnfdProvider": "4",
                    "vnfdId": "4",
                    "downloadUrl": "http://127.0.0.1:8806/static/catalog/4/",
                    "vnfVersion": "",
                    "vnfdVersion": "4",
                    "vnfPackageId": "4"
                }
            }
        ]
        self.assertEqual(expect_data, resp.data)

    def test_nf_pkg_get_one(self):
        """GET /vnfpackages/<csarId> returns the single matching package."""
        VnfPackageModel(
            vnfPackageId="4",
            vnfdId="4",
            vnfVendor='4',
            vnfdVersion='4',
            vnfSoftwareVersion='',
            vnfPackageUri='',
            vnfdModel=''
        ).save()

        resp = self.client.get("/api/catalog/v1/vnfpackages/4")
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        expect_data = {
            "imageInfo": [],
            "csarId": "4",
            "packageInfo": {
                "csarName": "",
                "vnfdModel": "",
                "vnfdProvider": "4",
                "vnfdId": "4",
                "downloadUrl": "http://127.0.0.1:8806/static/catalog/4/",
                "vnfVersion": "",
                "vnfdVersion": "4",
                "vnfPackageId": "4"
            }
        }
        self.assertEqual(expect_data, resp.data)

    def test_nf_pkg_get_one_failed(self):
        """Querying an unknown csarId yields 500 with an error body."""
        VnfPackageModel(
            vnfPackageId="4",
            vnfdId="4",
            vnfVendor='4',
            vnfdVersion='4',
            vnfSoftwareVersion='',
            vnfPackageUri='',
            vnfdModel=''
        ).save()

        resp = self.client.get("/api/catalog/v1/vnfpackages/2")
        self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
        self.assertEqual({'error': 'Vnf package[2] not Found.'}, resp.data)

    ###############################################################################################################

    @mock.patch.object(toscaparser, 'parse_vnfd')
    def test_vnfd_parse_normal(self, mock_parse_vnfd):
        """Parsing a stored VNF package returns the parser's model (202)."""
        VnfPackageModel(
            vnfPackageId="8",
            vnfdId="10"
        ).save()
        mock_parse_vnfd.return_value = json.JSONEncoder().encode({"c": "d"})
        req_data = {
            "csarId": "8",
            "inputs": []
        }
        resp = self.client.post(
            "/api/catalog/v1/parservnfd",
            req_data,
            format='json'
        )
        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
        self.assertEqual({"model": '{"c": "d"}'}, resp.data)

    def test_vnfd_parse_when_csar_not_exist(self):
        """Parsing an unknown csarId yields 500 with an error body."""
        req_data = {"csarId": "1", "inputs": []}
        resp = self.client.post(
            "/api/catalog/v1/parservnfd",
            req_data,
            format='json'
        )
        self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
        self.assertEqual(resp.data, {"error": "VNF CSAR(1) does not exist."})
diff --git a/catalog/packages/urls.py b/catalog/packages/urls.py
new file mode 100644
index 0000000..776e940
--- /dev/null
+++ b/catalog/packages/urls.py
@@ -0,0 +1,76 @@
+# Copyright 2017-2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.conf.urls import url
+
+from catalog.packages.views import vnf_package_views
+from catalog.packages.views.vnf_package_subscription_views import CreateQuerySubscriptionView,\
+ QueryTerminateSubscriptionView
+from catalog.packages.views.vnf_package_artifact_views import FetchVnfPkgmArtifactsView
+from catalog.packages.views import catalog_views, ns_descriptor_views, pnf_descriptor_views, nsdm_subscription_views
+from catalog.packages.views.health_check_views import HealthCheckView
+
+
urlpatterns = [

    # Sync package from SDC
    url(r'^api/catalog/v1/nspackages$', catalog_views.nspackages_rc, name='nspackages_rc'),
    url(r'^api/catalog/v1/nspackages/(?P<csarId>[0-9a-zA-Z\-\_]+)$', catalog_views.ns_rd_csar, name='nspackage_rd'),
    url(r'^api/catalog/v1/vnfpackages$', catalog_views.nfpackages_rc, name='nfpackages_rc'),
    url(r'^api/catalog/v1/vnfpackages/(?P<csarId>[0-9a-zA-Z\-\_]+)$', catalog_views.nf_rd_csar, name='nfpackage_rd'),
    url(r'^api/parser/v1/service_packages$', catalog_views.servicepackages_rc, name='servicepackages_rc'),
    url(r'^api/parser/v1/service_packages/(?P<csarId>[0-9a-zA-Z\-\_]+)$', catalog_views.service_rd_csar, name='servicepackage_rd'),

    # NFV Model Parser
    # NOTE(review): the /api/parser and /api/catalog parser routes below reuse
    # the same url names (nsmodelparser_rc, vnfmodelparser_rc, pnfmodelparser_rc);
    # Django reverse() resolves a duplicated name to the last entry registered —
    # confirm that is intended before relying on reverse() for these.
    url(r'^api/parser/v1/parser$', catalog_views.model_parser, name='modelparser_rc'),
    url(r'^api/parser/v1/parsernsd$', catalog_views.ns_model_parser, name='nsmodelparser_rc'),
    url(r'^api/parser/v1/parservnfd$', catalog_views.vnf_model_parser, name='vnfmodelparser_rc'),
    url(r'^api/parser/v1/parserpnfd$', pnf_descriptor_views.pnf_model_parser, name='pnfmodelparser_rc'),
    url(r'^api/catalog/v1/parsernsd$', catalog_views.ns_model_parser, name='nsmodelparser_rc'),
    url(r'^api/catalog/v1/parservnfd$', catalog_views.vnf_model_parser, name='vnfmodelparser_rc'),
    url(r'^api/catalog/v1/parserpnfd$', pnf_descriptor_views.pnf_model_parser, name='pnfmodelparser_rc'),

    # ETSI SOL005 NSD API
    url(r'^api/nsd/v1/ns_descriptors$', ns_descriptor_views.ns_descriptors_rc, name='ns_descriptors_rc'),
    url(r'^api/nsd/v1/ns_descriptors/(?P<nsdInfoId>[0-9a-zA-Z\-\_]+)$', ns_descriptor_views.ns_info_rd, name='ns_info_rd'),
    url(r'^api/nsd/v1/ns_descriptors/(?P<nsdInfoId>[0-9a-zA-Z\-\_]+)/nsd_content$', ns_descriptor_views.nsd_content_ru, name='nsd_content_ru'),
    url(r'^api/nsd/v1/subscriptions$', nsdm_subscription_views.nsd_subscription_rc, name='nsd_subscription_rc'),
    url(r'^api/nsd/v1/subscriptions/(?P<subscriptionId>[0-9a-zA-Z\-\_]+)$', nsdm_subscription_views.nsd_subscription_rd, name='nsd_subscription_rd'),

    # ETSI SOL005 PNFD
    url(r'^api/nsd/v1/pnf_descriptors$', pnf_descriptor_views.pnf_descriptors_rc, name='pnf_descriptors_rc'),
    url(r'^api/nsd/v1/pnf_descriptors/(?P<pnfdInfoId>[0-9a-zA-Z\-\_]+)$', pnf_descriptor_views.pnfd_info_rd, name='pnfd_info_rd'),
    url(r'^api/nsd/v1/pnf_descriptors/(?P<pnfdInfoId>[0-9a-zA-Z\-\_]+)/pnfd_content$', pnf_descriptor_views.pnfd_content_ru, name='pnfd_content_ru'),

    # ETSI SOL005&SOL003 VNF Package
    url(r'^api/vnfpkgm/v1/vnf_packages$', vnf_package_views.vnf_packages_rc, name='vnf_packages_rc'),
    url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)$', vnf_package_views.vnf_package_rd, name='vnf_package_rd'),
    url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/package_content$', vnf_package_views.package_content_ru, name='package_content_ru'),
    url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/package_content/upload_from_uri$', vnf_package_views.upload_from_uri_c, name='upload_from_uri_c'),

    # ETSI SOL 005 VNF Package Management Subscription APIs
    url(r'^api/vnfpkgm/v1/subscriptions$', CreateQuerySubscriptionView.as_view(), name='subscriptions_create_query'),
    url(r'^api/vnfpkgm/v1/subscriptions/(?P<subscriptionId>[0-9a-zA-Z\-\_]+)$', QueryTerminateSubscriptionView.as_view(), name='subscriptions_query_terminate'),
    url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/artifacts/(?P<artifactPath>[0-9a-zA-Z\-\_]+)$', FetchVnfPkgmArtifactsView.as_view(), name="fetch_vnf_artifacts"),
    # url(r'^api/vnfpkgm/v1/subscriptions/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)$', vnf_package_subscription_views.vnf_package_subscriptions_rc, name='subscriptions_rc'),
    # url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/vnfd$', vnfd.as_view(), name='vnfd_r'),# url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/artifacts/artifactPath$', artifacts.as_view(), name='artifacts_r'),

    # url(r'^api/vnfpkgm/v1/subscriptions/(?P<subscriptionId>[0-9a-zA-Z\-\_]+)$', vnfpkg_subscription.as_view(), name='subscription_rd'),

    # health check
    url(r'^api/vnfpkgm/v1/health_check$', HealthCheckView.as_view()),
    url(r'^api/nsd/v1/health_check$', HealthCheckView.as_view()),
    url(r'^api/catalog/v1/health_check$', HealthCheckView.as_view()),
    url(r'^api/parser/v1/health_check$', HealthCheckView.as_view()),
]
diff --git a/catalog/packages/views/__init__.py b/catalog/packages/views/__init__.py
new file mode 100644
index 0000000..342c2a8
--- /dev/null
+++ b/catalog/packages/views/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/packages/views/catalog_views.py b/catalog/packages/views/catalog_views.py
new file mode 100644
index 0000000..6ed9fb9
--- /dev/null
+++ b/catalog/packages/views/catalog_views.py
@@ -0,0 +1,535 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import uuid
+
+from drf_yasg import openapi
+from drf_yasg.utils import no_body, swagger_auto_schema
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+from catalog.packages.biz import sdc_vnf_package, sdc_ns_package
+from catalog.packages.biz.pnf_descriptor import PnfDescriptor
+from catalog.packages.biz.sdc_service_package import ServicePackage
+from catalog.packages.serializers.catalog_serializers import InternalErrorRequestSerializer, \
+ ServicePackageDistributeRequestSerializer, ServicePackagesSerializer, ServicePackageSerializer
+from catalog.packages.serializers.catalog_serializers import NfPackageDistributeRequestSerializer
+from catalog.packages.serializers.catalog_serializers import NfPackageSerializer
+from catalog.packages.serializers.catalog_serializers import NfPackagesSerializer
+from catalog.packages.serializers.catalog_serializers import NsPackageDistributeRequestSerializer
+from catalog.packages.serializers.catalog_serializers import NsPackageDistributeResponseSerializer
+from catalog.packages.serializers.catalog_serializers import NsPackageSerializer
+from catalog.packages.serializers.catalog_serializers import NsPackagesSerializer
+from catalog.packages.serializers.catalog_serializers import ParseModelRequestSerializer
+from catalog.packages.serializers.catalog_serializers import ParseModelResponseSerializer
+from catalog.packages.serializers.catalog_serializers import PostJobResponseSerializer
+from catalog.packages.views.common import fmt_error_rsp
+from catalog.pub.exceptions import PackageNotFoundException, PackageHasExistsException
+from catalog.pub.utils.syscomm import fun_name
+from catalog.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="On distribute NS package",
    request_body=NsPackageDistributeRequestSerializer,
    responses={
        status.HTTP_202_ACCEPTED: NsPackageDistributeResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@swagger_auto_schema(
    method='GET',
    operation_description="Query NS packages",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: NsPackagesSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['POST', 'GET'])
def nspackages_rc(request, *args, **kwargs):
    """List NS packages (GET) or distribute one from SDC by csarId (POST).

    The business layer returns (code, payload) pairs; code 0 is success.
    Success answers 200 (GET) or 202 (POST) with the payload; any non-zero
    code answers 500 with {'error': payload}.
    """
    logger.debug("Enter %s, method is %s", fun_name(), request.method)
    ret, normal_status, response_serializer, validation_error = None, None, None, None

    if request.method == 'GET':
        # Gets ns package list
        ret = sdc_ns_package.ns_get_csars()
        normal_status = status.HTTP_200_OK

        # Only validate the payload on success; error payloads fall through
        # to the common 500 answer below.
        if ret[0] == 0:
            response_serializer = NsPackagesSerializer(data=ret[1])
            validation_error = handleValidatonError(
                response_serializer, False)
            if validation_error:
                return validation_error
    elif request.method == 'POST':
        # Distributes the package according to the given csarId
        request_serializer = NsPackageDistributeRequestSerializer(data=request.data)
        validation_error = handleValidatonError(request_serializer, True)
        if validation_error:
            return validation_error

        csar_id = ignore_case_get(request.data, "csarId")
        logger.debug("csar_id is %s", csar_id)
        ret = sdc_ns_package.ns_on_distribute(csar_id)
        normal_status = status.HTTP_202_ACCEPTED

    logger.debug("Leave %s, Return value is %s", fun_name(), ret)
    if ret[0] != 0:
        return Response(
            data={
                'error': ret[1]},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    return Response(data=ret[1], status=normal_status)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="On distribute Nf package",
    request_body=NfPackageDistributeRequestSerializer(),
    responses={
        status.HTTP_202_ACCEPTED: PostJobResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@swagger_auto_schema(
    method='GET',
    operation_description="Query Nf packages",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: NfPackagesSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['POST', 'GET'])
def nfpackages_rc(request, *args, **kwargs):
    """List NF (VNF) packages (GET) or start an async distribution (POST).

    POST validates the request, spawns NfDistributeThread and immediately
    answers 202 with {"jobId": ...}; progress is tracked via that job id.
    GET answers 200 with the package list.  Non-zero business codes answer
    500 with {'error': ...}.
    """
    logger.debug(
        "Enter %s%s, method is %s",
        fun_name(),
        request.data,
        request.method)
    ret, normal_status, response_serializer, validation_error = None, None, None, None
    if request.method == 'GET':
        ret = sdc_vnf_package.nf_get_csars()
        normal_status = status.HTTP_200_OK
        response_serializer = NfPackagesSerializer(data=ret[1])
    elif request.method == 'POST':
        # NOTE(review): local name is misspelled ("serivalizer"); kept as-is
        # since this edit only adds documentation.
        request_serivalizer = NfPackageDistributeRequestSerializer(
            data=request.data)
        validation_error = handleValidatonError(
            request_serivalizer, True)
        if validation_error:
            return validation_error

        csar_id = ignore_case_get(request_serivalizer.data, "csarId")
        vim_ids = ignore_case_get(request_serivalizer.data, "vimIds")
        lab_vim_id = ignore_case_get(request_serivalizer.data, "labVimId")
        job_id = str(uuid.uuid4())
        # Fire-and-forget worker; the caller polls the job for progress.
        sdc_vnf_package.NfDistributeThread(
            csar_id, vim_ids, lab_vim_id, job_id).start()
        ret = [0, {"jobId": job_id}]
        normal_status = status.HTTP_202_ACCEPTED

        response_serializer = PostJobResponseSerializer(data=ret[1])
    logger.debug("Leave %s, Return value is %s", fun_name(), ret)

    if ret[0] != 0:
        return Response(
            data={
                'error': ret[1]},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    validation_error = handleValidatonError(
        response_serializer, False)
    if validation_error:
        return validation_error

    return Response(data=response_serializer.data, status=normal_status)
+
+
@swagger_auto_schema(
    method='DELETE',
    operation_description="Delete one NS package",
    request_body=no_body,
    manual_parameters=[
        openapi.Parameter(
            'csarId',
            openapi.IN_QUERY,
            "csarId",
            type=openapi.TYPE_STRING)],
    responses={
        status.HTTP_200_OK: NsPackageDistributeResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: openapi.Response(
            'error message',
            openapi.Schema(
                type=openapi.TYPE_STRING))})
@swagger_auto_schema(
    method='GET',
    operation_description="Query one NS package",
    request_body=no_body,
    manual_parameters=[
        openapi.Parameter(
            'csarId',
            openapi.IN_QUERY,
            "csarId",
            type=openapi.TYPE_STRING)],
    responses={
        status.HTTP_200_OK: NsPackageSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: openapi.Response(
            'error message',
            openapi.Schema(
                type=openapi.TYPE_STRING))})
@api_view(http_method_names=['DELETE', 'GET'])
def ns_rd_csar(request, *args, **kwargs):
    """Query (GET) or delete (DELETE) a single NS package by csarId.

    Both verbs answer 200 with the business payload on success and 500
    with {'error': ...} when the business call reports a non-zero code.
    """
    csar_id = ignore_case_get(kwargs, "csarId")
    logger.info("Enter %s, method is %s, csar_id is %s",
                fun_name(), request.method, csar_id)
    ret, normal_status, response_serializer, validation_error = None, None, None, None
    if request.method == 'GET':
        ret = sdc_ns_package.ns_get_csar(csar_id)
        normal_status = status.HTTP_200_OK
        # Only validate the payload on success.
        if ret[0] == 0:
            response_serializer = NsPackageSerializer(data=ret[1])
            validation_error = handleValidatonError(response_serializer, False)
            if validation_error:
                return validation_error
    elif request.method == 'DELETE':
        ret = sdc_ns_package.ns_delete_csar(csar_id)
        normal_status = status.HTTP_200_OK
    logger.info("Leave %s, Return value is %s", fun_name(), ret)
    if ret[0] != 0:
        return Response(
            data={
                'error': ret[1]},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    return Response(data=ret[1], status=normal_status)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="On distribute Service package",
    request_body=ServicePackageDistributeRequestSerializer,
    responses={
        status.HTTP_202_ACCEPTED: "",
        status.HTTP_400_BAD_REQUEST: InternalErrorRequestSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@swagger_auto_schema(
    method='GET',
    operation_description="Query Service packages",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: ServicePackagesSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['POST', 'GET'])
def servicepackages_rc(request, *args, **kwargs):
    """List service packages (GET) or distribute one by csarId (POST).

    Unlike the NS/NF views, the ServicePackage business layer signals
    failure via exceptions: a duplicate distribution answers 400, any
    other error answers 500; a successful POST answers a bare 202.
    """
    logger.debug("Enter %s, method is %s", fun_name(), request.method)

    if request.method == 'GET':
        # Gets service package list
        try:
            csar_list = ServicePackage().get_csars()
            response_serializer = ServicePackagesSerializer(data=csar_list)
            validation_error = handleValidatonError(response_serializer, False)
            if validation_error:
                return validation_error
            return Response(data=csar_list, status=status.HTTP_200_OK)
        except Exception as e:
            error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
            return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
    elif request.method == 'POST':
        # Distributes the package according to the given csarId
        request_serializer = ServicePackageDistributeRequestSerializer(data=request.data)
        validation_error = handleValidatonError(request_serializer, True)
        if validation_error:
            return validation_error

        csar_id = ignore_case_get(request.data, "csarId")
        logger.debug("csar_id is %s", csar_id)
        try:
            ServicePackage().on_distribute(csar_id)
            return Response(status=status.HTTP_202_ACCEPTED)
        except PackageHasExistsException as e:
            error_status = status.HTTP_400_BAD_REQUEST
            return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
        except Exception as e:
            error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
            return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
+
+
@swagger_auto_schema(
    method='DELETE',
    operation_description="Delete one Service package",
    request_body=no_body,
    manual_parameters=[
        openapi.Parameter(
            'csarId',
            openapi.IN_QUERY,
            "csarId",
            type=openapi.TYPE_STRING)],
    responses={
        status.HTTP_204_NO_CONTENT: "",
        status.HTTP_404_NOT_FOUND: InternalErrorRequestSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@swagger_auto_schema(
    method='GET',
    operation_description="Query one Service package",
    request_body=no_body,
    manual_parameters=[
        openapi.Parameter(
            'csarId',
            openapi.IN_QUERY,
            "csarId",
            type=openapi.TYPE_STRING)],
    responses={
        status.HTTP_200_OK: ServicePackageSerializer,
        status.HTTP_404_NOT_FOUND: InternalErrorRequestSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['DELETE', 'GET'])
def service_rd_csar(request, *args, **kwargs):
    """Query (GET) or delete (DELETE) a single service package by csarId.

    An unknown csarId answers 404; any other business error answers 500.
    Successful GET answers 200 with the package; successful DELETE 204.
    """
    csar_id = ignore_case_get(kwargs, "csarId")
    logger.info("Enter %s, method is %s, csar_id is %s", fun_name(), request.method, csar_id)

    if request.method == 'GET':
        try:
            ret = ServicePackage().get_csar(csar_id)
            response_serializer = ServicePackageSerializer(data=ret)
            validation_error = handleValidatonError(response_serializer, False)
            if validation_error:
                return validation_error
            return Response(data=ret, status=status.HTTP_200_OK)
        except PackageNotFoundException as e:
            error_status = status.HTTP_404_NOT_FOUND
            return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
        except Exception as e:
            error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
            return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)

    elif request.method == 'DELETE':
        try:
            ServicePackage().delete_csar(csar_id)
            return Response(status=status.HTTP_204_NO_CONTENT)
        except PackageNotFoundException as e:
            error_status = status.HTTP_404_NOT_FOUND
            return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
        except Exception as e:
            error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
            return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
+
+
@swagger_auto_schema(
    method='DELETE',
    operation_description="Delete one Nf package",
    request_body=no_body,
    manual_parameters=[
        openapi.Parameter(
            'csarId',
            openapi.IN_QUERY,
            "csarId",
            type=openapi.TYPE_STRING)],
    responses={
        status.HTTP_202_ACCEPTED: PostJobResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: openapi.Response(
            'error message',
            openapi.Schema(
                type=openapi.TYPE_STRING))})
@swagger_auto_schema(
    method='GET',
    operation_description="Query one Nf package",
    request_body=no_body,
    manual_parameters=[
        openapi.Parameter(
            'csarId',
            openapi.IN_QUERY,
            "csarId",
            type=openapi.TYPE_STRING)],
    responses={
        status.HTTP_200_OK: NfPackageSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: openapi.Response(
            'error message',
            openapi.Schema(
                type=openapi.TYPE_STRING))})
@api_view(http_method_names=['DELETE', 'GET'])
def nf_rd_csar(request, *args, **kwargs):
    """Query (GET) or asynchronously delete (DELETE) one NF package.

    DELETE spawns NfPkgDeleteThread and answers 202 with {"jobId": ...};
    GET answers 200 with the package.  Non-zero business codes answer 500
    with {'error': ...}.
    """
    csar_id = ignore_case_get(kwargs, "csarId")
    logger.info("Enter %s, method is %s, csar_id is %s",
                fun_name(), request.method, csar_id)
    ret, normal_status, response_serializer, validation_error = None, None, None, None

    if request.method == 'GET':
        ret = sdc_vnf_package.nf_get_csar(csar_id)
        normal_status = status.HTTP_200_OK
        response_serializer = NfPackageSerializer(data=ret[1])

    elif request.method == 'DELETE':
        job_id = str(uuid.uuid4())
        # Fire-and-forget worker; the caller polls the job for progress.
        sdc_vnf_package.NfPkgDeleteThread(csar_id, job_id).start()
        ret = [0, {"jobId": job_id}]
        normal_status = status.HTTP_202_ACCEPTED
        response_serializer = PostJobResponseSerializer(data=ret[1])

    logger.info("Leave %s, Return value is %s", fun_name(), ret)
    if ret[0] != 0:
        return Response(
            data={
                'error': ret[1]},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    validation_error = handleValidatonError(
        response_serializer, False)
    if validation_error:
        return validation_error

    return Response(data=response_serializer.data, status=normal_status)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Parse model(NS, Service, VNF, PNF)",
    request_body=ParseModelRequestSerializer,
    responses={
        status.HTTP_202_ACCEPTED: ParseModelResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['POST'])
def model_parser(request, *args, **kwargs):
    """Parse a distributed package model, dispatching on packageType.

    Supported types (case-insensitive): Service, NS, VNF, PNF.  Answers
    202 with the parsed model, 400 for a missing/unknown packageType,
    404 when a Service package is not found, and 500 for other failures.
    """
    csar_id = ignore_case_get(request.data, "csarId")
    package_type = ignore_case_get(request.data, "packageType")
    inputs = ignore_case_get(request.data, "inputs")
    logger.debug(
        "Enter %s, csar_id=%s, package_type=%s, inputs=%s",
        fun_name(),
        csar_id,
        package_type,
        inputs)

    # Normalize once.  Previously a missing packageType crashed with
    # AttributeError on None.lower() (an uncontrolled 500) instead of
    # returning the intended 400 answer below; the `.__eq__` chains are
    # also replaced with plain `==` comparisons.
    package_type = package_type.lower() if package_type else ""

    if package_type == "service":
        try:
            ret = ServicePackage().parse_serviced(csar_id, inputs)
            response_serializer = ParseModelResponseSerializer(data=ret)
            validation_error = handleValidatonError(
                response_serializer, False)
            if validation_error:
                return validation_error
            return Response(data=response_serializer.data, status=status.HTTP_202_ACCEPTED)
        except PackageNotFoundException as e:
            error_status = status.HTTP_404_NOT_FOUND
            return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
        except Exception as e:
            error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
            return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
    elif package_type == "ns":
        ret = sdc_ns_package.parse_nsd(csar_id, inputs)
    elif package_type == "vnf":
        ret = sdc_vnf_package.parse_vnfd(csar_id, inputs)
    elif package_type == "pnf":
        ret = PnfDescriptor().parse_pnfd(csar_id, inputs)
    else:
        error_status = status.HTTP_400_BAD_REQUEST
        error_message = "Invalid package type, it should be one of [VNF, PNF, NS, Service]"
        return Response(data=fmt_error_rsp(error_message, error_status), status=error_status)

    if ret[0] != 0:
        return Response(
            data={
                'error': ret[1]},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    response_serializer = ParseModelResponseSerializer(data=ret[1])
    validation_error = handleValidatonError(
        response_serializer, False)
    if validation_error:
        return validation_error

    return Response(data=response_serializer.data, status=status.HTTP_202_ACCEPTED)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Parse NS model",
    request_body=ParseModelRequestSerializer,
    responses={
        status.HTTP_202_ACCEPTED: ParseModelResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['POST'])
def ns_model_parser(request, *args, **kwargs):
    """Parse an NS model for the given csarId/inputs; 202 on success, 500 on failure."""
    csar_id = ignore_case_get(request.data, "csarId")
    inputs = ignore_case_get(request.data, "inputs")
    logger.debug(
        "Enter %s, csar_id=%s, inputs=%s",
        fun_name(),
        csar_id,
        inputs)
    parse_result = sdc_ns_package.parse_nsd(csar_id, inputs)
    logger.info("Leave %s, Return value is %s", fun_name(), parse_result)
    error_code, payload = parse_result[0], parse_result[1]
    if error_code != 0:
        # Business-layer failure: surface the message under 'error'.
        return Response(data={'error': payload},
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    serializer = ParseModelResponseSerializer(data=payload)
    invalid = handleValidatonError(serializer, False)
    if invalid:
        return invalid
    return Response(data=serializer.data, status=status.HTTP_202_ACCEPTED)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Parse NF model",
    request_body=ParseModelRequestSerializer,
    responses={
        status.HTTP_202_ACCEPTED: ParseModelResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['POST'])
def vnf_model_parser(request, *args, **kwargs):
    """Parse a VNF model for the given csarId/inputs; 202 on success, 500 on failure."""
    csar_id = ignore_case_get(request.data, "csarId")
    inputs = ignore_case_get(request.data, "inputs")
    logger.debug(
        "Enter %s, csar_id=%s, inputs=%s",
        fun_name(),
        csar_id,
        inputs)
    parse_result = sdc_vnf_package.parse_vnfd(csar_id, inputs)
    logger.info("Leave %s, Return value is %s", fun_name(), parse_result)
    error_code, payload = parse_result[0], parse_result[1]
    if error_code != 0:
        # Business-layer failure: surface the message under 'error'.
        return Response(data={'error': payload},
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    serializer = ParseModelResponseSerializer(data=payload)
    invalid = handleValidatonError(serializer, False)
    if invalid:
        return invalid
    return Response(data=serializer.data, status=status.HTTP_202_ACCEPTED)
+
+
def handleValidatonError(base_serializer, is_request):
    """Validate a DRF serializer; return an error Response, or None if valid.

    ``is_request`` only selects the log wording (invalid request payload
    vs invalid response payload).  The public name keeps its historical
    misspelling because callers throughout this module use it.
    """
    if base_serializer.is_valid():
        return None

    errormessage = base_serializer.errors
    logger.error(errormessage)
    logger.error('Invalid request' if is_request else 'Invalid response')
    return Response(
        data={'error': errormessage},
        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
diff --git a/catalog/packages/views/common.py b/catalog/packages/views/common.py
new file mode 100644
index 0000000..6285cb9
--- /dev/null
+++ b/catalog/packages/views/common.py
@@ -0,0 +1,123 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import traceback
+import logging
+
+from rest_framework import status
+from rest_framework.response import Response
+
+from catalog.pub.exceptions import CatalogException
+from catalog.pub.exceptions import BadRequestException
+from catalog.pub.exceptions import NsdmBadRequestException
+from catalog.pub.exceptions import PackageNotFoundException
+from catalog.pub.exceptions import ResourceNotFoundException
+from catalog.pub.exceptions import ArtifactNotFoundException
+from catalog.pub.exceptions import NsdmDuplicateSubscriptionException
+from catalog.pub.exceptions import VnfPkgDuplicateSubscriptionException
+from catalog.pub.exceptions import VnfPkgSubscriptionException
+
+logger = logging.getLogger(__name__)
+
+
def validate_data(data, serializer):
    """Instantiate *serializer* around *data* and validate it.

    Returns the bound serializer on success; raises ``CatalogException``
    carrying the serializer errors when validation fails.
    """
    instance = serializer(data=data)
    if instance.is_valid():
        return instance
    logger.error('Data validation failed.')
    raise CatalogException(instance.errors)
+
+
def fmt_error_rsp(error_message, status):
    """Build the flat error payload dict ({"errorMessage", "error"})."""
    payload = {"errorMessage": error_message}
    payload["error"] = status
    return payload
+
+
def make_error_resp(status, detail):
    """Wrap *status* and *detail* in a DRF ``Response`` using that HTTP status."""
    body = {
        'status': status,
        'detail': detail
    }
    return Response(data=body, status=status)
+
+
def view_safe_call_with_log(logger):
    """Decorator factory: run a view and translate known exceptions to HTTP errors.

    The catalog exception hierarchy is mapped onto HTTP status codes:
      * duplicate-subscription errors -> 303 See Other
      * not-found errors              -> 404 Not Found
      * bad-request errors            -> 400 Bad Request
      * other catalog/package errors  -> 500 Internal Server Error
    Anything unexpected is logged with a full traceback and answered with a
    generic 500, so no exception ever escapes the wrapped view.

    Except-clause order matches the original handler chain, so subclass
    relationships between the exception types resolve identically.
    """
    def view_safe_call(func):
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except (NsdmDuplicateSubscriptionException,
                    VnfPkgDuplicateSubscriptionException) as e:
                logger.error(e.args[0])
                return make_error_resp(
                    detail=e.args[0],
                    status=status.HTTP_303_SEE_OTHER
                )
            except (PackageNotFoundException,
                    ResourceNotFoundException,
                    ArtifactNotFoundException) as e:
                logger.error(e.args[0])
                return make_error_resp(
                    detail=e.args[0],
                    status=status.HTTP_404_NOT_FOUND
                )
            except (BadRequestException, NsdmBadRequestException) as e:
                logger.error(e.args[0])
                return make_error_resp(
                    detail=e.args[0],
                    status=status.HTTP_400_BAD_REQUEST
                )
            except (VnfPkgSubscriptionException, CatalogException) as e:
                logger.error(e.args[0])
                return make_error_resp(
                    detail=e.args[0],
                    status=status.HTTP_500_INTERNAL_SERVER_ERROR
                )
            except Exception as e:
                # str(e) instead of e.args[0]: an exception raised with no
                # arguments would otherwise crash the error handler itself
                # with an IndexError.
                logger.error(str(e))
                logger.error(traceback.format_exc())
                return make_error_resp(
                    detail='Unexpected exception',
                    status=status.HTTP_500_INTERNAL_SERVER_ERROR
                )
        return wrapper
    return view_safe_call
diff --git a/catalog/packages/views/health_check_views.py b/catalog/packages/views/health_check_views.py
new file mode 100644
index 0000000..cc1a379
--- /dev/null
+++ b/catalog/packages/views/health_check_views.py
@@ -0,0 +1,31 @@
+# Copyright (c) 2019, CMCC Technologies Co., Ltd.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from drf_yasg.utils import swagger_auto_schema
+from rest_framework import status
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+logger = logging.getLogger(__name__)
+
+
class HealthCheckView(APIView):
    """Liveness probe endpoint: always reports the service as active."""

    @swagger_auto_schema(
        responses={
            status.HTTP_200_OK: 'Active'})
    def get(self, request, format=None):
        """Return ``{"status": "active"}`` with HTTP 200."""
        logger.debug("Health check.")
        payload = {"status": "active"}
        return Response(payload)
diff --git a/catalog/packages/views/ns_descriptor_views.py b/catalog/packages/views/ns_descriptor_views.py
new file mode 100644
index 0000000..3b8c1f9
--- /dev/null
+++ b/catalog/packages/views/ns_descriptor_views.py
@@ -0,0 +1,139 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.http import StreamingHttpResponse
+from drf_yasg.utils import no_body, swagger_auto_schema
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from catalog.packages.biz.ns_descriptor import NsDescriptor
+from catalog.packages.serializers.create_nsd_info_request import CreateNsdInfoRequestSerializer
+from catalog.packages.serializers.nsd_info import NsdInfoSerializer
+from catalog.packages.serializers.nsd_infos import NsdInfosSerializer
+from catalog.packages.views.common import validate_data
+from catalog.pub.exceptions import CatalogException
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
+
@swagger_auto_schema(
    method='GET',
    operation_description="Query a NSD",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: NsdInfoSerializer(),
        status.HTTP_404_NOT_FOUND: 'NSDs do not exist',
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@swagger_auto_schema(
    method='DELETE',
    operation_description="Delete a NSD",
    request_body=no_body,
    responses={
        status.HTTP_204_NO_CONTENT: "No content",
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@api_view(http_method_names=['GET', 'DELETE'])
@view_safe_call_with_log(logger=logger)
def ns_info_rd(request, **kwargs):
    """Read (GET) or delete (DELETE) an individual NS descriptor resource."""
    nsd_info_id = kwargs.get("nsdInfoId")
    if request.method == 'GET':
        nsd_info = validate_data(
            NsDescriptor().query_single(nsd_info_id), NsdInfoSerializer)
        return Response(data=nsd_info.data, status=status.HTTP_200_OK)
    if request.method == 'DELETE':
        NsDescriptor().delete_single(nsd_info_id)
        return Response(status=status.HTTP_204_NO_CONTENT)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Create a NSD",
    request_body=CreateNsdInfoRequestSerializer(),
    responses={
        status.HTTP_201_CREATED: NsdInfoSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@swagger_auto_schema(
    method='GET',
    operation_description="Query multiple NSDs",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: NsdInfosSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@api_view(http_method_names=['POST', 'GET'])
@view_safe_call_with_log(logger=logger)
def ns_descriptors_rc(request):
    """Create (POST) an NS descriptor resource or query (GET) all NSDs."""
    if request.method == 'POST':
        req = validate_data(request.data, CreateNsdInfoRequestSerializer)
        nsd_info = NsDescriptor().create(req.data)
        validate_data(nsd_info, NsdInfoSerializer)
        return Response(data=nsd_info, status=status.HTTP_201_CREATED)

    if request.method == 'GET':
        # An absent nsdId means "return every NS descriptor".
        nsd_id = request.query_params.get("nsdId", None)
        nsd_infos = NsDescriptor().query_multiple(nsd_id)
        validate_data(nsd_infos, NsdInfosSerializer)
        return Response(data=nsd_infos, status=status.HTTP_200_OK)
+
+
@swagger_auto_schema(
    method='PUT',
    operation_description="Upload NSD content",
    request_body=no_body,
    responses={
        status.HTTP_204_NO_CONTENT: 'PNFD file',
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@swagger_auto_schema(
    method='GET',
    operation_description="Download NSD content",
    request_body=no_body,
    responses={
        status.HTTP_204_NO_CONTENT: "No content",
        status.HTTP_404_NOT_FOUND: 'NSD does not exist.',
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@api_view(http_method_names=['PUT', 'GET'])
@view_safe_call_with_log(logger=logger)
def nsd_content_ru(request, **kwargs):
    """Upload (PUT) or download (GET) the content of an NS descriptor."""
    nsd_info_id = kwargs.get("nsdInfoId")
    if request.method == 'PUT':
        files = request.FILES.getlist('file')
        try:
            local_file_name = NsDescriptor().upload(nsd_info_id, files[0])
            NsDescriptor().parse_nsd_and_save(nsd_info_id, local_file_name)
            return Response(data=None, status=status.HTTP_204_NO_CONTENT)
        except Exception:
            # Any failure (CatalogException included — the two original
            # branches were identical) marks the upload as failed, then a
            # bare raise preserves the traceback for view_safe_call_with_log.
            NsDescriptor().handle_upload_failed(nsd_info_id)
            raise

    if request.method == 'GET':
        # HTTP Range header allows partial downloads of the package file.
        file_range = request.META.get('HTTP_RANGE')
        file_iterator = NsDescriptor().download(nsd_info_id, file_range)
        return StreamingHttpResponse(file_iterator, status=status.HTTP_200_OK)
diff --git a/catalog/packages/views/nsdm_subscription_views.py b/catalog/packages/views/nsdm_subscription_views.py
new file mode 100644
index 0000000..5e6394e
--- /dev/null
+++ b/catalog/packages/views/nsdm_subscription_views.py
@@ -0,0 +1,127 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from drf_yasg.utils import swagger_auto_schema, no_body
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from catalog.packages.serializers.nsdm_filter_data import NsdmNotificationsFilter
+from catalog.packages.serializers.nsdm_subscription import NsdmSubscriptionsSerializer
+from catalog.packages.serializers.nsdm_subscription import NsdmSubscriptionIdSerializer
+from catalog.packages.serializers.nsdm_subscription import NsdmSubscriptionSerializer
+from catalog.packages.serializers.nsdm_subscription import NsdmSubscriptionRequestSerializer
+from catalog.packages.serializers.response import ProblemDetailsSerializer
+
+from catalog.pub.exceptions import NsdmBadRequestException
+from catalog.packages.biz.nsdm_subscription import NsdmSubscription
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
+
def validate_data(data, serializer):
    """Validate *data* with *serializer*.

    Returns the bound serializer on success; raises
    ``NsdmBadRequestException`` carrying the errors otherwise.
    """
    serialized = serializer(data=data)
    if serialized.is_valid():
        return serialized
    logger.error('Data validation failed.')
    raise NsdmBadRequestException(serialized.errors)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Create Subscription for NSD Management",
    request_body=NsdmSubscriptionRequestSerializer(),
    responses={
        status.HTTP_201_CREATED: NsdmSubscriptionSerializer,
        status.HTTP_303_SEE_OTHER: ProblemDetailsSerializer(),
        status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
    }
)
@swagger_auto_schema(
    method='GET',
    operation_description="Query subscriptions for Nsd Management",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: NsdmSubscriptionsSerializer(),
        status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
        status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer(),
    }
)
@api_view(http_method_names=['POST', 'GET'])
@view_safe_call_with_log(logger=logger)
def nsd_subscription_rc(request):
    """Create (POST) or list (GET) NSD-management subscriptions."""
    if request.method == 'POST':
        logger.debug("SubscribeNotification--post::> %s" % request.data)
        req = validate_data(request.data, NsdmSubscriptionRequestSerializer)
        subscription = NsdmSubscription().create(req.data)
        validate_data(subscription, NsdmSubscriptionSerializer)
        return Response(data=subscription, status=status.HTTP_201_CREATED)

    if request.method == 'GET':
        logger.debug("Subscription Notification GET %s" % request.query_params)
        # Only validate the filters when the caller actually supplied some.
        query_params = {}
        if request.query_params:
            query_params = validate_data(
                request.query_params, NsdmNotificationsFilter).data
        subscriptions = validate_data(
            NsdmSubscription().query_multi_subscriptions(query_params),
            NsdmSubscriptionsSerializer)
        return Response(data=subscriptions.data, status=status.HTTP_200_OK)
+
+
@swagger_auto_schema(
    method='GET',
    operation_description="Query subscriptions for Nsd Management",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: NsdmSubscriptionSerializer(),
        status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
        status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
    }
)
@swagger_auto_schema(
    method='DELETE',
    operation_description="Delete subscription for Nsd Management",
    request_body=no_body,
    responses={
        status.HTTP_204_NO_CONTENT: 'No_Content',
        status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
        status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
    }
)
@api_view(http_method_names=['GET', 'DELETE'])
@view_safe_call_with_log(logger=logger)
def nsd_subscription_rd(request, **kwargs):
    """Read (GET) or delete (DELETE) a single NSDM subscription.

    The subscription id from the URL is validated before either branch runs;
    an invalid id raises through validate_data and is mapped to an error
    response by view_safe_call_with_log.
    """
    subscription_id = kwargs.get("subscriptionId")
    validate_data({'subscription_id': subscription_id}, NsdmSubscriptionIdSerializer)
    if request.method == 'GET':
        subscription_data = NsdmSubscription().query_single_subscription(subscription_id)
        subscription = validate_data(subscription_data, NsdmSubscriptionSerializer)
        return Response(data=subscription.data, status=status.HTTP_200_OK)
    elif request.method == 'DELETE':
        # The biz layer's return value was previously bound but never used;
        # a 204 response carries no body.
        NsdmSubscription().delete_single_subscription(subscription_id)
        return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/catalog/packages/views/pnf_descriptor_views.py b/catalog/packages/views/pnf_descriptor_views.py
new file mode 100644
index 0000000..9120980
--- /dev/null
+++ b/catalog/packages/views/pnf_descriptor_views.py
@@ -0,0 +1,166 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.http import StreamingHttpResponse
+from drf_yasg.utils import no_body, swagger_auto_schema
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from catalog.packages.biz.pnf_descriptor import PnfDescriptor
+from catalog.packages.serializers.create_pnfd_info_request import CreatePnfdInfoRequestSerializer
+from catalog.packages.serializers.pnfd_info import PnfdInfoSerializer
+from catalog.packages.serializers.pnfd_infos import PnfdInfosSerializer
+from catalog.packages.views.common import validate_data
+from catalog.packages.serializers.catalog_serializers import ParseModelRequestSerializer
+from catalog.packages.serializers.catalog_serializers import ParseModelResponseSerializer
+from catalog.packages.serializers.catalog_serializers import InternalErrorRequestSerializer
+from catalog.packages.serializers.response import ProblemDetailsSerializer
+from catalog.pub.utils.syscomm import fun_name
+from catalog.pub.utils.values import ignore_case_get
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
+
@swagger_auto_schema(
    method='GET',
    operation_description="Query a PNFD",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: PnfdInfoSerializer(),
        status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
    }
)
@swagger_auto_schema(
    method='DELETE',
    operation_description="Delete a PNFD",
    request_body=no_body,
    responses={
        status.HTTP_204_NO_CONTENT: "No content",
        status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
    }
)
@api_view(http_method_names=['GET', 'DELETE'])
@view_safe_call_with_log(logger=logger)
def pnfd_info_rd(request, **kwargs):  # TODO
    """Read (GET) or delete (DELETE) an individual PNF descriptor resource."""
    pnfd_info_id = kwargs.get('pnfdInfoId')
    if request.method == 'GET':
        logger.debug("Query an individual PNF descriptor> %s" % request.data)
        pnfd_info = validate_data(
            PnfDescriptor().query_single(pnfd_info_id), PnfdInfoSerializer)
        return Response(data=pnfd_info.data, status=status.HTTP_200_OK)

    if request.method == 'DELETE':
        logger.debug("Delete an individual PNFD resource> %s" % request.data)
        PnfDescriptor().delete_single(pnfd_info_id)
        return Response(data=None, status=status.HTTP_204_NO_CONTENT)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Create a PNFD",
    request_body=CreatePnfdInfoRequestSerializer(),
    responses={
        status.HTTP_201_CREATED: PnfdInfoSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
    }
)
@swagger_auto_schema(
    method='GET',
    operation_description="Query multiple PNFDs",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: PnfdInfosSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
    }
)
@api_view(http_method_names=['POST', 'GET'])
@view_safe_call_with_log(logger=logger)
def pnf_descriptors_rc(request):
    """Create (POST) a PNF descriptor resource or query (GET) all PNFDs."""
    if request.method == 'POST':
        req = validate_data(request.data, CreatePnfdInfoRequestSerializer)
        pnfd_info = PnfDescriptor().create(req.data)
        validate_data(pnfd_info, PnfdInfoSerializer)
        return Response(data=pnfd_info, status=status.HTTP_201_CREATED)

    if request.method == 'GET':
        pnfd_infos = PnfDescriptor().query_multiple(request)
        validate_data(pnfd_infos, PnfdInfosSerializer)
        return Response(data=pnfd_infos, status=status.HTTP_200_OK)
+
+
@swagger_auto_schema(
    method='PUT',
    operation_description="Upload PNFD content",
    request_body=no_body,
    responses={
        status.HTTP_204_NO_CONTENT: "No content",
        status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
    }
)
@swagger_auto_schema(
    method='GET',
    operation_description="Fetch PNFD content",
    request_body=no_body,
    responses={
        status.HTTP_204_NO_CONTENT: 'PNFD file',
        status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
    }
)
@api_view(http_method_names=['PUT', 'GET'])
@view_safe_call_with_log(logger=logger)
def pnfd_content_ru(request, **kwargs):
    """Upload (PUT) or fetch (GET) the content of a PNF descriptor."""
    pnfd_info_id = kwargs.get("pnfdInfoId")
    if request.method == 'PUT':
        files = request.FILES.getlist('file')
        try:
            local_file_name = PnfDescriptor().upload(files[0], pnfd_info_id)
            PnfDescriptor().parse_pnfd_and_save(pnfd_info_id, local_file_name)
            return Response(data=None, status=status.HTTP_204_NO_CONTENT)
        except Exception:
            # Mark the resource as failed, then re-raise with the original
            # traceback (bare raise, not `raise e`) so view_safe_call_with_log
            # can map the error.
            PnfDescriptor().handle_upload_failed(pnfd_info_id)
            raise

    if request.method == 'GET':
        file_iterator = PnfDescriptor().download(pnfd_info_id)
        return StreamingHttpResponse(file_iterator, status=status.HTTP_200_OK)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Parse PNF model",
    request_body=ParseModelRequestSerializer,
    responses={
        status.HTTP_202_ACCEPTED: ParseModelResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['POST'])
def pnf_model_parser(request, *args, **kwargs):
    """Parse the PNF model identified by ``csarId`` with the given ``inputs``."""
    csar_id = ignore_case_get(request.data, "csarId")
    inputs = ignore_case_get(request.data, "inputs")
    logger.debug("Enter %s, csar_id=%s, inputs=%s", fun_name(), csar_id, inputs)
    ret = PnfDescriptor().parse_pnfd(csar_id, inputs)
    logger.info("Leave %s, Return value is %s", fun_name(), ret)
    if ret[0] != 0:
        # Non-zero status code from the parser signals an internal failure.
        return Response(data={'error': ret[1]}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    response = validate_data(ret[1], ParseModelResponseSerializer)
    return Response(data=response.data, status=status.HTTP_202_ACCEPTED)
diff --git a/catalog/packages/views/vnf_package_artifact_views.py b/catalog/packages/views/vnf_package_artifact_views.py
new file mode 100644
index 0000000..0de9682
--- /dev/null
+++ b/catalog/packages/views/vnf_package_artifact_views.py
@@ -0,0 +1,54 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from drf_yasg.utils import swagger_auto_schema
+from rest_framework import status
+from rest_framework.views import APIView
+from django.http import FileResponse
+
+from catalog.packages.serializers.response import ProblemDetailsSerializer
+from catalog.packages.biz.vnf_pkg_artifacts import FetchVnfPkgArtifact
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
# Candidate query-filter keys for VNF package endpoints.
# NOTE(review): this constant is not referenced anywhere in this module —
# presumably copied from the subscription views; confirm before relying on it.
VALID_FILTERS = [
    "callbackUri",
    "notificationTypes",
    "vnfdId",
    "vnfPkgId",
    "operationalState",
    "usageState"
]
+
+
class FetchVnfPkgmArtifactsView(APIView):
    """Serve an individual artifact file from an on-boarded VNF package."""

    @swagger_auto_schema(
        responses={
            status.HTTP_200_OK: "HTTP_200_OK",
            status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
            status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
        }
    )
    @view_safe_call_with_log(logger=logger)
    def get(self, request, vnfPkgId, artifactPath):
        """Stream the artifact at *artifactPath* inside package *vnfPkgId*."""
        logger.debug("FetchVnfPkgmArtifactsView--get::> ")
        artifact_data = FetchVnfPkgArtifact().fetch(vnfPkgId, artifactPath)
        return FileResponse(artifact_data)
diff --git a/catalog/packages/views/vnf_package_subscription_views.py b/catalog/packages/views/vnf_package_subscription_views.py
new file mode 100644
index 0000000..32904e3
--- /dev/null
+++ b/catalog/packages/views/vnf_package_subscription_views.py
@@ -0,0 +1,120 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from drf_yasg.utils import swagger_auto_schema
+from rest_framework import status
+from rest_framework.views import APIView
+from rest_framework.response import Response
+
+from catalog.packages.serializers.vnf_pkg_subscription import PkgmSubscriptionRequestSerializer
+from catalog.packages.serializers.vnf_pkg_subscription import PkgmSubscriptionSerializer
+from catalog.packages.serializers.vnf_pkg_subscription import PkgmSubscriptionsSerializer
+from catalog.packages.serializers.response import ProblemDetailsSerializer
+from catalog.packages.biz.vnf_pkg_subscription import CreateSubscription
+from catalog.packages.biz.vnf_pkg_subscription import QuerySubscription
+from catalog.packages.biz.vnf_pkg_subscription import TerminateSubscription
+from catalog.packages.views.common import validate_data
+from catalog.pub.exceptions import VnfPkgSubscriptionException
+from catalog.pub.exceptions import BadRequestException
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
# Query-parameter names accepted by CreateQuerySubscriptionView.get();
# any other filter key causes the request to be rejected as a bad request.
VALID_FILTERS = [
    "callbackUri",
    "notificationTypes",
    "vnfdId",
    "vnfPkgId",
    "operationalState",
    "usageState"
]
+
+
class CreateQuerySubscriptionView(APIView):
    """Create (POST) or list (GET) VNF package management subscriptions."""

    @swagger_auto_schema(
        request_body=PkgmSubscriptionRequestSerializer,
        responses={
            status.HTTP_201_CREATED: PkgmSubscriptionSerializer(),
            status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
        }
    )
    @view_safe_call_with_log(logger=logger)
    def post(self, request):
        """Register a new subscription built from the request body."""
        logger.debug("Create VNF package Subscription> %s" % request.data)

        subscription_request = validate_data(
            request.data, PkgmSubscriptionRequestSerializer)
        created = CreateSubscription(subscription_request.data).do_biz()
        subscription_info = validate_data(created, PkgmSubscriptionSerializer)
        return Response(data=subscription_info.data, status=status.HTTP_201_CREATED)

    @swagger_auto_schema(
        responses={
            status.HTTP_200_OK: PkgmSubscriptionSerializer(),
            status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
            status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
        }
    )
    @view_safe_call_with_log(logger=logger)
    def get(self, request):
        """List subscriptions, optionally narrowed by the allowed filters."""
        logger.debug("SubscribeNotification--get::> %s" % request.query_params)

        params = request.query_params
        # Reject any query key outside the documented filter set.
        if params and not set(params).issubset(set(VALID_FILTERS)):
            raise BadRequestException("Not a valid filter")

        resp_data = QuerySubscription().query_multi_subscriptions(params)

        serializer = PkgmSubscriptionsSerializer(data=resp_data)
        if not serializer.is_valid():
            raise VnfPkgSubscriptionException(serializer.errors)

        return Response(data=serializer.data, status=status.HTTP_200_OK)
+
+
class QueryTerminateSubscriptionView(APIView):
    """Read (GET) or terminate (DELETE) one VNF package subscription."""

    @swagger_auto_schema(
        responses={
            status.HTTP_200_OK: PkgmSubscriptionSerializer(),
            status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
            status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
        }
    )
    @view_safe_call_with_log(logger=logger)
    def get(self, request, subscriptionId):
        """Return the subscription identified by *subscriptionId*."""
        logger.debug("SubscribeNotification--get::> %s" % subscriptionId)

        resp_data = QuerySubscription().query_single_subscription(subscriptionId)

        subscription_serializer = PkgmSubscriptionSerializer(data=resp_data)
        if not subscription_serializer.is_valid():
            raise VnfPkgSubscriptionException(subscription_serializer.errors)

        return Response(data=subscription_serializer.data, status=status.HTTP_200_OK)

    @swagger_auto_schema(
        responses={
            status.HTTP_204_NO_CONTENT: "",
            status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
            status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
        }
    )
    @view_safe_call_with_log(logger=logger)
    def delete(self, request, subscriptionId):
        """Terminate the subscription and answer 204 No Content."""
        # Fixed copy-pasted log tag: this is the DELETE handler, not GET.
        logger.debug("SubscribeNotification--delete::> %s" % subscriptionId)

        TerminateSubscription().terminate(subscriptionId)
        return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/catalog/packages/views/vnf_package_views.py b/catalog/packages/views/vnf_package_views.py
new file mode 100644
index 0000000..9fc143b
--- /dev/null
+++ b/catalog/packages/views/vnf_package_views.py
@@ -0,0 +1,168 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.http import StreamingHttpResponse
+from drf_yasg.utils import swagger_auto_schema, no_body
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from catalog.packages.serializers.upload_vnf_pkg_from_uri_req import UploadVnfPackageFromUriRequestSerializer
+from catalog.packages.serializers.create_vnf_pkg_info_req import CreateVnfPkgInfoRequestSerializer
+from catalog.packages.serializers.vnf_pkg_info import VnfPkgInfoSerializer
+from catalog.packages.serializers.vnf_pkg_infos import VnfPkgInfosSerializer
+from catalog.packages.biz.vnf_package import VnfPackage
+from catalog.packages.biz.vnf_package import VnfPkgUploadThread
+from catalog.packages.biz.vnf_package import parse_vnfd_and_save
+from catalog.packages.biz.vnf_package import handle_upload_failed
+from .common import validate_data
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
+
@swagger_auto_schema(
    method="GET",
    operation_description="Query multiple VNF package resource",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: VnfPkgInfosSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@swagger_auto_schema(
    method="POST",
    operation_description="Create an individual VNF package resource",
    request_body=CreateVnfPkgInfoRequestSerializer,
    responses={
        status.HTTP_201_CREATED: VnfPkgInfoSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@api_view(http_method_names=["GET", "POST"])
@view_safe_call_with_log(logger=logger)
def vnf_packages_rc(request):
    """Query all VNF packages (GET) or create a new package resource (POST)."""
    if request.method == 'GET':
        logger.debug("Query VNF packages> %s" % request.data)
        pkg_infos = VnfPackage().query_multiple()
        validate_data(pkg_infos, VnfPkgInfosSerializer)
        return Response(data=pkg_infos, status=status.HTTP_200_OK)

    if request.method == 'POST':
        logger.debug("Create VNF package> %s" % request.data)
        req = validate_data(request.data, CreateVnfPkgInfoRequestSerializer)
        pkg_info = VnfPackage().create_vnf_pkg(req.data)
        validate_data(pkg_info, VnfPkgInfoSerializer)
        return Response(data=pkg_info, status=status.HTTP_201_CREATED)
+
+
@swagger_auto_schema(
    method='PUT',
    operation_description="Upload VNF package content",
    request_body=no_body,
    responses={
        status.HTTP_202_ACCEPTED: "Successfully",
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@swagger_auto_schema(
    method="GET",
    operation_description="Fetch VNF package content",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: VnfPkgInfosSerializer(),
        status.HTTP_404_NOT_FOUND: "VNF package does not exist",
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@api_view(http_method_names=["PUT", "GET"])
@view_safe_call_with_log(logger=logger)
def package_content_ru(request, **kwargs):
    """Upload (PUT) or fetch (GET) the content of a VNF package."""
    vnf_pkg_id = kwargs.get("vnfPkgId")
    if request.method == "PUT":
        logger.debug("Upload VNF package %s" % vnf_pkg_id)
        files = request.FILES.getlist('file')
        try:
            local_file_name = VnfPackage().upload(vnf_pkg_id, files[0])
            parse_vnfd_and_save(vnf_pkg_id, local_file_name)
            return Response(None, status=status.HTTP_202_ACCEPTED)
        except Exception:
            # Mark the package as failed, then re-raise with the original
            # traceback (bare raise, not `raise e`) for view_safe_call_with_log.
            handle_upload_failed(vnf_pkg_id)
            raise

    if request.method == "GET":
        # HTTP Range header allows partial downloads of the package file.
        file_range = request.META.get('HTTP_RANGE')
        file_iterator = VnfPackage().download(vnf_pkg_id, file_range)
        return StreamingHttpResponse(file_iterator, status=status.HTTP_200_OK)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Upload VNF package content from uri",
    request_body=UploadVnfPackageFromUriRequestSerializer,
    responses={
        status.HTTP_202_ACCEPTED: "Successfully",
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@api_view(http_method_names=['POST'])
@view_safe_call_with_log(logger=logger)
def upload_from_uri_c(request, **kwargs):
    """Trigger an asynchronous VNF package upload from a remote URI.

    The actual download runs in ``VnfPkgUploadThread``; the view answers
    202 Accepted immediately after the thread is started.
    """
    vnf_pkg_id = kwargs.get("vnfPkgId")
    try:
        upload_request = validate_data(request.data,
                                       UploadVnfPackageFromUriRequestSerializer)
        VnfPkgUploadThread(upload_request.data, vnf_pkg_id).start()
        return Response(None, status=status.HTTP_202_ACCEPTED)
    except Exception:
        # Mark the package upload as failed, then re-raise with the original
        # traceback (bare raise, not `raise e`) for view_safe_call_with_log.
        handle_upload_failed(vnf_pkg_id)
        raise
+
+
@swagger_auto_schema(
    method='GET',
    operation_description="Query an individual VNF package resource",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: VnfPkgInfoSerializer(),
        status.HTTP_404_NOT_FOUND: "VNF package does not exist",
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@swagger_auto_schema(
    method='DELETE',
    operation_description="Delete an individual VNF package resource",
    request_body=no_body,
    responses={
        status.HTTP_204_NO_CONTENT: "No content",
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@api_view(http_method_names=['GET', 'DELETE'])
@view_safe_call_with_log(logger=logger)
def vnf_package_rd(request, **kwargs):
    """Read (GET) or delete (DELETE) an individual VNF package resource."""
    vnf_pkg_id = kwargs.get("vnfPkgId")
    if request.method == 'GET':
        logger.debug("Query an individual VNF package> %s" % request.data)
        pkg_info = VnfPackage().query_single(vnf_pkg_id)
        validate_data(pkg_info, VnfPkgInfoSerializer)
        return Response(data=pkg_info, status=status.HTTP_200_OK)

    if request.method == 'DELETE':
        logger.debug("Delete an individual VNF package> %s" % request.data)
        VnfPackage().delete_vnf_pkg(vnf_pkg_id)
        return Response(data=None, status=status.HTTP_204_NO_CONTENT)
diff --git a/catalog/pub/__init__.py b/catalog/pub/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/catalog/pub/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/pub/config/__init__.py b/catalog/pub/config/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/catalog/pub/config/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/pub/config/config.py b/catalog/pub/config/config.py
new file mode 100644
index 0000000..b8a5141
--- /dev/null
+++ b/catalog/pub/config/config.py
@@ -0,0 +1,86 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# [MSB] Microservice Bus endpoint this service registers with / calls through.
MSB_SERVICE_IP = '127.0.0.1'
MSB_SERVICE_PORT = '80'

# [REDIS] connection settings.
REDIS_HOST = '127.0.0.1'
REDIS_PORT = '6379'
REDIS_PASSWD = ''

# [mysql] database connection settings for the catalog schema.
DB_IP = "127.0.0.1"
DB_PORT = 3306
DB_NAME = "nfvocatalog"
DB_USER = "nfvocatalog"
DB_PASSWD = "nfvocatalog"

# [MDC] logging-context settings: service name plus the request headers
# inspected to recover the originating client address behind proxies.
SERVICE_NAME = "catalog"
FORWARDED_FOR_FIELDS = ["HTTP_X_FORWARDED_FOR", "HTTP_X_FORWARDED_HOST",
                        "HTTP_X_FORWARDED_SERVER"]

# [register] MSB registration: when enabled, the three REST endpoints below
# (catalog, nsd, vnfpkgm) are registered against REG_TO_MSB_REG_URL at startup.
REG_TO_MSB_WHEN_START = True
REG_TO_MSB_REG_URL = "/api/microservices/v1/services"
REG_TO_MSB_REG_PARAM = [{
    "serviceName": "catalog",
    "version": "v1",
    "url": "/api/catalog/v1",
    "protocol": "REST",
    "visualRange": "1",
    "nodes": [{
        "ip": "127.0.0.1",
        "port": "8806",
        "ttl": 0
    }]
}, {
    "serviceName": "nsd",
    "version": "v1",
    "url": "/api/nsd/v1",
    "protocol": "REST",
    "visualRange": "1",
    "nodes": [{
        "ip": "127.0.0.1",
        "port": "8806",
        "ttl": 0
    }]
}, {
    "serviceName": "vnfpkgm",
    "version": "v1",
    "url": "/api/vnfpkgm/v1",
    "protocol": "REST",
    "visualRange": "1",
    "nodes": [{
        "ip": "127.0.0.1",
        "port": "8806",
        "ttl": 0
    }]
}]
# MSB service URLs for the three registered endpoints.
# NOTE(review): "CALALOG" typo is kept as-is — renaming the constant would
# break any module that imports it by name.
MSB_SVC_CALALOG_URL = "/api/microservices/v1/services/catalog/version/v1"
MSB_SVC_NSD_URL = "/api/microservices/v1/services/nsd/version/v1"
MSB_SVC_VNFPKGM_URL = "/api/microservices/v1/services/vnfpkgm/version/v1"

# Catalog file-system/URL paths; real values are assigned in settings.py.
CATALOG_ROOT_PATH = None
CATALOG_URL_PATH = None

# [sdc config] SDC client endpoint and credentials.
# NOTE(review): credential is hardcoded in source — consider moving it to an
# environment variable or secret store.
SDC_BASE_URL = "http://msb-iag/api"
SDC_USER = "aai"
SDC_PASSWD = "Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U"

# Default VNFD schema version used when a package does not specify one.
VNFD_SCHEMA_VERSION_DEFAULT = "base"
diff --git a/catalog/pub/database/__init__.py b/catalog/pub/database/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/catalog/pub/database/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/pub/database/admin.py b/catalog/pub/database/admin.py
new file mode 100644
index 0000000..bff70cb
--- /dev/null
+++ b/catalog/pub/database/admin.py
@@ -0,0 +1,361 @@
+# Copyright 2019 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.contrib import admin
+
+from catalog.pub.database.models import NSPackageModel
+from catalog.pub.database.models import ServicePackageModel
+from catalog.pub.database.models import VnfPackageModel
+from catalog.pub.database.models import PnfPackageModel
+from catalog.pub.database.models import SoftwareImageModel
+from catalog.pub.database.models import JobModel
+from catalog.pub.database.models import JobStatusModel
+from catalog.pub.database.models import NsdmSubscriptionModel
+from catalog.pub.database.models import VnfPkgSubscriptionModel
+
+
@admin.register(NSPackageModel)
class NSPackageModelAdmin(admin.ModelAdmin):
    """Admin configuration for NS package records.

    `fields` (edit form) and `list_display` (changelist columns) deliberately
    expose the same full set of model attributes.
    """
    list_display_links = ('nsPackageId', 'nsdName')
    fields = [
        "nsPackageId",
        "nsPackageUri",
        "checksum",
        "sdcCsarId",
        "onboardingState",
        "operationalState",
        "usageState",
        "deletionPending",
        "nsdId",
        "invariantId",
        "nsdName",
        "nsdDesginer",
        "nsdDescription",
        "nsdVersion",
        "userDefinedData",
        "localFilePath",
        "nsdModel"
    ]

    list_display = [
        "nsPackageId",
        "nsPackageUri",
        "checksum",
        "sdcCsarId",
        "onboardingState",
        "operationalState",
        "usageState",
        "deletionPending",
        "nsdId",
        "invariantId",
        "nsdName",
        "nsdDesginer",
        "nsdDescription",
        "nsdVersion",
        "userDefinedData",
        "localFilePath",
        "nsdModel"
    ]

    # Searchable by package/descriptor identifiers and name.
    search_fields = (
        "nsPackageId",
        "nsdId",
        "nsdName",
        "sdcCsarId"
    )
+
+
@admin.register(ServicePackageModel)
class ServicePackageModelAdmin(admin.ModelAdmin):
    """Admin configuration for service package records.

    `fields` and `list_display` mirror each other so the edit form and the
    changelist expose the same attributes.
    """
    list_display_links = ('servicePackageId', 'servicedName')
    fields = [
        "servicePackageId",
        "servicePackageUri",
        "checksum",
        "sdcCsarId",
        "onboardingState",
        "operationalState",
        "usageState",
        "deletionPending",
        "servicedId",
        "invariantId",
        "servicedName",
        "servicedDesigner",
        "servicedDescription",
        "servicedVersion",
        "userDefinedData",
        "localFilePath",
        "servicedModel"
    ]

    list_display = [
        "servicePackageId",
        "servicePackageUri",
        "checksum",
        "sdcCsarId",
        "onboardingState",
        "operationalState",
        "usageState",
        "deletionPending",
        "servicedId",
        "invariantId",
        "servicedName",
        "servicedDesigner",
        "servicedDescription",
        "servicedVersion",
        "userDefinedData",
        "localFilePath",
        "servicedModel"
    ]

    search_fields = (
        "servicePackageId",
        "sdcCsarId",
        "servicedName",
        "onboardingState"
    )
+
+
@admin.register(VnfPackageModel)
class VnfPackageModelAdmin(admin.ModelAdmin):
    """Admin configuration for VNF package records.

    `fields` and `list_display` mirror each other so the edit form and the
    changelist expose the same attributes.
    """
    list_display_links = ('vnfPackageId', 'vnfdId')
    fields = [
        "vnfPackageId",
        "vnfPackageUri",
        "SdcCSARUri",
        "checksum",
        "onboardingState",
        "operationalState",
        "usageState",
        "deletionPending",
        "vnfdId",
        "vnfVendor",
        "vnfdProductName",
        "vnfdVersion",
        "vnfSoftwareVersion",
        "userDefinedData",
        "localFilePath",
        "vnfdModel"
    ]

    list_display = [
        "vnfPackageId",
        "vnfPackageUri",
        "SdcCSARUri",
        "checksum",
        "onboardingState",
        "operationalState",
        "usageState",
        "deletionPending",
        "vnfdId",
        "vnfVendor",
        "vnfdProductName",
        "vnfdVersion",
        "vnfSoftwareVersion",
        "userDefinedData",
        "localFilePath",
        "vnfdModel"
    ]

    search_fields = (
        "vnfPackageId",
        "onboardingState",
        "vnfdId"
    )
+
+
@admin.register(PnfPackageModel)
class PnfPackageModelAdmin(admin.ModelAdmin):
    """Admin configuration for PNF package records.

    `fields` and `list_display` mirror each other so the edit form and the
    changelist expose the same attributes.
    """
    list_display_links = ('pnfPackageId', 'pnfdId')
    fields = [
        "pnfPackageId",
        "pnfPackageUri",
        "sdcCSARUri",
        "checksum",
        "onboardingState",
        "usageState",
        "deletionPending",
        "pnfdId",
        "pnfVendor",
        "pnfdProductName",
        "pnfdVersion",
        "pnfSoftwareVersion",
        "userDefinedData",
        "localFilePath",
        "pnfdModel",
        "pnfdName"
    ]

    list_display = [
        "pnfPackageId",
        "pnfPackageUri",
        "sdcCSARUri",
        "checksum",
        "onboardingState",
        "usageState",
        "deletionPending",
        "pnfdId",
        "pnfVendor",
        "pnfdProductName",
        "pnfdVersion",
        "pnfSoftwareVersion",
        "userDefinedData",
        "localFilePath",
        "pnfdModel",
        "pnfdName"
    ]

    search_fields = (
        "pnfPackageId",
        "onboardingState",
        "pnfdId"
    )
+
+
@admin.register(SoftwareImageModel)
class SoftwareImageModelAdmin(admin.ModelAdmin):
    """Admin configuration for software image records.

    `fields` and `list_display` mirror each other so the edit form and the
    changelist expose the same attributes.
    """
    list_display_links = ('imageid', 'vnfPackageId')
    fields = [
        "imageid",
        "containerFormat",
        "diskFormat",
        "mindisk",
        "minram",
        "usermetadata",
        "vnfPackageId",
        "filePath",
        "status",
        "vimid"
    ]

    list_display = [
        "imageid",
        "containerFormat",
        "diskFormat",
        "mindisk",
        "minram",
        "usermetadata",
        "vnfPackageId",
        "filePath",
        "status",
        "vimid"
    ]

    search_fields = (
        "imageid",
        "vnfPackageId",
        "vimid"
    )
+
+
@admin.register(NsdmSubscriptionModel)
class NsdmSubscriptionModelAdmin(admin.ModelAdmin):
    """Admin configuration for NSD management subscription records.

    `fields` and `list_display` mirror each other so the edit form and the
    changelist expose the same attributes.
    """
    list_display_links = ('subscriptionid', 'notificationTypes')
    fields = [
        "subscriptionid",
        "notificationTypes",
        "auth_info",
        "callback_uri",
        "nsdInfoId",
        "nsdId",
        "nsdName",
        "nsdVersion",
        "nsdDesigner",
        "nsdInvariantId",
        "vnfPkgIds",
        "pnfdInfoIds",
        "nestedNsdInfoIds",
        "nsdOnboardingState",
        "nsdOperationalState",
        "nsdUsageState",
        "pnfdId",
        "pnfdName",
        "pnfdVersion",
        "pnfdProvider",
        "pnfdInvariantId",
        "pnfdOnboardingState",
        "pnfdUsageState",
        "links"
    ]

    list_display = [
        "subscriptionid",
        "notificationTypes",
        "auth_info",
        "callback_uri",
        "nsdInfoId",
        "nsdId",
        "nsdName",
        "nsdVersion",
        "nsdDesigner",
        "nsdInvariantId",
        "vnfPkgIds",
        "pnfdInfoIds",
        "nestedNsdInfoIds",
        "nsdOnboardingState",
        "nsdOperationalState",
        "nsdUsageState",
        "pnfdId",
        "pnfdName",
        "pnfdVersion",
        "pnfdProvider",
        "pnfdInvariantId",
        "pnfdOnboardingState",
        "pnfdUsageState",
        "links"
    ]

    search_fields = (
        "subscriptionid",
        "notificationTypes"
    )
+
+
@admin.register(VnfPkgSubscriptionModel)
class VnfPkgSubscriptionModelAdmin(admin.ModelAdmin):
    """Admin configuration for VNF package subscription records.

    `fields` and `list_display` mirror each other so the edit form and the
    changelist expose the same attributes.
    """
    list_display_links = ('subscription_id', 'notification_types')
    fields = [
        "subscription_id",
        "callback_uri",
        "auth_info",
        "usage_states",
        "notification_types",
        "vnfd_id",
        "vnf_pkg_id",
        "operation_states",
        "vnf_products_from_provider",
        "links"
    ]

    list_display = [
        "subscription_id",
        "callback_uri",
        "auth_info",
        "usage_states",
        "notification_types",
        "vnfd_id",
        "vnf_pkg_id",
        "operation_states",
        "vnf_products_from_provider",
        "links"
    ]

    search_fields = (
        "subscription_id",
        "notification_types"
    )
+
+
+admin.site.register(JobModel)
+admin.site.register(JobStatusModel)
diff --git a/catalog/pub/database/migrations/0001_initial.py b/catalog/pub/database/migrations/0001_initial.py
new file mode 100644
index 0000000..98ca84c
--- /dev/null
+++ b/catalog/pub/database/migrations/0001_initial.py
@@ -0,0 +1,229 @@
+# Copyright 2019 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# Generated by Django 1.11.9 on 2019-04-16 03:53
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
class Migration(migrations.Migration):
    """Initial schema for the catalog service.

    Creates the job tables, the NS/PNF/service/VNF package tables, the
    software-image table and the two subscription tables. Generated by
    Django (1.11.9); do not hand-edit a migration that may already be
    applied — add a follow-up migration instead.
    """

    initial = True

    # First migration of the app: no dependencies.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='JobModel',
            fields=[
                ('jobid', models.CharField(db_column='JOBID', max_length=255, primary_key=True, serialize=False)),
                ('jobtype', models.CharField(db_column='JOBTYPE', max_length=255)),
                ('jobaction', models.CharField(db_column='JOBACTION', max_length=255)),
                ('resid', models.CharField(db_column='RESID', max_length=255)),
                ('status', models.IntegerField(blank=True, db_column='STATUS', null=True)),
                ('starttime', models.CharField(blank=True, db_column='STARTTIME', max_length=255, null=True)),
                ('endtime', models.CharField(blank=True, db_column='ENDTIME', max_length=255, null=True)),
                ('progress', models.IntegerField(blank=True, db_column='PROGRESS', null=True)),
                ('user', models.CharField(blank=True, db_column='USER', max_length=255, null=True)),
                ('parentjobid', models.CharField(blank=True, db_column='PARENTJOBID', max_length=255, null=True)),
                ('resname', models.CharField(blank=True, db_column='RESNAME', max_length=255, null=True)),
            ],
            options={
                'db_table': 'CATALOG_JOB',
            },
        ),
        migrations.CreateModel(
            name='JobStatusModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('indexid', models.IntegerField(db_column='INDEXID')),
                ('jobid', models.CharField(db_column='JOBID', max_length=255)),
                ('status', models.CharField(db_column='STATUS', max_length=255)),
                ('progress', models.IntegerField(blank=True, db_column='PROGRESS', null=True)),
                ('descp', models.TextField(db_column='DESCP', max_length=65535)),
                ('errcode', models.CharField(blank=True, db_column='ERRCODE', max_length=255, null=True)),
                ('addtime', models.CharField(blank=True, db_column='ADDTIME', max_length=255, null=True)),
            ],
            options={
                'db_table': 'CATALOG_JOB_STATUS',
            },
        ),
        migrations.CreateModel(
            name='NsdmSubscriptionModel',
            fields=[
                ('subscriptionid', models.CharField(db_column='SUBSCRIPTIONID', max_length=255, primary_key=True, serialize=False)),
                ('notificationTypes', models.TextField(db_column='NOTIFICATIONTYPES', null=True)),
                ('auth_info', models.TextField(db_column='AUTHINFO', null=True)),
                ('callback_uri', models.CharField(db_column='CALLBACKURI', max_length=255)),
                ('nsdInfoId', models.TextField(db_column='NSDINFOID', null=True)),
                ('nsdId', models.TextField(db_column='NSDID', null=True)),
                ('nsdName', models.TextField(db_column='NSDNAME', null=True)),
                ('nsdVersion', models.TextField(db_column='NSDVERSION', null=True)),
                ('nsdDesigner', models.TextField(db_column='NSDDESIGNER', null=True)),
                ('nsdInvariantId', models.TextField(db_column='NSDINVARIANTID', null=True)),
                ('vnfPkgIds', models.TextField(db_column='VNFPKGIDS', null=True)),
                ('pnfdInfoIds', models.TextField(db_column='PNFDINFOIDS', null=True)),
                ('nestedNsdInfoIds', models.TextField(db_column='NESTEDNSDINFOIDS', null=True)),
                ('nsdOnboardingState', models.TextField(db_column='NSDONBOARDINGSTATE', null=True)),
                ('nsdOperationalState', models.TextField(db_column='NSDOPERATIONALSTATE', null=True)),
                ('nsdUsageState', models.TextField(db_column='NSDUSAGESTATE', null=True)),
                ('pnfdId', models.TextField(db_column='PNFDID', null=True)),
                ('pnfdName', models.TextField(db_column='PNFDNAME', null=True)),
                ('pnfdVersion', models.TextField(db_column='PNFDVERSION', null=True)),
                ('pnfdProvider', models.TextField(db_column='PNFDPROVIDER', null=True)),
                ('pnfdInvariantId', models.TextField(db_column='PNFDINVARIANTID', null=True)),
                ('pnfdOnboardingState', models.TextField(db_column='PNFDONBOARDINGSTATE', null=True)),
                ('pnfdUsageState', models.TextField(db_column='PNFDUSAGESTATE', null=True)),
                ('links', models.TextField(db_column='LINKS')),
            ],
            options={
                'db_table': 'CATALOG_NSDM_SUBSCRIPTION',
            },
        ),
        migrations.CreateModel(
            name='NSPackageModel',
            fields=[
                ('nsPackageId', models.CharField(db_column='NSPACKAGEID', max_length=50, primary_key=True, serialize=False)),
                ('nsPackageUri', models.CharField(blank=True, db_column='NSPACKAGEURI', max_length=300, null=True)),
                ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
                ('sdcCsarId', models.CharField(blank=True, db_column='SDCCSARID', max_length=50, null=True)),
                ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
                ('operationalState', models.CharField(blank=True, db_column='OPERATIONALSTATE', max_length=20, null=True)),
                ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
                ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
                ('nsdId', models.CharField(blank=True, db_column='NSDID', max_length=50, null=True)),
                ('invariantId', models.CharField(blank=True, db_column='INVARIANTID', max_length=50, null=True)),
                ('nsdName', models.CharField(blank=True, db_column='NSDNAME', max_length=50, null=True)),
                ('nsdDesginer', models.CharField(blank=True, db_column='NSDDESIGNER', max_length=50, null=True)),
                ('nsdDescription', models.CharField(blank=True, db_column='NSDDESCRIPTION', max_length=100, null=True)),
                ('nsdVersion', models.CharField(blank=True, db_column='NSDVERSION', max_length=20, null=True)),
                ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
                ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
                ('nsdModel', models.TextField(blank=True, db_column='NSDMODEL', max_length=65535, null=True)),
            ],
            options={
                'db_table': 'CATALOG_NSPACKAGE',
            },
        ),
        migrations.CreateModel(
            name='PnfPackageModel',
            fields=[
                ('pnfPackageId', models.CharField(db_column='PNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
                ('pnfPackageUri', models.CharField(blank=True, db_column='PNFPACKAGEURI', max_length=300, null=True)),
                ('sdcCSARUri', models.CharField(blank=True, db_column='SDCCSARURI', max_length=300, null=True)),
                ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
                ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
                ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
                ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
                ('pnfdId', models.CharField(blank=True, db_column='PNFDID', max_length=50, null=True)),
                ('pnfVendor', models.CharField(blank=True, db_column='VENDOR', max_length=50, null=True)),
                ('pnfdProductName', models.CharField(blank=True, db_column='PNFDPRODUCTNAME', max_length=50, null=True)),
                ('pnfdVersion', models.CharField(blank=True, db_column='PNFDVERSION', max_length=20, null=True)),
                ('pnfSoftwareVersion', models.CharField(blank=True, db_column='PNFSOFTWAREVERSION', max_length=20, null=True)),
                ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
                ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
                ('pnfdModel', models.TextField(blank=True, db_column='PNFDMODEL', max_length=65535, null=True)),
                ('pnfdName', models.TextField(blank=True, db_column='PNFDNAME', max_length=65535, null=True)),
            ],
            options={
                'db_table': 'CATALOG_PNFPACKAGE',
            },
        ),
        migrations.CreateModel(
            name='ServicePackageModel',
            fields=[
                ('servicePackageId', models.CharField(db_column='SERVICEPACKAGEID', max_length=50, primary_key=True, serialize=False)),
                ('servicePackageUri', models.CharField(blank=True, db_column='SERVICEPACKAGEURI', max_length=300, null=True)),
                ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
                ('sdcCsarId', models.CharField(blank=True, db_column='SDCCSARID', max_length=50, null=True)),
                ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
                ('operationalState', models.CharField(blank=True, db_column='OPERATIONALSTATE', max_length=20, null=True)),
                ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
                ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
                ('servicedId', models.CharField(blank=True, db_column='SERVICEDID', max_length=50, null=True)),
                ('invariantId', models.CharField(blank=True, db_column='INVARIANTID', max_length=50, null=True)),
                ('servicedName', models.CharField(blank=True, db_column='SERVICEDNAME', max_length=50, null=True)),
                ('servicedDesigner', models.CharField(blank=True, db_column='SERVICEDDESIGNER', max_length=50, null=True)),
                ('servicedDescription', models.CharField(blank=True, db_column='SERVICEDDESCRIPTION', max_length=100, null=True)),
                ('servicedVersion', models.CharField(blank=True, db_column='SERVICEDVERSION', max_length=20, null=True)),
                ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
                ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
                ('servicedModel', models.TextField(blank=True, db_column='SERVICEDMODEL', max_length=65535, null=True)),
            ],
            options={
                'db_table': 'CATALOG_SERVICEPACKAGE',
            },
        ),
        migrations.CreateModel(
            name='SoftwareImageModel',
            fields=[
                ('imageid', models.CharField(db_column='IMAGEID', max_length=50, primary_key=True, serialize=False)),
                ('containerFormat', models.CharField(db_column='CONTAINERFORMAT', max_length=20)),
                ('diskFormat', models.CharField(db_column='DISKFORMAT', max_length=20)),
                ('mindisk', models.CharField(db_column='MINDISK', max_length=20)),
                ('minram', models.CharField(db_column='MINRAM', max_length=20)),
                # NOTE(review): 'USAERMETADATA' looks like a typo for
                # 'USERMETADATA', but the column name is already part of the
                # applied schema and must not be changed here.
                ('usermetadata', models.CharField(db_column='USAERMETADATA', max_length=1024)),
                ('vnfPackageId', models.CharField(db_column='VNFPACKAGEID', max_length=50)),
                ('filePath', models.CharField(db_column='FILEPATH', max_length=300)),
                ('status', models.CharField(db_column='STATUS', max_length=10)),
                ('vimid', models.CharField(db_column='VIMID', max_length=50)),
            ],
            options={
                'db_table': 'CATALOG_SOFTWAREIMAGEMODEL',
            },
        ),
        migrations.CreateModel(
            name='VnfPackageModel',
            fields=[
                ('vnfPackageId', models.CharField(db_column='VNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
                ('vnfPackageUri', models.CharField(blank=True, db_column='VNFPACKAGEURI', max_length=300, null=True)),
                ('SdcCSARUri', models.CharField(blank=True, db_column='SDCCSARURI', max_length=300, null=True)),
                ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
                ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
                ('operationalState', models.CharField(blank=True, db_column='OPERATIONALSTATE', max_length=20, null=True)),
                ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
                ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
                ('vnfdId', models.CharField(blank=True, db_column='VNFDID', max_length=50, null=True)),
                ('vnfVendor', models.CharField(blank=True, db_column='VENDOR', max_length=50, null=True)),
                ('vnfdProductName', models.CharField(blank=True, db_column='VNFDPRODUCTNAME', max_length=50, null=True)),
                ('vnfdVersion', models.CharField(blank=True, db_column='VNFDVERSION', max_length=20, null=True)),
                ('vnfSoftwareVersion', models.CharField(blank=True, db_column='VNFSOFTWAREVERSION', max_length=20, null=True)),
                ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
                ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
                ('vnfdModel', models.TextField(blank=True, db_column='VNFDMODEL', max_length=65535, null=True)),
            ],
            options={
                'db_table': 'CATALOG_VNFPACKAGE',
            },
        ),
        migrations.CreateModel(
            name='VnfPkgSubscriptionModel',
            fields=[
                ('subscription_id', models.CharField(db_column='SUBSCRIPTION_ID', max_length=255, primary_key=True, serialize=False)),
                ('callback_uri', models.URLField(db_column='CALLBACK_URI', max_length=255)),
                ('auth_info', models.TextField(db_column='AUTH_INFO')),
                ('usage_states', models.TextField(db_column='USAGE_STATES')),
                ('notification_types', models.TextField(db_column='NOTIFICATION_TYPES')),
                ('vnfd_id', models.TextField(db_column='VNFD_ID')),
                ('vnf_pkg_id', models.TextField(db_column='VNF_PKG_ID')),
                ('operation_states', models.TextField(db_column='OPERATION_STATES')),
                ('vnf_products_from_provider', models.TextField(db_column='VNF_PRODUCTS_FROM_PROVIDER')),
                ('links', models.TextField(db_column='LINKS')),
            ],
            options={
                'db_table': 'VNF_PKG_SUBSCRIPTION',
            },
        ),
    ]
diff --git a/catalog/pub/database/migrations/__init__.py b/catalog/pub/database/migrations/__init__.py
new file mode 100644
index 0000000..0c847b7
--- /dev/null
+++ b/catalog/pub/database/migrations/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2019 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/pub/database/models.py b/catalog/pub/database/models.py
new file mode 100644
index 0000000..9f0b498
--- /dev/null
+++ b/catalog/pub/database/models.py
@@ -0,0 +1,234 @@
+# Copyright 2016-2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.db import models
+
+
class NSPackageModel(models.Model):
    """ORM mapping for an onboarded NS (network service) package.

    Mirrors the CATALOG_NSPACKAGE table; nsPackageId is the primary key.
    """
    nsPackageId = models.CharField(db_column='NSPACKAGEID', primary_key=True, max_length=50)
    nsPackageUri = models.CharField(db_column='NSPACKAGEURI', max_length=300, null=True, blank=True)
    checksum = models.CharField(db_column='CHECKSUM', max_length=50, null=True, blank=True)  # checksum
    sdcCsarId = models.CharField(db_column='SDCCSARID', max_length=50, null=True, blank=True)  # SdcCSARUri
    onboardingState = models.CharField(db_column='ONBOARDINGSTATE', max_length=20, blank=True, null=True)
    operationalState = models.CharField(db_column='OPERATIONALSTATE', max_length=20, blank=True, null=True)  # operationalState
    usageState = models.CharField(db_column='USAGESTATE', max_length=20, blank=True, null=True)  # usageState
    deletionPending = models.CharField(db_column='DELETIONPENDING', max_length=20, blank=True, null=True)  # deletionPending
    nsdId = models.CharField(db_column='NSDID', max_length=50, blank=True, null=True)
    invariantId = models.CharField(db_column='INVARIANTID', max_length=50, blank=True, null=True)  # nsdInvariantId
    nsdName = models.CharField(db_column='NSDNAME', max_length=50, blank=True, null=True)
    # NOTE(review): "nsdDesginer" is a typo of "nsdDesigner" (the db_column is
    # spelled correctly); renaming the attribute would break existing callers.
    nsdDesginer = models.CharField(db_column='NSDDESIGNER', max_length=50, null=True, blank=True)
    nsdDescription = models.CharField(db_column='NSDDESCRIPTION', max_length=100, null=True, blank=True)
    nsdVersion = models.CharField(db_column='NSDVERSION', max_length=20, null=True, blank=True)
    userDefinedData = models.TextField(db_column='USERDEFINEDDATA', max_length=1024, blank=True, null=True)  # userDefinedData
    localFilePath = models.CharField(db_column='LOCALFILEPATH', max_length=300, null=True, blank=True)
    nsdModel = models.TextField(db_column='NSDMODEL', max_length=65535, null=True, blank=True)

    class Meta:
        db_table = 'CATALOG_NSPACKAGE'
+
+
class ServicePackageModel(models.Model):
    """ORM mapping for an onboarded service package.

    Mirrors the CATALOG_SERVICEPACKAGE table; servicePackageId is the
    primary key.  Field layout parallels NSPackageModel.
    """
    servicePackageId = models.CharField(db_column='SERVICEPACKAGEID', primary_key=True, max_length=50)
    servicePackageUri = models.CharField(db_column='SERVICEPACKAGEURI', max_length=300, null=True, blank=True)
    checksum = models.CharField(db_column='CHECKSUM', max_length=50, null=True, blank=True)  # checksum
    sdcCsarId = models.CharField(db_column='SDCCSARID', max_length=50, null=True, blank=True)  # SdcCSARUri
    onboardingState = models.CharField(db_column='ONBOARDINGSTATE', max_length=20, blank=True, null=True)
    operationalState = models.CharField(db_column='OPERATIONALSTATE', max_length=20, blank=True, null=True)  # operationalState
    usageState = models.CharField(db_column='USAGESTATE', max_length=20, blank=True, null=True)  # usageState
    deletionPending = models.CharField(db_column='DELETIONPENDING', max_length=20, blank=True, null=True)  # deletionPending
    servicedId = models.CharField(db_column='SERVICEDID', max_length=50, blank=True, null=True)
    invariantId = models.CharField(db_column='INVARIANTID', max_length=50, blank=True, null=True)  # servicedInvariantId
    servicedName = models.CharField(db_column='SERVICEDNAME', max_length=50, blank=True, null=True)
    servicedDesigner = models.CharField(db_column='SERVICEDDESIGNER', max_length=50, null=True, blank=True)
    servicedDescription = models.CharField(db_column='SERVICEDDESCRIPTION', max_length=100, null=True, blank=True)
    servicedVersion = models.CharField(db_column='SERVICEDVERSION', max_length=20, null=True, blank=True)
    userDefinedData = models.TextField(db_column='USERDEFINEDDATA', max_length=1024, blank=True, null=True)  # userDefinedData
    localFilePath = models.CharField(db_column='LOCALFILEPATH', max_length=300, null=True, blank=True)
    servicedModel = models.TextField(db_column='SERVICEDMODEL', max_length=65535, null=True, blank=True)

    class Meta:
        db_table = 'CATALOG_SERVICEPACKAGE'
+
+
class VnfPackageModel(models.Model):
    """ORM mapping for an onboarded VNF package (table CATALOG_VNFPACKAGE)."""
    # uuid = models.CharField(db_column='UUID', primary_key=True, max_length=255)
    vnfPackageId = models.CharField(db_column='VNFPACKAGEID', primary_key=True, max_length=50)  # onboardedVnfPkgInfoId
    vnfPackageUri = models.CharField(db_column='VNFPACKAGEURI', max_length=300, null=True, blank=True)  # downloadUri
    # NOTE(review): leading capital breaks the camelCase convention used by every
    # other field; renaming would break existing callers, so it is kept as-is.
    SdcCSARUri = models.CharField(db_column='SDCCSARURI', max_length=300, null=True, blank=True)  # SdcCSARUri
    checksum = models.CharField(db_column='CHECKSUM', max_length=50, null=True, blank=True)  # checksum
    onboardingState = models.CharField(db_column='ONBOARDINGSTATE', max_length=20, blank=True, null=True)
    operationalState = models.CharField(db_column='OPERATIONALSTATE', max_length=20, blank=True, null=True)  # operationalState
    usageState = models.CharField(db_column='USAGESTATE', max_length=20, blank=True, null=True)  # usageState
    deletionPending = models.CharField(db_column='DELETIONPENDING', max_length=20, blank=True, null=True)  # deletionPending
    vnfdId = models.CharField(db_column='VNFDID', max_length=50, blank=True, null=True)  # vnfdId
    vnfVendor = models.CharField(db_column='VENDOR', max_length=50, blank=True, null=True)  # vnfProvider
    vnfdProductName = models.CharField(db_column='VNFDPRODUCTNAME', max_length=50, blank=True, null=True)  # vnfProductName
    vnfdVersion = models.CharField(db_column='VNFDVERSION', max_length=20, blank=True, null=True)  # vnfdVersion
    vnfSoftwareVersion = models.CharField(db_column='VNFSOFTWAREVERSION', max_length=20, blank=True, null=True)  # vnfSoftwareVersion
    userDefinedData = models.TextField(db_column='USERDEFINEDDATA', max_length=1024, blank=True, null=True)  # userDefinedData
    localFilePath = models.CharField(db_column='LOCALFILEPATH', max_length=300, null=True, blank=True)
    vnfdModel = models.TextField(db_column='VNFDMODEL', max_length=65535, blank=True, null=True)  # vnfd

    class Meta:
        db_table = 'CATALOG_VNFPACKAGE'
+
+
class PnfPackageModel(models.Model):
    """ORM mapping for an onboarded PNF package (table CATALOG_PNFPACKAGE).

    NOTE(review): unlike VnfPackageModel this has no operationalState field;
    presumably intentional for PNFDs — confirm against the PNFD lifecycle.
    """
    # uuid = models.CharField(db_column='UUID', primary_key=True, max_length=255)
    pnfPackageId = models.CharField(db_column='PNFPACKAGEID', primary_key=True, max_length=50)  # onboardedPnfPkgInfoId
    pnfPackageUri = models.CharField(db_column='PNFPACKAGEURI', max_length=300, null=True, blank=True)  # downloadUri
    sdcCSARUri = models.CharField(db_column='SDCCSARURI', max_length=300, null=True, blank=True)  # sdcCSARUri
    checksum = models.CharField(db_column='CHECKSUM', max_length=50, null=True, blank=True)  # checksum
    onboardingState = models.CharField(db_column='ONBOARDINGSTATE', max_length=20, blank=True, null=True)
    usageState = models.CharField(db_column='USAGESTATE', max_length=20, blank=True, null=True)  # usageState
    deletionPending = models.CharField(db_column='DELETIONPENDING', max_length=20, blank=True, null=True)  # deletionPending
    pnfdId = models.CharField(db_column='PNFDID', max_length=50, blank=True, null=True)  # pnfdId
    pnfVendor = models.CharField(db_column='VENDOR', max_length=50, blank=True, null=True)  # pnfProvider
    pnfdProductName = models.CharField(db_column='PNFDPRODUCTNAME', max_length=50, blank=True, null=True)  # pnfProductName
    pnfdVersion = models.CharField(db_column='PNFDVERSION', max_length=20, blank=True, null=True)  # pnfdVersion
    pnfSoftwareVersion = models.CharField(db_column='PNFSOFTWAREVERSION', max_length=20, blank=True, null=True)  # pnfSoftwareVersion
    userDefinedData = models.TextField(db_column='USERDEFINEDDATA', max_length=1024, blank=True, null=True)  # userDefinedData
    localFilePath = models.CharField(db_column='LOCALFILEPATH', max_length=300, null=True, blank=True)
    pnfdModel = models.TextField(db_column='PNFDMODEL', max_length=65535, blank=True, null=True)  # pnfd
    pnfdName = models.TextField(db_column='PNFDNAME', max_length=65535, blank=True, null=True)  # pnfd_name

    class Meta:
        db_table = 'CATALOG_PNFPACKAGE'
+
+
class SoftwareImageModel(models.Model):
    """ORM mapping for a software image belonging to a VNF package
    (table CATALOG_SOFTWAREIMAGEMODEL); vnfPackageId links to VnfPackageModel.
    """
    imageid = models.CharField(db_column='IMAGEID', primary_key=True, max_length=50)
    containerFormat = models.CharField(db_column='CONTAINERFORMAT', max_length=20)
    diskFormat = models.CharField(db_column='DISKFORMAT', max_length=20)
    mindisk = models.CharField(db_column='MINDISK', max_length=20)
    minram = models.CharField(db_column='MINRAM', max_length=20)
    # NOTE(review): 'USAERMETADATA' looks like a typo of 'USERMETADATA', but it
    # matches the deployed schema/migration, so it must not be changed casually.
    usermetadata = models.CharField(db_column='USAERMETADATA', max_length=1024)
    vnfPackageId = models.CharField(db_column='VNFPACKAGEID', max_length=50)
    filePath = models.CharField(db_column='FILEPATH', max_length=300)
    status = models.CharField(db_column='STATUS', max_length=10)
    vimid = models.CharField(db_column='VIMID', max_length=50)
    # filetype = models.CharField(db_column='FILETYPE', max_length=2)
    # vimuser = models.CharField(db_column='VIMUSER', max_length=50)
    # tenant = models.CharField(db_column='TENANT', max_length=50)
    # purpose = models.CharField(db_column='PURPOSE', max_length=1000)

    class Meta:
        db_table = 'CATALOG_SOFTWAREIMAGEMODEL'
+
+
class JobModel(models.Model):
    """ORM mapping for an asynchronous job record (table CATALOG_JOB)."""
    jobid = models.CharField(db_column='JOBID', primary_key=True, max_length=255)
    jobtype = models.CharField(db_column='JOBTYPE', max_length=255)
    jobaction = models.CharField(db_column='JOBACTION', max_length=255)
    resid = models.CharField(db_column='RESID', max_length=255)
    status = models.IntegerField(db_column='STATUS', null=True, blank=True)
    starttime = models.CharField(db_column='STARTTIME', max_length=255, null=True, blank=True)
    endtime = models.CharField(db_column='ENDTIME', max_length=255, null=True, blank=True)
    progress = models.IntegerField(db_column='PROGRESS', null=True, blank=True)
    user = models.CharField(db_column='USER', max_length=255, null=True, blank=True)
    parentjobid = models.CharField(db_column='PARENTJOBID', max_length=255, null=True, blank=True)
    resname = models.CharField(db_column='RESNAME', max_length=255, null=True, blank=True)

    class Meta:
        db_table = 'CATALOG_JOB'

    def toJSON(self):
        """Serialize every model field of this row to a JSON object string."""
        import json
        # Dict comprehension instead of dict() over a list of pairs (C4xx idiom).
        return json.dumps({f.name: getattr(self, f.name) for f in self._meta.fields})
+
+
class JobStatusModel(models.Model):
    """ORM mapping for one status/progress step of a job (table CATALOG_JOB_STATUS).

    NOTE(review): no explicit primary key, so Django adds an implicit auto 'id'
    column; 'indexid' is a plain IntegerField — confirm this matches the schema.
    """
    indexid = models.IntegerField(db_column='INDEXID')
    jobid = models.CharField(db_column='JOBID', max_length=255)
    status = models.CharField(db_column='STATUS', max_length=255)
    progress = models.IntegerField(db_column='PROGRESS', null=True, blank=True)
    descp = models.TextField(db_column='DESCP', max_length=65535)
    errcode = models.CharField(db_column='ERRCODE', max_length=255, null=True, blank=True)
    addtime = models.CharField(db_column='ADDTIME', max_length=255, null=True, blank=True)

    class Meta:
        db_table = 'CATALOG_JOB_STATUS'

    def toJSON(self):
        """Serialize every model field of this row to a JSON object string."""
        import json
        # Dict comprehension instead of dict() over a list of pairs (C4xx idiom).
        return json.dumps({f.name: getattr(self, f.name) for f in self._meta.fields})
+
+
class NsdmSubscriptionModel(models.Model):
    """ORM mapping for an NSD-management subscription (table CATALOG_NSDM_SUBSCRIPTION).

    Most columns are TextFields holding serialized filter values; 'links' holds
    the serialized _links structure.
    """
    subscriptionid = models.CharField(db_column='SUBSCRIPTIONID', max_length=255, primary_key=True)
    notificationTypes = models.TextField(db_column='NOTIFICATIONTYPES', null=True)
    auth_info = models.TextField(db_column='AUTHINFO', null=True)
    callback_uri = models.CharField(db_column='CALLBACKURI', max_length=255)
    nsdInfoId = models.TextField(db_column='NSDINFOID', null=True)
    nsdId = models.TextField(db_column='NSDID', null=True)
    nsdName = models.TextField(db_column='NSDNAME', null=True)
    nsdVersion = models.TextField(db_column='NSDVERSION', null=True)
    nsdDesigner = models.TextField(db_column='NSDDESIGNER', null=True)
    nsdInvariantId = models.TextField(db_column='NSDINVARIANTID', null=True)
    vnfPkgIds = models.TextField(db_column='VNFPKGIDS', null=True)
    pnfdInfoIds = models.TextField(db_column='PNFDINFOIDS', null=True)
    nestedNsdInfoIds = models.TextField(db_column='NESTEDNSDINFOIDS', null=True)
    nsdOnboardingState = models.TextField(db_column='NSDONBOARDINGSTATE', null=True)
    nsdOperationalState = models.TextField(db_column='NSDOPERATIONALSTATE', null=True)
    nsdUsageState = models.TextField(db_column='NSDUSAGESTATE', null=True)
    pnfdId = models.TextField(db_column='PNFDID', null=True)
    pnfdName = models.TextField(db_column='PNFDNAME', null=True)
    pnfdVersion = models.TextField(db_column='PNFDVERSION', null=True)
    pnfdProvider = models.TextField(db_column='PNFDPROVIDER', null=True)
    pnfdInvariantId = models.TextField(db_column='PNFDINVARIANTID', null=True)
    pnfdOnboardingState = models.TextField(db_column='PNFDONBOARDINGSTATE', null=True)
    pnfdUsageState = models.TextField(db_column='PNFDUSAGESTATE', null=True)
    links = models.TextField(db_column='LINKS')

    class Meta:
        db_table = 'CATALOG_NSDM_SUBSCRIPTION'

    def toJSON(self):
        """Serialize every model field of this row to a JSON object string."""
        import json
        # Dict comprehension instead of dict() over a list of pairs (C4xx idiom).
        return json.dumps({f.name: getattr(self, f.name) for f in self._meta.fields})
+
+
class VnfPkgSubscriptionModel(models.Model):
    """ORM mapping for a VNF-package subscription (table VNF_PKG_SUBSCRIPTION).

    Filter columns (usage_states, notification_types, vnfd_id, ...) hold
    JSON-encoded lists; toDict() decodes them back for API responses.
    """
    subscription_id = models.CharField(max_length=255, primary_key=True, db_column='SUBSCRIPTION_ID')
    callback_uri = models.URLField(db_column="CALLBACK_URI", max_length=255)
    auth_info = models.TextField(db_column="AUTH_INFO")
    usage_states = models.TextField(db_column="USAGE_STATES")
    notification_types = models.TextField(db_column="NOTIFICATION_TYPES")
    vnfd_id = models.TextField(db_column="VNFD_ID")
    vnf_pkg_id = models.TextField(db_column="VNF_PKG_ID")
    operation_states = models.TextField(db_column="OPERATION_STATES")
    vnf_products_from_provider = \
        models.TextField(db_column="VNF_PRODUCTS_FROM_PROVIDER")
    links = models.TextField(db_column="LINKS")

    class Meta:
        db_table = 'VNF_PKG_SUBSCRIPTION'

    def toDict(self):
        """Build the API representation of this subscription.

        Returns a dict with id, callbackUri, decoded _links, and a "filter"
        sub-dict whose values are json.loads of the stored filter columns.
        """
        import json
        subscription_obj = {
            "id": self.subscription_id,
            "callbackUri": self.callback_uri,
            "_links": json.loads(self.links)
        }
        # Every filter column is stored JSON-encoded; decode each for the response.
        filter_obj = {
            "notificationTypes": json.loads(self.notification_types),
            "vnfdId": json.loads(self.vnfd_id),
            "vnfPkgId": json.loads(self.vnf_pkg_id),
            "operationalState": json.loads(self.operation_states),
            "usageState": json.loads(self.usage_states),
            "vnfProductsFromProviders": json.loads(self.vnf_products_from_provider)
        }
        subscription_obj["filter"] = filter_obj
        return subscription_obj
diff --git a/catalog/pub/exceptions.py b/catalog/pub/exceptions.py
new file mode 100644
index 0000000..e0dac0e
--- /dev/null
+++ b/catalog/pub/exceptions.py
@@ -0,0 +1,57 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
class CatalogException(Exception):
    """Base class for all catalog exceptions; every exception below derives from it."""
    pass
+
+
class BadRequestException(CatalogException):
    """Signals a malformed request (presumably mapped to HTTP 400 by views -- confirm)."""
    pass
+
+
class ResourceNotFoundException(CatalogException):
    """Signals that a requested resource does not exist."""
    pass
+
+
class PackageNotFoundException(CatalogException):
    """Signals that a requested package does not exist."""
    pass
+
+
class PackageHasExistsException(CatalogException):
    """Signals an attempt to create a package that already exists."""
    pass
+
+
class VnfPkgSubscriptionException(CatalogException):
    """Signals a VNF-package subscription error."""
    pass
+
+
class VnfPkgDuplicateSubscriptionException(CatalogException):
    """Signals that an equivalent VNF-package subscription already exists."""
    pass
+
+
class SubscriptionDoesNotExistsException(CatalogException):
    """Signals that the referenced subscription does not exist."""
    pass
+
+
class NsdmBadRequestException(CatalogException):
    """Signals a malformed NSD-management request."""
    pass
+
+
class NsdmDuplicateSubscriptionException(CatalogException):
    """Signals that an equivalent NSD-management subscription already exists."""
    pass
+
+
class ArtifactNotFoundException(CatalogException):
    """Signals that a requested artifact does not exist."""
    pass
diff --git a/catalog/pub/msapi/__init__.py b/catalog/pub/msapi/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/catalog/pub/msapi/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/pub/msapi/extsys.py b/catalog/pub/msapi/extsys.py
new file mode 100644
index 0000000..e038f26
--- /dev/null
+++ b/catalog/pub/msapi/extsys.py
@@ -0,0 +1,175 @@
+# Copyright 2016 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import uuid
+
+from catalog.pub.config.config import AAI_BASE_URL, AAI_USER, AAI_PASSWD
+from catalog.pub.exceptions import CatalogException
+from catalog.pub.utils import restcall
+from catalog.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
def call_aai(resource, method, content=''):
    """Invoke the AAI REST API.

    Args:
        resource: resource path relative to AAI_BASE_URL.
        method: HTTP method name, e.g. "GET".
        content: request body; '' means no body.

    Returns:
        The (ret_code, content, status_code, ...) tuple from restcall.call_req,
        where ret_code == 0 indicates success.
    """
    additional_headers = {
        'X-FromAppId': 'VFC-CATALOG',
        'X-TransactionId': str(uuid.uuid1())
    }
    # Keyword arguments keep this call consistent with call_sdc in sdc.py.
    return restcall.call_req(base_url=AAI_BASE_URL,
                             user=AAI_USER,
                             passwd=AAI_PASSWD,
                             auth_type=restcall.rest_no_auth,
                             resource=resource,
                             method=method,
                             content=content,
                             additional_headers=additional_headers)
+
+
def get_vims():
    """Fetch all cloud regions from AAI and return them as internal vim dicts.

    Raises:
        CatalogException: if the AAI query fails.
    """
    ret = call_aai("/cloud-infrastructure/cloud-regions?depth=all", "GET")
    if ret[0] != 0:
        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
        raise CatalogException("Failed to query vims from extsys.")
    # json.loads is the idiomatic equivalent of json.JSONDecoder().decode.
    vims_aai = ignore_case_get(json.loads(ret[1]), "cloud-region")
    return [convert_vim_info(vim) for vim in vims_aai]
+
+
def get_vim_by_id(vim_id):
    """Fetch one cloud region ("<owner>_<region>") from AAI as an internal vim dict.

    Raises:
        CatalogException: if the AAI query fails.
    """
    cloud_owner, cloud_region = split_vim_to_owner_region(vim_id)
    ret = call_aai("/cloud-infrastructure/cloud-regions/cloud-region/%s/%s?depth=all"
                   % (cloud_owner, cloud_region), "GET")
    if ret[0] != 0:
        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
        raise CatalogException("Failed to query vim(%s) from extsys." % vim_id)
    # json.loads is the idiomatic equivalent of json.JSONDecoder().decode.
    return convert_vim_info(json.loads(ret[1]))
+
+
def split_vim_to_owner_region(vim_id):
    """Split a "<cloud_owner>_<cloud_region>" vim id into its two parts.

    Only the FIRST '_' separates owner from region, so a region id that itself
    contains underscores survives a round trip through convert_vim_info (which
    joins owner and region with '_').  The previous "".join(parts[1:]) silently
    deleted those interior underscores.

    Returns:
        (cloud_owner, cloud_region); cloud_region is '' when no '_' is present.
    """
    cloud_owner, _, cloud_region = vim_id.partition('_')
    return cloud_owner, cloud_region
+
+
def convert_vim_info(vim_info_aai):
    """Convert an AAI cloud-region record into the internal vim_info dict.

    All connection attributes are read from the first esr-system-info entry.
    """
    vim_id = vim_info_aai["cloud-owner"] + "_" + vim_info_aai["cloud-region-id"]
    esr_system_info = ignore_case_get(ignore_case_get(vim_info_aai, "esr-system-info-list"), "esr-system-info")
    sys_info = esr_system_info[0]  # hoisted: every lookup below targets the first entry
    return {
        "vimId": vim_id,
        "name": vim_id,
        "url": ignore_case_get(sys_info, "service-url"),
        "userName": ignore_case_get(sys_info, "user-name"),
        "password": ignore_case_get(sys_info, "password"),
        "tenant": ignore_case_get(sys_info, "default-tenant"),
        "vendor": ignore_case_get(sys_info, "vendor"),
        "version": ignore_case_get(sys_info, "version"),
        "description": "vim",
        "domain": "",
        "type": ignore_case_get(sys_info, "type"),
        "createTime": "2016-07-18 12:22:53"  # fixed placeholder, kept for compatibility
    }
+
+
def get_sdn_controller_by_id(sdn_ontroller_id):
    """Fetch one third-party SDN controller record from AAI as an internal dict.

    Raises:
        CatalogException: if the AAI query fails.
    """
    ret = call_aai("/external-system/esr-thirdparty-sdnc-list/esr-thirdparty-sdnc/%s?depth=all"
                   % sdn_ontroller_id, "GET")
    if ret[0] != 0:
        logger.error("Failed to query sdn ontroller(%s) from extsys. detail is %s.", sdn_ontroller_id, ret[1])
        raise CatalogException("Failed to query sdn ontroller(%s) from extsys." % sdn_ontroller_id)
    # json.loads is the idiomatic equivalent of json.JSONDecoder().decode.
    return convert_sdnc_info(json.loads(ret[1]))
+
+
def convert_sdnc_info(sdnc_info_aai):
    """Convert an AAI esr-thirdparty-sdnc record into the internal sdnc_info dict.

    All connection attributes are read from the first esr-system-info entry.
    """
    esr_system_info = ignore_case_get(ignore_case_get(sdnc_info_aai, "esr-system-info-list"), "esr-system-info")
    sys_info = esr_system_info[0]  # hoisted: every lookup below targets the first entry
    return {
        "sdnControllerId": sdnc_info_aai["thirdparty-sdnc-id"],
        "name": sdnc_info_aai["thirdparty-sdnc-id"],
        "url": ignore_case_get(sys_info, "service-url"),
        "userName": ignore_case_get(sys_info, "user-name"),
        "password": ignore_case_get(sys_info, "password"),
        "vendor": ignore_case_get(sys_info, "vendor"),
        "version": ignore_case_get(sys_info, "version"),
        "description": "",
        # NOTE(review): "protocal" is presumably the (misspelled) AAI field name;
        # confirm against the AAI schema before "fixing" the spelling.
        "protocol": ignore_case_get(sys_info, "protocal"),
        "productName": ignore_case_get(sdnc_info_aai, "product-name"),
        "type": ignore_case_get(sys_info, "type"),
        "createTime": "2016-07-18 12:22:53"  # fixed placeholder, kept for compatibility
    }
+
+
def get_vnfm_by_id(vnfm_inst_id):
    """Fetch one VNFM record from AAI as an internal vnfm_info dict.

    Raises:
        CatalogException: if the AAI query fails.
    """
    uri = "/external-system/esr-vnfm-list/esr-vnfm/%s?depth=all" % vnfm_inst_id
    ret = call_aai(uri, "GET")
    # != 0 for consistency with the other AAI helpers (0 means success).
    if ret[0] != 0:
        logger.error('Send get VNFM information request to extsys failed.')
        raise CatalogException('Send get VNFM information request to extsys failed.')
    # json.loads is the idiomatic equivalent of json.JSONDecoder().decode.
    return convert_vnfm_info(json.loads(ret[1]))
+
+
def convert_vnfm_info(vnfm_info_aai):
    """Convert an AAI esr-vnfm record into the internal vnfm_info dict.

    All connection attributes are read from the first esr-system-info entry.
    """
    esr_system_info = ignore_case_get(ignore_case_get(vnfm_info_aai, "esr-system-info-list"), "esr-system-info")
    sys_info = esr_system_info[0]  # hoisted: every lookup below targets the first entry
    return {
        "vnfmId": vnfm_info_aai["vnfm-id"],
        "name": vnfm_info_aai["vnfm-id"],
        "type": ignore_case_get(sys_info, "type"),
        "vimId": vnfm_info_aai["vim-id"],
        "vendor": ignore_case_get(sys_info, "vendor"),
        "version": ignore_case_get(sys_info, "version"),
        "description": "vnfm",
        "certificateUrl": vnfm_info_aai["certificate-url"],
        "url": ignore_case_get(sys_info, "service-url"),
        "userName": ignore_case_get(sys_info, "user-name"),
        "password": ignore_case_get(sys_info, "password"),
        "createTime": "2016-07-06 15:33:18"  # fixed placeholder, kept for compatibility
    }
+
+
def select_vnfm(vnfm_type, vim_id):
    """Select from AAI the vnfm matching both vnfm_type and vim_id.

    Raises:
        CatalogException: if the AAI query fails or no vnfm matches.
    """
    uri = "/external-system/esr-vnfm-list?depth=all"
    ret = call_aai(uri, "GET")
    if ret[0] != 0:
        logger.error("Failed to call %s: %s", uri, ret[1])
        raise CatalogException('Failed to get vnfms from extsys.')
    vnfms = ignore_case_get(json.loads(ret[1]), "esr-vnfm")
    for vnfm in vnfms:
        esr_system_info = ignore_case_get(vnfm, "esr-system-info")
        found_type = ignore_case_get(esr_system_info, "type")  # renamed: do not shadow builtin type()
        # NOTE(review): this compares the record's "vnfm-id" against the caller's
        # vim_id, whereas convert_vnfm_info reads the vim from "vim-id" -- looks
        # suspicious; behavior preserved pending confirmation.
        found_vim_id = vnfm["vnfm-id"]
        if found_type == vnfm_type and found_vim_id == vim_id:
            return convert_vnfm_info(vnfm)
    raise CatalogException('No vnfm found with %s in vim(%s)' % (vnfm_type, vim_id))
diff --git a/catalog/pub/msapi/sdc.py b/catalog/pub/msapi/sdc.py
new file mode 100644
index 0000000..bb473cc
--- /dev/null
+++ b/catalog/pub/msapi/sdc.py
@@ -0,0 +1,129 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+
+from catalog.pub.config.config import SDC_BASE_URL, SDC_USER, SDC_PASSWD
+from catalog.pub.exceptions import CatalogException
+from catalog.pub.utils import fileutil
+from catalog.pub.utils import restcall
+
+logger = logging.getLogger(__name__)
+
# Asset types accepted by the SDC catalog API, and the distribution status an
# asset/artifact must have reached before it may be consumed.
ASSETTYPE_RESOURCES = "resources"
ASSETTYPE_SERVICES = "services"
DISTRIBUTED = "DISTRIBUTED"
+
+
def call_sdc(resource, method, content=''):
    """Send one request to the SDC catalog API.

    Returns the restcall.call_req result tuple; element 0 is 0 on success.
    """
    headers = {'X-ECOMP-InstanceID': 'VFC'}
    return restcall.call_req(base_url=SDC_BASE_URL,
                             user=SDC_USER,
                             passwd=SDC_PASSWD,
                             auth_type=restcall.rest_no_auth,
                             resource=resource,
                             method=method,
                             content=content,
                             additional_headers=headers)
+
+
+"""
+sample of return value
+[
+ {
+ "uuid": "c94490a0-f7ef-48be-b3f8-8d8662a37236",
+ "invariantUUID": "63eaec39-ffbe-411c-a838-448f2c73f7eb",
+ "name": "underlayvpn",
+ "version": "2.0",
+ "toscaModelURL": "/sdc/v1/catalog/resources/c94490a0-f7ef-48be-b3f8-8d8662a37236/toscaModel",
+ "category": "Volte",
+ "subCategory": "VolteVF",
+ "resourceType": "VF",
+ "lifecycleState": "CERTIFIED",
+ "lastUpdaterUserId": "jh0003"
+ }
+]
+"""
+
+
def get_artifacts(asset_type):
    """List all SDC catalog assets of the given type ("resources" or "services").

    Raises:
        CatalogException: if the SDC query fails.
    """
    resource = "/sdc/v1/catalog/{assetType}".format(assetType=asset_type)
    ret = call_sdc(resource, "GET")
    if ret[0] != 0:
        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
        raise CatalogException("Failed to query artifacts(%s) from sdc." % asset_type)
    # json.loads is the idiomatic equivalent of json.JSONDecoder().decode.
    return json.loads(ret[1])
+
+
def get_artifact(asset_type, csar_id):
    """Return the artifact whose uuid equals csar_id.

    Services must additionally be in DISTRIBUTED state.

    Raises:
        CatalogException: if no such artifact exists or a service is not distributed.
    """
    for candidate in get_artifacts(asset_type):
        if candidate["uuid"] != csar_id:
            continue
        if asset_type == ASSETTYPE_SERVICES and candidate.get("distributionStatus") != DISTRIBUTED:
            raise CatalogException("The artifact (%s,%s) is not distributed from sdc." % (asset_type, csar_id))
        return candidate
    raise CatalogException("Failed to query artifact(%s,%s) from sdc." % (asset_type, csar_id))
+
+
def get_asset(asset_type, uuid):
    """Fetch one asset's metadata from SDC and require it to be DISTRIBUTED.

    Raises:
        CatalogException: if the query fails or the asset is not distributed.
    """
    resource = "/sdc/v1/catalog/{assetType}/{uuid}/metadata".format(assetType=asset_type, uuid=uuid)
    ret = call_sdc(resource, "GET")
    if ret[0] != 0:
        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
        raise CatalogException("Failed to get asset(%s, %s) from sdc." % (asset_type, uuid))
    asset = json.loads(ret[1])  # idiomatic equivalent of json.JSONDecoder().decode
    if asset.get("distributionStatus") != DISTRIBUTED:
        raise CatalogException("The asset (%s,%s) is not distributed from sdc." % (asset_type, uuid))
    return asset
+
+
def delete_artifact(asset_type, asset_id, artifact_id):
    """Delete one artifact of an SDC asset and return the decoded response.

    Raises:
        CatalogException: if the deletion fails.
    """
    resource = "/sdc/v1/catalog/{assetType}/{uuid}/artifacts/{artifactUUID}".format(
        assetType=asset_type, uuid=asset_id, artifactUUID=artifact_id)
    ret = call_sdc(resource, "DELETE")
    if ret[0] != 0:
        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
        raise CatalogException("Failed to delete artifacts(%s) from sdc." % artifact_id)
    # json.loads is the idiomatic equivalent of json.JSONDecoder().decode.
    return json.loads(ret[1])
+
+
def download_artifacts(download_url, local_path, file_name):
    """Download an artifact from SDC into local_path/file_name.

    Returns:
        The full local file path of the downloaded artifact.

    Raises:
        CatalogException: if the download fails.
    """
    additional_headers = {
        'X-ECOMP-InstanceID': 'VFC',
        'accept': 'application/octet-stream'
    }
    ret = restcall.call_req(base_url=SDC_BASE_URL,
                            user=SDC_USER,
                            passwd=SDC_PASSWD,
                            auth_type=restcall.rest_no_auth,
                            resource=download_url,
                            method="GET",
                            additional_headers=additional_headers)
    if ret[0] != 0:
        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
        raise CatalogException("Failed to download %s from sdc." % download_url)
    fileutil.make_dirs(local_path)
    local_file_name = os.path.join(local_path, file_name)
    # Context manager guarantees the handle is closed even if the write raises.
    with open(local_file_name, 'wb') as local_file:
        local_file.write(ret[1])
    return local_file_name
diff --git a/catalog/pub/redisco/__init__.py b/catalog/pub/redisco/__init__.py
new file mode 100644
index 0000000..217a232
--- /dev/null
+++ b/catalog/pub/redisco/__init__.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2010 Tim Medina
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+# The original code link is https://github.com/iamteem/redisco/tree/master/redisco/__init__.py
+
+
+import redis
+
+
class Client(object):
    """Holds Redis connection settings and builds redis.Redis clients from them."""

    def __init__(self, **kwargs):
        # No settings supplied -> fall back to the local default server.
        if not kwargs:
            kwargs = {'host': 'localhost', 'port': 6379, 'db': 0}
        self.connection_settings = kwargs

    def redis(self):
        """Create a redis.Redis instance from the current settings."""
        return redis.Redis(**self.connection_settings)

    def update(self, d):
        """Merge the dict d into the stored connection settings."""
        self.connection_settings.update(d)
+
+
def connection_setup(**kwargs):
    """Reconfigure the module-wide Redis connection.

    Merges kwargs into the shared Client (creating one if the module-level
    client is unset) and rebuilds the module-level `connection` from it.
    """
    global connection, client
    if client:
        client.update(kwargs)
    else:
        client = Client(**kwargs)
    connection = client.redis()
+
+
def get_client():
    """Return the module-wide Redis connection object."""
    # Reading a module global needs no `global` declaration.
    return connection
+
+
# Default module-level client/connection, created eagerly at import time;
# connection_setup() replaces them when custom settings are supplied.
client = Client()
connection = client.redis()

__all__ = ['connection_setup', 'get_client']
diff --git a/catalog/pub/redisco/containers.py b/catalog/pub/redisco/containers.py
new file mode 100644
index 0000000..d30c227
--- /dev/null
+++ b/catalog/pub/redisco/containers.py
@@ -0,0 +1,116 @@
+# Copyright (c) 2010 Tim Medina
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+# The original code link is https://github.com/iamteem/redisco/tree/master/redisco/containers.py
+
+"""
+This module contains the container classes to create objects
+that persist directly in a Redis server.
+"""
+
import collections
import collections.abc  # Hash subclasses collections.abc.MutableMapping
from functools import partial
+
+
class Container(object):
    """Create a container object saved in Redis.

    Arguments:
    key -- the Redis key this container is stored at
    db -- the Redis client object. Default: None

    When ``db`` is not set, it gets the default connection from the
    ``redisco.connection`` module.
    """

    def __init__(self, key, db=None, pipeline=None):
        # A pipeline, when given, takes precedence over the explicit db
        # (see the `db` property below).
        self._db = db
        self.key = key
        self.pipeline = pipeline

    def clear(self):
        """Remove container from Redis database."""
        del self.db[self.key]

    def __getattribute__(self, att):
        # Delegate the redis commands listed in DELEGATEABLE_METHODS to the
        # underlying client, pre-binding self.key as the first argument, so
        # subclasses can write e.g. self.hget(field) instead of
        # self.db.hget(self.key, field).
        if att in object.__getattribute__(self, 'DELEGATEABLE_METHODS'):
            return partial(getattr(object.__getattribute__(self, 'db'), att), self.key)
        else:
            return object.__getattribute__(self, att)

    @property
    def db(self):
        """Resolve the client to use: pipeline > explicit db > cached default."""
        if self.pipeline:
            return self.pipeline
        if self._db:
            return self._db
        if hasattr(self, 'db_cache') and self.db_cache:
            return self.db_cache
        else:
            # Fall back to the module-wide connection and cache it per instance.
            from . import connection
            self.db_cache = connection
            return self.db_cache

    # Redis command names that __getattribute__ auto-binds to self.key;
    # subclasses override this with their type-specific command set.
    DELEGATEABLE_METHODS = ()
+
+
class Hash(Container, collections.abc.MutableMapping):
    """A Redis hash exposed through the standard mutable-mapping protocol.

    The hash commands in DELEGATEABLE_METHODS are auto-bound to ``self.key``
    by ``Container.__getattribute__``, so e.g. ``self.hget(f)`` runs
    ``HGET <key> <f>`` on the server.

    Fix: the base class is ``collections.abc.MutableMapping`` — the alias
    ``collections.MutableMapping`` was removed in Python 3.10.
    """

    def __getitem__(self, att):
        return self.hget(att)

    def __setitem__(self, att, val):
        self.hset(att, val)

    def __delitem__(self, att):
        self.hdel(att)

    def __len__(self):
        return self.hlen()

    def __iter__(self):
        return self.hgetall().__iter__()

    def __contains__(self, att):
        return self.hexists(att)

    def __repr__(self):
        return "<%s '%s' %s>" % (self.__class__.__name__, self.key, self.hgetall())

    def keys(self):
        return self.hkeys()

    def values(self):
        return self.hvals()

    def _get_dict(self):
        return self.hgetall()

    def _set_dict(self, new_dict):
        # Replace the whole hash: delete the key, then write the new fields
        # via MutableMapping.update -> __setitem__.
        self.clear()
        self.update(new_dict)

    # Expose the entire hash as a plain dict attribute.
    dict = property(_get_dict, _set_dict)

    DELEGATEABLE_METHODS = ('hlen', 'hset', 'hdel', 'hkeys', 'hgetall', 'hvals',
                            'hget', 'hexists', 'hincrby', 'hmget', 'hmset')
diff --git a/catalog/pub/ssl/cert/foobar.crt b/catalog/pub/ssl/cert/foobar.crt
new file mode 100644
index 0000000..7ab6dd3
--- /dev/null
+++ b/catalog/pub/ssl/cert/foobar.crt
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDRDCCAiwCCQD8zmUqCHOp2zANBgkqhkiG9w0BAQsFADBjMQswCQYDVQQGEwJD
+TjEQMA4GA1UECAwHQmVpSmluZzEQMA4GA1UEBwwHQmVpSmluZzENMAsGA1UECgwE
+Q21jYzESMBAGA1UECwwJQ21jYy1vbmFwMQ0wCwYDVQQDDARDbWNjMCAXDTE5MDMy
+NjAyNTI0N1oYDzIxMTkwMzAyMDI1MjQ3WjBjMQswCQYDVQQGEwJDTjEQMA4GA1UE
+CAwHQmVpSmluZzEQMA4GA1UEBwwHQmVpSmluZzENMAsGA1UECgwEQ21jYzESMBAG
+A1UECwwJQ21jYy1vbmFwMQ0wCwYDVQQDDARDbWNjMIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEA4DurchTgEw/A1y/Q5gpSSJTLC+KFOV4Vmbz2hlvOGLwV
+NIX1+r7DpaiJTGjEKLCtGsD2tGm69KiUX9FBY1CStnwK2R4wA5NKW+ZKQLd3sRTc
+Hl+2bLFk7E5KvmKZZM4xhsN3ey7Ia8H0sSfKiGlxB1hZI2HibRNy8GWyi95j8MkP
+v+H7HbJlX1kIKb7p2y8aG8AnAzBWikJFcQ1y3bJA2r31wOht63pIekwh+nntt5u+
+Yh/STXHiAe2gT7b9x6RAn09tC6TsBKzdZ4ZKrBLfRwPv6+cbDLcqkhbPukqaFaEs
+rDCLhuWX10sGLEsqXULDwZRoYxTUueLek9v+/8f5EwIDAQABMA0GCSqGSIb3DQEB
+CwUAA4IBAQCenowNpFiy9vH18+9PL4rZjZ1NH+frGqsWvDiyHPnLpneCLOuiXvgv
+kcuLJDYatc6vTlXkJElxwF1fCaJEn6dNq3WtQxdJjhXidAKx8Hsf1Nxkwbvmahv2
+TIWV/FMvop+9SdonDBGZojrYKRsY3EilQf+7/rGEM52HE8S3yE8CCe9xTZSYUs1B
+B8CzOPBVU7SWSRSLUKfdRhjyl4Rqsslxzal+8A36yViHBPhJgmDRoVWVR+E289IH
+FCQ0d8qVvdTGkM79dvZrEH9WSzPwlTR0NSkBMWTNLcWyP8caDjg+fbSVOF+s+sd/
+bLuAyHyeXUzClJx6CA5zwLZz5K5SVxw+
+-----END CERTIFICATE-----
diff --git a/catalog/pub/ssl/cert/foobar.csr b/catalog/pub/ssl/cert/foobar.csr
new file mode 100644
index 0000000..30b381b
--- /dev/null
+++ b/catalog/pub/ssl/cert/foobar.csr
@@ -0,0 +1,18 @@
+-----BEGIN CERTIFICATE REQUEST-----
+MIIC1DCCAbwCAQAwYzELMAkGA1UEBhMCQ04xEDAOBgNVBAgMB0JlaUppbmcxEDAO
+BgNVBAcMB0JlaUppbmcxDTALBgNVBAoMBENtY2MxEjAQBgNVBAsMCUNtY2Mtb25h
+cDENMAsGA1UEAwwEQ21jYzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AOA7q3IU4BMPwNcv0OYKUkiUywvihTleFZm89oZbzhi8FTSF9fq+w6WoiUxoxCiw
+rRrA9rRpuvSolF/RQWNQkrZ8CtkeMAOTSlvmSkC3d7EU3B5ftmyxZOxOSr5imWTO
+MYbDd3suyGvB9LEnyohpcQdYWSNh4m0TcvBlsoveY/DJD7/h+x2yZV9ZCCm+6dsv
+GhvAJwMwVopCRXENct2yQNq99cDobet6SHpMIfp57bebvmIf0k1x4gHtoE+2/cek
+QJ9PbQuk7ASs3WeGSqwS30cD7+vnGwy3KpIWz7pKmhWhLKwwi4bll9dLBixLKl1C
+w8GUaGMU1Lni3pPb/v/H+RMCAwEAAaAsMBMGCSqGSIb3DQEJAjEGDARDbWNjMBUG
+CSqGSIb3DQEJBzEIDAYxMjM0NTYwDQYJKoZIhvcNAQELBQADggEBAGr8XkV5G9bK
+lPc3jUvmS+KSg9UB1wrvf6kQUGDvCvXqZCGw1mRZekN4rH5c1fk9iLwLqDkWDnNo
+79jzAWV76U62GarTOng32TLTItxI/EeUhQFCf+AI/YcJEdHf8HGrDuvC0iSz6/9q
+Fe5HhVSO7zsHdP28J05wTyII+2k4ecAj3oXutUnGCBg0nlexDmxAZoe8x4XHpqkt
+tEKquZdq3l17+v5DKlKwczQcXUBC1yGw0ki67U5w9fVKzpAf7Frr7YnbGS35i5Pv
+ny4SlXPW167hRQKXCniY5QtCocP+GoPD+81uWwf+bjHyAZ3HCd532YFgXW01yJhM
+imRDxx2gDds=
+-----END CERTIFICATE REQUEST-----
diff --git a/catalog/pub/ssl/cert/foobar.key b/catalog/pub/ssl/cert/foobar.key
new file mode 100644
index 0000000..266f502
--- /dev/null
+++ b/catalog/pub/ssl/cert/foobar.key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEA4DurchTgEw/A1y/Q5gpSSJTLC+KFOV4Vmbz2hlvOGLwVNIX1
++r7DpaiJTGjEKLCtGsD2tGm69KiUX9FBY1CStnwK2R4wA5NKW+ZKQLd3sRTcHl+2
+bLFk7E5KvmKZZM4xhsN3ey7Ia8H0sSfKiGlxB1hZI2HibRNy8GWyi95j8MkPv+H7
+HbJlX1kIKb7p2y8aG8AnAzBWikJFcQ1y3bJA2r31wOht63pIekwh+nntt5u+Yh/S
+TXHiAe2gT7b9x6RAn09tC6TsBKzdZ4ZKrBLfRwPv6+cbDLcqkhbPukqaFaEsrDCL
+huWX10sGLEsqXULDwZRoYxTUueLek9v+/8f5EwIDAQABAoIBAQCL+dPBqHRkkc3w
+xsGiCMlq06+Y4LQHpsrXKNW/8+lJGYgnPITcHV+mtvnWgAQL3paA//pBj0sM1Xui
+AM/PvomHbxGajbStVrHxgmXR1nXaTkpGj7siSO7WcN1J0eUtv2W9WaHpfL/SPMaS
+HGPbGe9cBXPHmaAuNRjoJqP5mj9LHB0SebJImjiaCYsUkBgC0Ooo4UuwOXLYO/ak
+gZrbM8WwY21rRVc3uDyg5Ez8gxbFG3L39t26gpqBYosqNlPe7/JVkTpxUKk1Allf
+fAJNyfpS2CuY+nQWtCleJFtF1Yq9jwfPvtNUTrXeJq97xFqSIRnJbygttsokbPto
+tLqB4rSBAoGBAPPgidT0KyfYVUaWNEXtOOJyh3MCk0ssalRKf+Dap9J9Bgpjldbu
+/tBBrrbxSEAieXe8gKDwgDY2qBcsUUvEY+EWL7tiMBnS4HvK8/4aEIx14xMgiuCS
+bTnMGlIlImjMKdj0iKOd0N2NPQcfr0NTUdZJ/p1o965lq/9i7xcfHinTAoGBAOth
+JqwyGQ6oP005Vry3S/7E7UJjYxMaUfhRmMGoVz+qXAEfq0r4TkNrcEvP7mu72pVe
+q1P4imQjvvPXqoPBdh310a6OCQ7BrFpkOghHBIG0koblncml4hdBSReUA1auW2Qr
+c/MUSeV96DDbI2mZJulVdqINyaAt/JDMnfdcbCvBAoGAYPTI91/ndFzeckSvHYnV
+TrnnvcKtWnqa/03rDzL++4D3ENRMsvmrVpJ2aob8iXrrPb40iUd0QZlzNFtLKss2
+Rjty2JWNuAaNdsnWPRSRtbX8hBMxA11TjWHmqPfYeT+J95YoaJwKeLp5I8bl/+c1
+JvOeBWjA55XGTq8/jLqzXD8CgYEAiQVyJNW5Hn4083iIlK1DkRkEYRxIRYuR4jNl
+8H5V5BsBGipcZfUsYjT+FzQBQDgII+ILbIOH1Im2lG6ctbx+TSyXlrzaavu1oJ0t
+5zmoVvVOQzcR5pwphI4dxZsFYoV3cFWXVw8dgXoNG7vF3qgoLbbxq57JG/UJTSXA
+Y4oq8kECgYEAlgh6v+o6jCUD7l0JWdRtZy52rhC3W/HrhcHE0/l3RjeV+kLIWr9u
+WbNltgZQGvPVQ+ZwPIYj1gaGP17wm5pAsJNSN4LQ1v4Fj/XjT7zdwYwYOrXIJati
+5HTeyHjm+wwOPYrmH4YLGwAh6T1is42E0K2L7LG8HnO4bHbfV2mKji0=
+-----END RSA PRIVATE KEY-----
diff --git a/catalog/pub/utils/__init__.py b/catalog/pub/utils/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/catalog/pub/utils/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/pub/utils/fileutil.py b/catalog/pub/utils/fileutil.py
new file mode 100644
index 0000000..6ddfc72
--- /dev/null
+++ b/catalog/pub/utils/fileutil.py
@@ -0,0 +1,78 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import shutil
+import logging
+import tempfile
+import traceback
+import urllib
+import zipfile
+
+
+logger = logging.getLogger(__name__)
+
+
def make_dirs(path):
    """Create *path* (including parents) with mode 0o777; no-op when it exists."""
    if os.path.exists(path):
        return
    os.makedirs(path, 0o777)
+
+
def delete_dirs(path):
    """Best-effort recursive removal of *path*; errors are logged, never raised."""
    try:
        if not os.path.exists(path):
            return
        shutil.rmtree(path)
    except Exception as err:
        logger.error(traceback.format_exc())
        logger.error("Failed to delete %s:%s", path, err.args[0])
+
+
def download_file_from_http(url, local_dir, file_name):
    """Download *url* into ``local_dir/file_name``.

    Returns (is_download_ok, local_file_name); the flag is False when any
    step fails (the error is logged, not raised).
    """
    # `import urllib` alone does not guarantee the request submodule is
    # loaded; import it explicitly here.
    import urllib.request

    local_file_name = os.path.join(local_dir, file_name)
    is_download_ok = False
    try:
        make_dirs(local_dir)
        # urlopen().read() returns bytes, so the target file must be opened
        # in binary mode (the original 'w' text mode raises TypeError on
        # Python 3); context managers close both handles on any path.
        with urllib.request.urlopen(url) as req:
            with open(local_file_name, 'wb') as save_file:
                save_file.write(req.read())
        is_download_ok = True
    except Exception:
        logger.error(traceback.format_exc())
        logger.error("Failed to download %s to %s.", url, local_file_name)
    return is_download_ok, local_file_name
+
+
def unzip_file(zip_src, dst_dir, csar_path):
    """Extract archive *zip_src* into *dst_dir*.

    Returns the path of *csar_path* inside *dst_dir*, or "" when *zip_src*
    does not exist.
    """
    if not os.path.exists(zip_src):
        return ""
    # Close the archive deterministically; the original leaked the handle.
    with zipfile.ZipFile(zip_src, 'r') as fz:
        # extractall() is equivalent to extracting every member of namelist().
        fz.extractall(dst_dir)
    return os.path.join(dst_dir, csar_path)
+
+
def unzip_csar_to_tmp(zip_src):
    """Extract *zip_src* into a fresh temporary directory and return that directory.

    The caller is responsible for removing the returned directory.
    """
    dirpath = tempfile.mkdtemp()
    # Context manager closes the archive; the original leaked the handle.
    with zipfile.ZipFile(zip_src, 'r') as zip_ref:
        zip_ref.extractall(dirpath)
    return dirpath
+
+
def get_artifact_path(vnf_path, artifact_file):
    """Search *vnf_path* recursively for *artifact_file*.

    Returns the full path of the first match, or None when absent.
    """
    for base, _subdirs, filenames in os.walk(vnf_path):
        if artifact_file in filenames:
            return os.path.join(base, artifact_file)
    return None
diff --git a/catalog/pub/utils/idutil.py b/catalog/pub/utils/idutil.py
new file mode 100644
index 0000000..c2347c1
--- /dev/null
+++ b/catalog/pub/utils/idutil.py
@@ -0,0 +1,20 @@
+# Copyright 2016 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from catalog.pub.redisco import containers as cont
+
+
def get_auto_id(id_type, id_group="auto_id_hash"):
    """Bump and return the counter for *id_type* kept in the Redis hash *id_group*."""
    counter = cont.Hash(id_group)
    counter.hincrby(id_type, 1)
    # NOTE(review): the value is re-read after the increment, so two
    # concurrent callers could observe the same id — confirm single-writer use.
    return counter.hget(id_type)
diff --git a/catalog/pub/utils/jobutil.py b/catalog/pub/utils/jobutil.py
new file mode 100644
index 0000000..3d79c7a
--- /dev/null
+++ b/catalog/pub/utils/jobutil.py
@@ -0,0 +1,145 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import datetime
+import logging
+import uuid
+import traceback
+from functools import reduce
+
+from catalog.pub.database.models import JobStatusModel, JobModel
+from catalog.pub.utils import idutil
+
+logger = logging.getLogger(__name__)
+
+
def enum(**enums):
    """Build a simple enumeration class whose attributes are the keyword pairs."""
    return type('Enum', (), dict(enums))
+
+
# Coarse job state persisted on JobModel.status.
JOB_STATUS = enum(PROCESSING=0, FINISH=1)
# Fine-grained states recorded per JobStatusModel row.
JOB_MODEL_STATUS = enum(STARTED='started', PROCESSING='processing', FINISHED='finished', ERROR='error',
                        TIMEOUT='timeout')
# Known job categories.
JOB_TYPE = enum(CREATE_VNF="create vnf", TERMINATE_VNF="terminate vnf", GRANT_VNF="grant vnf", MANUAL_SCALE_VNF="manual scale vnf",
                HEAL_VNF="heal vnf")
+
+
class JobUtil(object):
    """Static helpers around the JobModel / JobStatusModel Django tables
    that track long-running catalog operations."""

    def __init__(self):
        pass

    @staticmethod
    def __gen_job_id(job_name):
        # Job ids look like "<name>-<uuid1>": unique but still readable.
        return "%s-%s" % (job_name if job_name else "UnknownJob", uuid.uuid1())

    @staticmethod
    def query_job_status(job_id, index_id=-1):
        """Return status rows for *job_id*, newest first.

        With the default index_id (< 0) only the most recent row is
        returned; otherwise every row whose indexid is greater than
        *index_id*.
        """
        # logger.info("Query job status, jobid =[%s], responseid [%d]" % (job_id, index_id))
        jobs = []
        if index_id < 0:
            row = JobStatusModel.objects.filter(jobid=job_id).order_by("-indexid").first()
            if row:
                jobs.append(row)
        else:
            [jobs.append(job) for job in JobStatusModel.objects.filter(jobid=job_id).order_by("-indexid")
             if job.indexid > index_id]

        # logger.info("Query job status, rows=%s" % str(jobs))
        return jobs

    @staticmethod
    def is_job_exists(job_id):
        """Return True when a JobModel row with *job_id* exists."""
        jobs = JobModel.objects.filter(jobid=job_id)
        return len(jobs) > 0

    @staticmethod
    def create_job(inst_type, jobaction, inst_id, user='', job_id=None, res_name=''):
        """Insert a new JobModel row in PROCESSING state and return its job id.

        When *job_id* is None an id is generated from type/action/instance.
        """
        if job_id is None:
            job_id = JobUtil.__gen_job_id(
                '%s-%s-%s' % (str(inst_type).replace(' ', '_'), str(jobaction).replace(' ', '_'), str(inst_id)))
        job = JobModel()
        job.jobid = job_id
        job.jobtype = inst_type
        job.jobaction = jobaction
        job.resid = str(inst_id)
        job.status = JOB_STATUS.PROCESSING
        job.user = user
        job.starttime = datetime.datetime.now().strftime('%Y-%m-%d %X')
        job.progress = 0
        job.resname = res_name
        logger.debug("create a new job, jobid=%s, jobtype=%s, jobaction=%s, resid=%s, status=%d" %
                     (job.jobid, job.jobtype, job.jobaction, job.resid, job.status))
        job.save()
        return job_id

    @staticmethod
    def clear_job(job_id):
        """Delete every JobModel row with *job_id*."""
        [job.delete() for job in JobModel.objects.filter(jobid=job_id)]
        logger.debug("Clear job, job_id=%s" % job_id)

    @staticmethod
    def add_job_status(job_id, progress, status_decs, error_code=""):
        """Append a JobStatusModel row and sync the parent JobModel.

        Raises Exception when the job does not exist.  Progress encodes the
        state: 0 started, 100 finished, 101 partly_finished, >101 error;
        error_code "255" forces the error state.
        """
        jobs = JobModel.objects.filter(jobid=job_id)
        if not jobs:
            logger.error("Job[%s] is not exists, please create job first." % job_id)
            raise Exception("Job[%s] is not exists." % job_id)
        try:
            int_progress = int(progress)
            job_status = JobStatusModel()
            # indexid is a per-job monotonically increasing counter.
            job_status.indexid = int(idutil.get_auto_id(job_id))
            job_status.jobid = job_id
            job_status.status = "processing"
            job_status.progress = int_progress

            if job_status.progress == 0:
                job_status.status = "started"
            elif job_status.progress == 100:
                job_status.status = "finished"
            elif job_status.progress == 101:
                job_status.status = "partly_finished"
            elif job_status.progress > 101:
                job_status.status = "error"

            if error_code == "255":
                job_status.status = "error"

            job_status.descp = status_decs
            # job_status.errcode = error_code
            job_status.errcode = error_code if error_code else "0"
            job_status.addtime = datetime.datetime.now().strftime('%Y-%m-%d %X')
            job_status.save()
            logger.debug("Add a new job status, jobid=%s, indexid=%d,"
                         " status=%s, description=%s, progress=%d, errcode=%s, addtime=%r" %
                         (job_status.jobid, job_status.indexid, job_status.status, job_status.descp,
                          job_status.progress, job_status.errcode, job_status.addtime))

            job = jobs[0]
            job.progress = int_progress
            if job_status.progress >= 100:
                job.status = JOB_STATUS.FINISH
                job.endtime = datetime.datetime.now().strftime('%Y-%m-%d %X')
            job.save()
            logger.debug("update job, jobid=%s, progress=%d" % (job_status.jobid, int_progress))
        except:
            # NOTE(review): bare except silently swallows every failure
            # (including the int() conversion) — callers never see DB errors.
            logger.error(traceback.format_exc())

    @staticmethod
    def clear_job_status(job_id):
        """Delete every JobStatusModel row with *job_id*."""
        [job.delete() for job in JobStatusModel.objects.filter(jobid=job_id)]
        logger.debug("Clear job status, job_id=%s" % job_id)

    @staticmethod
    def get_unfinished_jobs(url_prefix, inst_id, inst_type):
        """Return "<url_prefix>/<jobid>" for every PROCESSING job of the instance."""
        jobs = JobModel.objects.filter(resid=inst_id, jobtype=inst_type, status=JOB_STATUS.PROCESSING)
        progresses = reduce(lambda content, job: content + [url_prefix + "/" + job.jobid], jobs, [])
        return progresses
diff --git a/catalog/pub/utils/restcall.py b/catalog/pub/utils/restcall.py
new file mode 100644
index 0000000..8f03259
--- /dev/null
+++ b/catalog/pub/utils/restcall.py
@@ -0,0 +1,114 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import traceback
+import logging
+import urllib
+import uuid
+import httplib2
+import base64
+
+from catalog.pub.config.config import MSB_SERVICE_IP, MSB_SERVICE_PORT
+
# Authentication modes accepted by call_req; rest_no_auth also disables
# SSL certificate validation there.
rest_no_auth, rest_oneway_auth, rest_bothway_auth = 0, 1, 2
# Status codes are strings because httplib2 reports resp['status'] as str.
HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED = '200', '201', '204', '202'
status_ok_list = [HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED]
HTTP_404_NOTFOUND, HTTP_403_FORBIDDEN, HTTP_401_UNAUTHORIZED, HTTP_400_BADREQUEST = '404', '403', '401', '400'

logger = logging.getLogger(__name__)
+
+
def call_req(base_url, user, passwd, auth_type, resource, method, content='', additional_headers={}):
    """Issue an HTTP request and return ``[code, body, status]``.

    code: 0 = OK status, 1 = HTTP error status / connection retry exhausted,
    2 = URL error, 3 = unexpected exception, 4 = anything else.

    NOTE(review): ``additional_headers={}`` is a shared mutable default; it
    is only read here, but callers must not mutate it.
    """
    callid = str(uuid.uuid1())
    logger.debug("[%s]call_req('%s','%s','%s',%s,'%s','%s','%s')" % (
        callid, base_url, user, passwd, auth_type, resource, method, content))
    ret = None
    resp_status = ''
    try:
        full_url = combine_url(base_url, resource)
        headers = {'content-type': 'application/json', 'accept': 'application/json'}
        if user:
            # HTTP Basic auth built from user:passwd.
            headers['Authorization'] = 'Basic %s' % base64.b64encode(bytes('%s:%s' % (user, passwd), "utf-8")).decode()
        ca_certs = None
        if additional_headers:
            headers.update(additional_headers)
        # Retry up to 3 times when the server connection is not ready.
        for retry_times in range(3):
            http = httplib2.Http(ca_certs=ca_certs, disable_ssl_certificate_validation=(auth_type == rest_no_auth))
            http.follow_all_redirects = True
            try:
                resp, resp_content = http.request(full_url, method=method.upper(), body=content, headers=headers)
                resp_status, resp_body = resp['status'], resp_content
                logger.debug("[%s][%d]status=%s)" % (callid, retry_times, resp_status))
                if headers['accept'] == 'application/json':
                    # JSON responses are decoded to text before returning.
                    resp_body = resp_content.decode('UTF-8')
                    logger.debug("resp_body=%s", resp_body)
                if resp_status in status_ok_list:
                    ret = [0, resp_body, resp_status]
                else:
                    ret = [1, resp_body, resp_status]
                break
            except Exception as ex:
                if 'httplib.ResponseNotReady' in str(sys.exc_info()):
                    # Transient: leave a failure result and try again.
                    logger.debug("retry_times=%d", retry_times)
                    logger.error(traceback.format_exc())
                    ret = [1, "Unable to connect to %s" % full_url, resp_status]
                    continue
                raise ex
    except urllib.error.URLError as err:
        # NOTE(review): only `import urllib` appears at module level; this
        # relies on urllib.error being loaded elsewhere — confirm.
        ret = [2, str(err), resp_status]
    except Exception as ex:
        logger.error(traceback.format_exc())
        logger.error("[%s]ret=%s" % (callid, str(sys.exc_info())))
        res_info = str(sys.exc_info())
        if 'httplib.ResponseNotReady' in res_info:
            res_info = "The URL[%s] request failed or is not responding." % full_url
        ret = [3, res_info, resp_status]
    except:
        # Catch-all so the caller always receives a [code, body, status] list.
        logger.error(traceback.format_exc())
        ret = [4, str(sys.exc_info()), resp_status]

    logger.debug("[%s]ret=%s" % (callid, str(ret)))
    return ret
+
+
def req_by_msb(resource, method, content=''):
    """Issue *method* on *resource* through the MSB gateway, unauthenticated."""
    msb_base = "http://%s:%s/" % (MSB_SERVICE_IP, MSB_SERVICE_PORT)
    return call_req(msb_base, "", "", rest_no_auth, resource, method, content)
+
+
def upload_by_msb(resource, method, file_data={}):
    """Send raw file bytes to *resource* through the MSB gateway.

    Returns [0, body, status] on an OK status, [1, message, status] otherwise.
    """
    full_url = "http://%s:%s/%s" % (MSB_SERVICE_IP, MSB_SERVICE_PORT, resource)
    headers = {'Content-Type': 'application/octet-stream'}
    resp, resp_content = httplib2.Http().request(full_url, method=method.upper(), body=file_data, headers=headers)
    resp_status = resp['status']
    resp_body = resp_content.decode('UTF-8')
    if resp_status in status_ok_list:
        logger.debug("resp_body=%s", resp_body)
        return [0, resp_body, resp_status]
    logger.error("Status code is %s, detail is %s.", resp_status, resp_body)
    return [1, "Failed to upload file.", resp_status]
+
+
def combine_url(base_url, resource):
    """Join *base_url* and *resource* with exactly one '/' between them."""
    base_has_slash = base_url.endswith('/')
    res_has_slash = resource.startswith('/')
    if base_has_slash and res_has_slash:
        # Both contribute a slash: drop one.
        return base_url[:-1] + resource
    if not base_has_slash and not res_has_slash:
        # Neither contributes a slash: insert one.
        return base_url + '/' + resource
    # Exactly one side has the slash already.
    return base_url + resource
diff --git a/catalog/pub/utils/syscomm.py b/catalog/pub/utils/syscomm.py
new file mode 100644
index 0000000..89219ec
--- /dev/null
+++ b/catalog/pub/utils/syscomm.py
@@ -0,0 +1,19 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import inspect
+
+
def fun_name():
    """Return the name of the calling function (one frame up the stack)."""
    return inspect.stack()[1].function
diff --git a/catalog/pub/utils/tests.py b/catalog/pub/utils/tests.py
new file mode 100644
index 0000000..0f02467
--- /dev/null
+++ b/catalog/pub/utils/tests.py
@@ -0,0 +1,221 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import platform
+import unittest
+import mock
+from . import fileutil
+import urllib
+from . import syscomm
+from . import timeutil
+from . import values
+
+from catalog.pub.database.models import JobStatusModel, JobModel
+from catalog.pub.utils.jobutil import JobUtil
+
+
class MockReq():
    """Stand-in for the response object returned by urllib.request.urlopen."""

    def read(self):
        """Return the canned one-character payload used by the tests."""
        return "1"

    def close(self):
        """Nothing to release on the fake response."""
        pass
+
+
class UtilsTest(unittest.TestCase):
    """Unit tests for the catalog.pub.utils helpers (fileutil, jobutil,
    syscomm, timeutil, values)."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_create_and_delete_dir(self):
        # make_dirs must be idempotent; delete_dirs cleans up.
        dirs = "abc/def/hij"
        fileutil.make_dirs(dirs)
        fileutil.make_dirs(dirs)
        fileutil.delete_dirs(dirs)

    @mock.patch.object(urllib.request, 'urlopen')
    def test_download_file_from_http(self, mock_urlopen):
        # urlopen is mocked, so no real network access happens.
        mock_urlopen.return_value = MockReq()
        fileutil.delete_dirs("abc")
        is_ok, f_name = fileutil.download_file_from_http("1", "abc", "1.txt")
        self.assertTrue(is_ok)
        if 'Windows' in platform.system():
            self.assertTrue(f_name.endswith("abc\\1.txt"))
        else:
            self.assertTrue(f_name.endswith("abc/1.txt"))
        fileutil.delete_dirs("abc")

    def test_query_job_status(self):
        # Default query returns only the newest row; with an index, all newer rows.
        job_id = "1"
        JobStatusModel.objects.filter().delete()
        JobStatusModel(
            indexid=1,
            jobid=job_id,
            status="success",
            progress=10
        ).save()
        JobStatusModel(
            indexid=2,
            jobid=job_id,
            status="success",
            progress=50
        ).save()
        JobStatusModel(
            indexid=3,
            jobid=job_id,
            status="success",
            progress=100
        ).save()
        jobs = JobUtil.query_job_status(job_id)
        self.assertEqual(1, len(jobs))
        self.assertEqual(3, jobs[0].indexid)
        jobs = JobUtil.query_job_status(job_id, 1)
        self.assertEqual(2, len(jobs))
        self.assertEqual(3, jobs[0].indexid)
        self.assertEqual(2, jobs[1].indexid)
        JobStatusModel.objects.filter().delete()

    def test_is_job_exists(self):
        job_id = "1"
        JobModel.objects.filter().delete()
        JobModel(
            jobid=job_id,
            jobtype="1",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        self.assertTrue(JobUtil.is_job_exists(job_id))
        JobModel.objects.filter().delete()

    def test_create_job(self):
        # NOTE(review): job_id is passed as int 5 but matched with "5";
        # this relies on Django's field coercion.
        job_id = "5"
        JobModel.objects.filter().delete()
        JobUtil.create_job(
            inst_type="1",
            jobaction="2",
            inst_id="3",
            user="4",
            job_id=5,
            res_name="6")
        self.assertEqual(1, len(JobModel.objects.filter(jobid=job_id)))
        JobModel.objects.filter().delete()

    def test_clear_job(self):
        job_id = "1"
        JobModel.objects.filter().delete()
        JobModel(
            jobid=job_id,
            jobtype="1",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        JobUtil.clear_job(job_id)
        self.assertEqual(0, len(JobModel.objects.filter(jobid=job_id)))

    def test_add_job_status_when_job_is_not_created(self):
        # add_job_status must raise when the parent job row is missing.
        JobModel.objects.filter().delete()
        self.assertRaises(
            Exception,
            JobUtil.add_job_status,
            job_id="1",
            progress=1,
            status_decs="2",
            error_code="0"
        )

    def test_add_job_status_normal(self):
        job_id = "1"
        JobModel.objects.filter().delete()
        JobStatusModel.objects.filter().delete()
        JobModel(
            jobid=job_id,
            jobtype="1",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        JobUtil.add_job_status(
            job_id="1",
            progress=1,
            status_decs="2",
            error_code="0"
        )
        self.assertEqual(1, len(JobStatusModel.objects.filter(jobid=job_id)))
        JobStatusModel.objects.filter().delete()
        JobModel.objects.filter().delete()

    def test_clear_job_status(self):
        job_id = "1"
        JobStatusModel.objects.filter().delete()
        JobStatusModel(
            indexid=1,
            jobid=job_id,
            status="success",
            progress=10
        ).save()
        JobUtil.clear_job_status(job_id)
        self.assertEqual(0, len(JobStatusModel.objects.filter(jobid=job_id)))

    def test_get_unfinished_jobs(self):
        # All three PROCESSING jobs of the instance must be reported as URLs.
        JobModel.objects.filter().delete()
        JobModel(
            jobid="11",
            jobtype="InstVnf",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        JobModel(
            jobid="22",
            jobtype="InstVnf",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        JobModel(
            jobid="33",
            jobtype="InstVnf",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        progresses = JobUtil.get_unfinished_jobs(
            url_prefix="/vnfinst",
            inst_id="3",
            inst_type="InstVnf"
        )
        expect_progresses = ['/vnfinst/11', '/vnfinst/22', '/vnfinst/33']
        self.assertEqual(expect_progresses, progresses)
        JobModel.objects.filter().delete()

    def test_fun_name(self):
        self.assertEqual("test_fun_name", syscomm.fun_name())

    def test_now_time(self):
        self.assertIn(":", timeutil.now_time())
        self.assertIn("-", timeutil.now_time())

    def test_ignore_case_get(self):
        # Lookup is case-insensitive; the last argument is the default.
        data = {
            "Abc": "def",
            "HIG": "klm"
        }
        self.assertEqual("def", values.ignore_case_get(data, 'ABC'))
        self.assertEqual("def", values.ignore_case_get(data, 'abc'))
        self.assertEqual("klm", values.ignore_case_get(data, 'hig'))
        self.assertEqual("bbb", values.ignore_case_get(data, 'aaa', 'bbb'))
diff --git a/catalog/pub/utils/timeutil.py b/catalog/pub/utils/timeutil.py
new file mode 100644
index 0000000..1d97e9d
--- /dev/null
+++ b/catalog/pub/utils/timeutil.py
@@ -0,0 +1,19 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+
+
def now_time(fmt="%Y-%m-%d %H:%M:%S"):
    """Return the current local time rendered with *fmt* (default 'YYYY-MM-DD HH:MM:SS')."""
    current = datetime.datetime.now()
    return current.strftime(fmt)
diff --git a/catalog/pub/utils/toscaparser/__init__.py b/catalog/pub/utils/toscaparser/__init__.py
new file mode 100644
index 0000000..62ead96
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/__init__.py
@@ -0,0 +1,54 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+
+from catalog.pub.utils.toscaparser.nsdmodel import NsdInfoModel
+from catalog.pub.utils.toscaparser.pnfmodel import PnfdInfoModel
+from catalog.pub.utils.toscaparser.sdmodel import SdInfoModel
+from catalog.pub.utils.toscaparser.vnfdmodel import EtsiVnfdInfoModel
+
+
def parse_nsd(path, input_parameters=None):
    """Parse the NSD at *path* and return the model serialized as a JSON string.

    input_parameters: optional list of user-supplied inputs (default: empty).
    """
    if input_parameters is None:
        # Fresh list per call; the original mutable default ([]) is shared
        # between calls.
        input_parameters = []
    tosca_obj = NsdInfoModel(path, input_parameters).model
    str_response = json.dumps(tosca_obj, default=lambda obj: obj.__dict__)
    # Downstream consumers expect "" rather than JSON null for absent values.
    return str_response.replace(': null', ': ""')
+
+
def parse_sd(path, input_parameters=None):
    """Parse the service descriptor at *path* and return it as a JSON string.

    input_parameters: optional list of user-supplied inputs (default: empty).
    """
    if input_parameters is None:
        # Fresh list per call; the original mutable default ([]) is shared
        # between calls.
        input_parameters = []
    tosca_obj = SdInfoModel(path, input_parameters)
    str_response = json.dumps(tosca_obj, default=lambda obj: obj.__dict__)
    # Downstream consumers expect "" rather than JSON null for absent values.
    return str_response.replace(': null', ': ""')
+
+
def parse_vnfd(path, input_parameters=None, isETSI=True):
    """Parse the VNFD at *path* and return the model as a JSON string.

    input_parameters: optional list of user-supplied inputs (default: empty).
    isETSI: when False an empty model ({}) is serialized instead.
    """
    if input_parameters is None:
        # Fresh list per call; the original mutable default ([]) is shared
        # between calls.
        input_parameters = []
    if isETSI:
        tosca_obj = EtsiVnfdInfoModel(path, input_parameters)
    else:
        tosca_obj = {}
    str_response = json.dumps(tosca_obj, default=lambda obj: obj.__dict__)
    # Downstream consumers expect "" rather than JSON null for absent values.
    return str_response.replace(': null', ': ""')
+
+
def parse_pnfd(path, input_parameters=None, isETSI=True):
    """Parse the PNFD at *path* and return the model as a JSON string.

    input_parameters: optional list of user-supplied inputs (default: empty).
    isETSI: when False an empty model ({}) is serialized instead.
    """
    if input_parameters is None:
        # Fresh list per call; the original mutable default ([]) is shared
        # between calls.
        input_parameters = []
    if isETSI:
        tosca_obj = PnfdInfoModel(path, input_parameters)
    else:
        tosca_obj = {}
    str_response = json.dumps(tosca_obj, default=lambda obj: obj.__dict__)
    # Downstream consumers expect "" rather than JSON null for absent values.
    return str_response.replace(': null', ': ""')
diff --git a/catalog/pub/utils/toscaparser/basemodel.py b/catalog/pub/utils/toscaparser/basemodel.py
new file mode 100644
index 0000000..6ed26aa
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/basemodel.py
@@ -0,0 +1,534 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ftplib
+import json
+import logging
+import os
+import re
+import shutil
+import urllib
+
+# import paramiko
+from toscaparser.tosca_template import ToscaTemplate
+from toscaparser.properties import Property
+from toscaparser.functions import Function, Concat, GetInput, get_function, function_mappings
+from catalog.pub.utils.toscaparser.graph import Graph
+
+from catalog.pub.utils.toscaparser.dataentityext import DataEntityExt
+
+logger = logging.getLogger(__name__)
+
# Keys used when reading raw TOSCA template dicts (tosca.tpl / entity_tpl).
METADATA = "metadata"
PROPERTIES = "properties"
DESCRIPTION = "description"
REQUIREMENTS = "requirements"
INTERFACES = "interfaces"
TOPOLOGY_TEMPLATE = "topology_template"
INPUTS = "inputs"
CAPABILITIES = "capabilities"
ATTRIBUTES = "attributes"
ARTIFACTS = "artifacts"
DERIVED_FROM = "derived_from"

# Keys used in the normalized node dicts produced by BaseInfoModel.buildNode,
# plus the root type names that terminate derived_from chains.
NODE_NAME = "name"
NODE_TYPE = "nodeType"
NODE_ROOT = "tosca.nodes.Root"
GROUP_TYPE = "groupType"
GROUPS_ROOT = "tosca.groups.Root"
+
+
class BaseInfoModel(object):
    """Common machinery for parsed-TOSCA model wrappers (NSD/VNFD/PNFD/SD).

    Subclasses override ``parseModel`` to walk the ``ToscaTemplate`` built
    here and populate their own attributes.  The base class provides:
    package download (http/ftp/sftp URLs), input-parameter validation,
    ``ToscaTemplate`` construction, and a toolbox of helpers for turning
    toscaparser node templates into plain dicts.
    """

    def __init__(self, path=None, params=None, tosca=None):
        # Either an already-built ToscaTemplate is injected (tosca), or one
        # is created from the (possibly remote) package at *path*.
        if tosca:
            _tosca = tosca
        else:
            _tosca = self.buildToscaTemplate(path, params)
        self.description = getattr(_tosca, "description", "")
        self.parseModel(_tosca)

    def parseModel(self, tosca):
        """Hook for subclasses; the base implementation parses nothing."""
        pass

    def buildInputs(self, tosca):
        """Return the raw ``inputs`` dict of the topology template ({} if absent)."""
        topo = tosca.tpl.get(TOPOLOGY_TEMPLATE, None)
        return topo.get(INPUTS, {}) if topo else {}

    def buildToscaTemplate(self, path, params):
        """Download (if remote), validate params, and build a ToscaTemplate.

        A temporary downloaded copy is always removed in the finally block;
        the original *path* is never deleted.
        """
        file_name = None
        try:
            file_name = self._check_download_file(path)
            valid_params = self._validate_input_params(file_name, params)
            return self._create_tosca_template(file_name, valid_params)
        finally:
            if file_name is not None and file_name != path and os.path.exists(file_name):
                try:
                    os.remove(file_name)
                except Exception as e:
                    logger.error("Failed to parse package, error: %s", e.args[0])

    def _validate_input_params(self, path, params):
        """Coerce user-supplied inputs to the types declared by the template.

        *params* may be a list of ``{'key': .., 'value': ..}`` dicts (it is
        flattened to a plain dict first) or already a dict.  Only keys that
        match a declared template input are kept; unknown keys are dropped
        silently.
        """
        valid_params = {}
        inputs = {}
        if isinstance(params, list):
            for param in params:
                key = param.get('key', 'undefined')
                value = param.get('value', 'undefined')
                inputs[key] = value
            params = inputs

        if params:
            # Parse once without params just to learn the declared inputs.
            tmp = self._create_tosca_template(path, None)
            if isinstance(params, dict):
                for key, value in list(params.items()):
                    if hasattr(tmp, 'inputs') and len(tmp.inputs) > 0:
                        for input_def in tmp.inputs:
                            if (input_def.name == key):
                                valid_params[key] = DataEntityExt.validate_datatype(input_def.type, value)
        return valid_params

    def _create_tosca_template(self, file_name, valid_params):
        """Build a ToscaTemplate, cleaning up its temp dir afterwards.

        NOTE(review): parse failures are only printed, so callers may
        receive None.  The debug print below dereferences
        ``tosca_tpl.__dict__`` unconditionally inside the finally block,
        which raises AttributeError when construction failed — confirm
        whether the debug dump should be guarded or removed.
        """
        tosca_tpl = None
        try:
            tosca_tpl = ToscaTemplate(path=file_name,
                                      parsed_params=valid_params,
                                      no_required_paras_check=True,
                                      debug_mode=True)
        except Exception as e:
            print(e.args[0])
        finally:
            if tosca_tpl is not None and hasattr(tosca_tpl, "temp_dir") and os.path.exists(tosca_tpl.temp_dir):
                try:
                    shutil.rmtree(tosca_tpl.temp_dir)
                except Exception as e:
                    logger.error("Failed to create tosca template, error: %s", e.args[0])
            print("-----------------------------")
            print('\n'.join(['%s:%s' % item for item in list(tosca_tpl.__dict__.items())]))
            print("-----------------------------")
        return tosca_tpl

    def _check_download_file(self, path):
        """Fetch *path* to a local file when it is an ftp/sftp/http(s) URL.

        Returns the local file name, or *path* unchanged for local files.
        """
        if (path.startswith("ftp") or path.startswith("sftp")):
            return self.downloadFileFromFtpServer(path)
        elif (path.startswith("http")):
            return self.download_file_from_httpserver(path)
        return path

    def download_file_from_httpserver(self, path):
        """Download an http(s) URL into the working directory.

        NOTE(review): ``path.encode("utf-8")`` produces bytes, after which
        ``str.split(path, '/')`` raises TypeError on Python 3; also
        ``urllib.request`` is used while only ``import urllib`` appears at
        the top of the file — this path looks broken under Python 3 and
        should be verified.
        """
        path = path.encode("utf-8")
        tmps = str.split(path, '/')
        localFileName = tmps[len(tmps) - 1]
        urllib.request.urlretrieve(path, localFileName)
        return localFileName

    def downloadFileFromFtpServer(self, path):
        """Download an ftp:// or sftp:// URL of the form
        ``proto://user:pwd@host[:port]/remote/path``.

        NOTE(review): same bytes-vs-str issue as the http helper
        (``path.encode`` followed by ``str.split``); and when the URL
        matches neither the 2-part nor 3-part split, the local variables
        below are used unbound — confirm the expected URL formats.
        """
        path = path.encode("utf-8")
        tmp = str.split(path, '://')
        protocol = tmp[0]
        tmp = str.split(tmp[1], ':')
        if len(tmp) == 2:
            # No explicit port: default 21 for ftp, 22 for sftp.
            userName = tmp[0]
            tmp = str.split(tmp[1], '@')
            userPwd = tmp[0]
            index = tmp[1].index('/')
            hostIp = tmp[1][0:index]
            remoteFileName = tmp[1][index:len(tmp[1])]
            if protocol.lower() == 'ftp':
                hostPort = 21
            else:
                hostPort = 22

        if len(tmp) == 3:
            # Explicit port present after the host.
            userName = tmp[0]
            userPwd = str.split(tmp[1], '@')[0]
            hostIp = str.split(tmp[1], '@')[1]
            index = tmp[2].index('/')
            hostPort = tmp[2][0:index]
            remoteFileName = tmp[2][index:len(tmp[2])]

        localFileName = str.split(remoteFileName, '/')
        localFileName = localFileName[len(localFileName) - 1]

        if protocol.lower() == 'sftp':
            # NOTE(review): sftp_get is commented out below, so this branch
            # raises AttributeError — confirm whether sftp support is meant
            # to be disabled.
            self.sftp_get(userName, userPwd, hostIp, hostPort, remoteFileName, localFileName)
        else:
            self.ftp_get(userName, userPwd, hostIp, hostPort, remoteFileName, localFileName)
        return localFileName

    # def sftp_get(self, userName, userPwd, hostIp, hostPort, remoteFileName, localFileName):
    #     # return
    #     t = None
    #     try:
    #         t = paramiko.Transport(hostIp, int(hostPort))
    #         t.connect(username=userName, password=userPwd)
    #         sftp = paramiko.SFTPClient.from_transport(t)
    #         sftp.get(remoteFileName, localFileName)
    #     finally:
    #         if t is not None:
    #             t.close()

    def ftp_get(self, userName, userPwd, hostIp, hostPort, remoteFileName, localFileName):
        """Fetch *remoteFileName* over plain FTP into *localFileName*.

        NOTE(review): the FTP connection itself is never quit/closed; only
        the local file handle is closed in the finally block.
        """
        f = None
        try:
            ftp = ftplib.FTP()
            ftp.connect(hostIp, hostPort)
            ftp.login(userName, userPwd)
            f = open(localFileName, 'wb')
            ftp.retrbinary('RETR ' + remoteFileName, f.write, 1024)
            f.close()
        finally:
            if f is not None:
                f.close()

    def buildMetadata(self, tosca):
        """Return the template-level ``metadata`` dict ({} if absent)."""
        return tosca.tpl.get(METADATA, {}) if tosca else {}

    def buildNode(self, nodeTemplate, tosca):
        """Normalize a toscaparser NodeTemplate into a plain dict.

        The dict carries name, nodeType, description, metadata, resolved
        properties, requirements, and (when present) capabilities,
        artifacts and interfaces.
        """
        inputs = tosca.inputs
        parsed_params = tosca.parsed_params
        ret = {}
        ret[NODE_NAME] = nodeTemplate.name
        ret[NODE_TYPE] = nodeTemplate.type
        if DESCRIPTION in nodeTemplate.entity_tpl:
            ret[DESCRIPTION] = nodeTemplate.entity_tpl[DESCRIPTION]
        else:
            ret[DESCRIPTION] = ''
        if METADATA in nodeTemplate.entity_tpl:
            ret[METADATA] = nodeTemplate.entity_tpl[METADATA]
        else:
            ret[METADATA] = ''
        props = self.buildProperties_ex(nodeTemplate, tosca.topology_template)
        ret[PROPERTIES] = self.verify_properties(props, inputs, parsed_params)
        ret[REQUIREMENTS] = self.build_requirements(nodeTemplate)
        self.buildCapabilities(nodeTemplate, inputs, ret)
        self.buildArtifacts(nodeTemplate, inputs, ret)
        interfaces = self.build_interfaces(nodeTemplate)
        if interfaces:
            ret[INTERFACES] = interfaces
        return ret

    def buildProperties(self, nodeTemplate, parsed_params):
        """Legacy property resolver: resolves get_input against parsed_params.

        Unresolved get_input references are kept as
        ``{function_name: input_name}`` dicts; entity attributes are folded
        in as strings.
        """
        properties = {}
        isMappingParams = parsed_params and len(parsed_params) > 0
        for k, item in list(nodeTemplate.get_properties().items()):
            properties[k] = item.value
            if isinstance(item.value, GetInput):
                if item.value.result() and isMappingParams:
                    properties[k] = DataEntityExt.validate_datatype(item.type, item.value.result())
                else:
                    tmp = {}
                    tmp[item.value.name] = item.value.input_name
                    properties[k] = tmp
        if ATTRIBUTES in nodeTemplate.entity_tpl:
            for k, item in list(nodeTemplate.entity_tpl[ATTRIBUTES].items()):
                properties[k] = str(item)
        return properties

    def buildProperties_ex(self, nodeTemplate, topology_template, properties=None):
        """Recursively resolve a property tree to plain values.

        Handles toscaparser Function objects (including one level of nested
        functions inside Concat), nested dicts/lists, and raw intrinsic-
        function dicts whose key appears in ``function_mappings`` (e.g. a
        bare ``CONCAT``).  Non-string scalars are JSON-encoded.
        """
        if properties is None:
            properties = nodeTemplate.get_properties()
        _properties = {}
        if isinstance(properties, dict):
            for name, prop in list(properties.items()):
                if isinstance(prop, Property):
                    if isinstance(prop.value, Function):
                        if isinstance(prop.value, Concat):  # support one layer inner function.
                            value_str = ''
                            for arg in prop.value.args:
                                if isinstance(arg, str):
                                    value_str += arg
                                elif isinstance(arg, dict):
                                    raw_func = {}
                                    for k, v in list(arg.items()):
                                        func_args = []
                                        func_args.append(v)
                                        raw_func[k] = func_args
                                    func = get_function(topology_template, nodeTemplate, raw_func)
                                    value_str += str(func.result())
                            _properties[name] = value_str
                        else:
                            _properties[name] = prop.value.result()
                    elif isinstance(prop.value, dict) or isinstance(prop.value, list):
                        _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop.value)
                    elif prop.type == 'string':
                        _properties[name] = prop.value
                    else:
                        _properties[name] = json.dumps(prop.value)
                elif isinstance(prop, dict):
                    _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop)
                elif isinstance(prop, list):
                    _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop)
                elif name in function_mappings:
                    # A raw intrinsic-function dict entry such as
                    # {'CONCAT': [...]}; evaluated in place.  Note the early
                    # return on the non-CONCAT branch replaces the whole
                    # dict with the function result.
                    raw_func = {}
                    func_args = []
                    func_args.append(prop)
                    raw_func[name] = func_args
                    if name == 'CONCAT':
                        value_str = ''
                        for arg in prop:
                            if isinstance(arg, str):
                                value_str += arg
                            elif isinstance(arg, dict):
                                raw_func = {}
                                for k, v in list(arg.items()):
                                    func_args = []
                                    func_args.append(v)
                                    raw_func[k] = func_args
                                value_str += str(
                                    get_function(topology_template, nodeTemplate, raw_func).result())
                        value = value_str
                    else:
                        return get_function(topology_template, nodeTemplate, raw_func).result()
                else:
                    _properties[name] = prop
        elif isinstance(properties, list):
            value = []
            for para in properties:
                if isinstance(para, dict) or isinstance(para, list):
                    value.append(self.buildProperties_ex(nodeTemplate, topology_template, para))
                else:
                    value.append(para)
            return value
        return _properties

    def verify_properties(self, props, inputs, parsed_params):
        """Substitute remaining get_input references in each property value."""
        ret_props = {}
        if (props and len(props) > 0):
            for key, value in list(props.items()):
                ret_props[key] = self._verify_value(value, inputs, parsed_params)
                # if isinstance(value, str):
                #     ret_props[key] = self._verify_string(inputs, parsed_params, value);
                #     continue
                # if isinstance(value, list):
                #     ret_props[key] = map(lambda x: self._verify_dict(inputs, parsed_params, x), value)
                #     continue
                # if isinstance(value, dict):
                #     ret_props[key] = self._verify_map(inputs, parsed_params, value)
                #     continue
                # ret_props[key] = value
        return ret_props

    def build_requirements(self, node_template):
        """Collect requirements, dropping entries whose target node does not
        exist in the template (aria-parser leftovers)."""
        rets = []
        for req in node_template.requirements:
            for req_name, req_value in list(req.items()):
                if (isinstance(req_value, dict)):
                    if ('node' in req_value and req_value['node'] not in node_template.templates):
                        continue  # No target requirement for aria parser, not add to result.
                    rets.append({req_name: req_value})
        return rets

    def buildCapabilities(self, nodeTemplate, inputs, ret):
        """Copy the node's capabilities into *ret*, substituting get_input
        references by the declared input's default value (via a JSON
        round-trip).

        NOTE(review): a get_input naming an undeclared input makes the
        ``[...][0]`` lookup raise IndexError — confirm inputs are always
        declared.
        """
        capabilities = json.dumps(nodeTemplate.entity_tpl.get(CAPABILITIES, None))
        match = re.findall(r'\{"get_input":\s*"([\w|\-]+)"\}', capabilities)
        for m in match:
            aa = [input_def for input_def in inputs if m == input_def.name][0]
            capabilities = re.sub(r'\{"get_input":\s*"([\w|\-]+)"\}', json.dumps(aa.default), capabilities, 1)
        if capabilities != 'null':
            ret[CAPABILITIES] = json.loads(capabilities)

    def buildArtifacts(self, nodeTemplate, inputs, ret):
        """Copy the node's artifacts into *ret*; same get_input substitution
        strategy (and the same undeclared-input caveat) as
        ``buildCapabilities``."""
        artifacts = json.dumps(nodeTemplate.entity_tpl.get('artifacts', None))
        match = re.findall(r'\{"get_input":\s*"([\w|\-]+)"\}', artifacts)
        for m in match:
            aa = [input_def for input_def in inputs if m == input_def.name][0]
            artifacts = re.sub(r'\{"get_input":\s*"([\w|\-]+)"\}', json.dumps(aa.default), artifacts, 1)
        if artifacts != 'null':
            ret[ARTIFACTS] = json.loads(artifacts)

    def build_interfaces(self, node_template):
        """Return the raw ``interfaces`` section, or None when absent."""
        if INTERFACES in node_template.entity_tpl:
            return node_template.entity_tpl[INTERFACES]
        return None

    def isNodeTypeX(self, node, nodeTypes, x):
        """True when the node's type equals *x* or derives from it.

        Walks the derived_from chain in *nodeTypes*; stops at
        tosca.nodes.Root or at a self-referencing derivation.
        """
        node_type = node[NODE_TYPE]
        while node_type != x:
            node_type_derived = node_type
            node_type = nodeTypes[node_type][DERIVED_FROM]
            if node_type == NODE_ROOT or node_type == node_type_derived:
                return False
        return True

    def get_requirement_node_name(self, req_value):
        """Return the target node name of a requirement value."""
        return self.get_prop_from_obj(req_value, 'node')

    def getRequirementByNodeName(self, nodeTemplates, storage_name, prop):
        """Return *prop* of the node named *storage_name* (None if not found)."""
        for node in nodeTemplates:
            if node[NODE_NAME] == storage_name:
                if prop in node:
                    return node[prop]

    def get_prop_from_obj(self, obj, prop):
        """Read *prop* from a requirement value that may be a bare string
        (treated as the value itself) or a dict."""
        if isinstance(obj, str):
            return obj
        if (isinstance(obj, dict) and prop in obj):
            return obj[prop]
        return None

    def getNodeDependencys(self, node):
        """Return the node's 'dependency' requirement values."""
        return self.getRequirementByName(node, 'dependency')

    def getRequirementByName(self, node, requirementName):
        """Collect all requirement values keyed *requirementName*."""
        requirements = []
        if REQUIREMENTS in node:
            for item in node[REQUIREMENTS]:
                for key, value in list(item.items()):
                    if key == requirementName:
                        requirements.append(value)
        return requirements

    def _verify_value(self, value, inputs, parsed_params):
        """Resolve get_input references in a scalar, list or dict value."""
        if value == '{}':
            return ''
        if isinstance(value, str):
            return self._verify_string(inputs, parsed_params, value)
        if isinstance(value, list) or isinstance(value, dict):
            return self._verify_object(value, inputs, parsed_params)
        return value

    def _verify_object(self, value, inputs, parsed_params):
        # Containers are resolved textually via a JSON round-trip.
        s = self._verify_string(inputs, parsed_params, json.dumps(value))
        return json.loads(s)

    def _get_input_name(self, getInput):
        """Extract the input name from a serialized '{"get_input": "x"}'."""
        input_name = getInput.split(':')[1]
        input_name = input_name.strip()
        return input_name.replace('"', '').replace('}', '')

    def _verify_string(self, inputs, parsed_params, value):
        """Replace serialized get_input references with the parsed parameter
        value when available, otherwise with the input's declared default."""
        getInputs = re.findall(r'{"get_input": "[a-zA-Z_0-9]+"}', value)
        for getInput in getInputs:
            input_name = self._get_input_name(getInput)
            if parsed_params and input_name in parsed_params:
                value = value.replace(getInput, json.dumps(parsed_params[input_name]))
            else:
                for input_def in inputs:
                    if input_def.default and input_name == input_def.name:
                        value = value.replace(getInput, json.dumps(input_def.default))
        return value

    def get_node_by_name(self, node_templates, name):
        """Return the normalized node dict with the given name, or None."""
        for node in node_templates:
            if node[NODE_NAME] == name:
                return node
        return None

    def getCapabilityByName(self, node, capabilityName):
        """Return the named capability of *node*, or None."""
        if CAPABILITIES in node and capabilityName in node[CAPABILITIES]:
            return node[CAPABILITIES][capabilityName]
        return None

    def get_base_path(self, tosca):
        """Directory containing the parsed template file."""
        fpath, fname = os.path.split(tosca.path)
        return fpath

    def build_artifacts(self, node):
        """Flatten a node's artifacts into a list of dicts with
        'artifact_name' and 'file' keys (dict-valued artifacts are merged
        in)."""
        rets = []
        if ARTIFACTS in node and len(node[ARTIFACTS]) > 0:
            artifacts = node[ARTIFACTS]
            for name, value in list(artifacts.items()):
                ret = {}
                ret['artifact_name'] = name
                ret['file'] = value
                if isinstance(value, dict):
                    ret.update(value)
                rets.append(ret)
        else:
            # TODO It is workaround for SDC-1900.
            # Hard-coded fallback image when no artifacts are declared.
            logger.error("VCPE specific code")
            ret = {}
            ret['artifact_name'] = "sw_image"
            ret['file'] = "ubuntu_16.04"
            ret['type'] = "tosca.artifacts.nfv.SwImage"
            rets.append(ret)

        return rets

    def get_node_by_req(self, node_templates, req):
        """Return the node targeted by requirement value *req*."""
        req_node_name = self.get_requirement_node_name(req)
        return self.get_node_by_name(node_templates, req_node_name)

    def isGroupTypeX(self, group, groupTypes, x):
        """Group-type analogue of ``isNodeTypeX``."""
        group_type = group[GROUP_TYPE]
        while group_type != x:
            group_type_derived = group_type
            group_type = groupTypes[group_type][DERIVED_FROM]
            if group_type == GROUPS_ROOT or group_type == group_type_derived:
                return False
        return True

    def setTargetValues(self, dict_target, target_keys, dict_source, source_keys):
        """Copy source_keys[i] of *dict_source* into target_keys[i] of
        *dict_target* ("" when missing); keys are matched positionally."""
        i = 0
        for item in source_keys:
            dict_target[target_keys[i]] = dict_source.get(item, "")
            i += 1
        return dict_target

    def get_deploy_graph(self, tosca, relations):
        """Build a deployment-dependency graph over the node templates.

        *relations* is a two-element list of relationship/capability name
        lists consumed by ``_build_deploy_path``.
        """
        nodes = tosca.graph.nodetemplates
        graph = Graph()
        for node in nodes:
            self._build_deploy_path(node, [], graph, relations)
        return graph.to_dict()

    def _build_deploy_path(self, node, node_parent, graph, relations):
        """Add *node* and the edges implied by its requirements to *graph*.

        NOTE(review): the relations[0] and relations[1] branches have
        identical bodies; presumably the second was meant to reverse the
        edge direction — confirm against callers.
        """
        graph.add_node(node.name, node_parent)
        type_require_set = {}
        type_requires = node.type_definition.requirements
        for type_require in type_requires:
            type_require_set.update(type_require)
        for requirement in node.requirements:
            for k in list(requirement.keys()):
                if type_require_set[k].get('relationship', None) in relations[0] or type_require_set[k].get('capability', None) in relations[0]:
                    if isinstance(requirement[k], dict):
                        next_node = requirement[k].get('node', None)
                    else:
                        next_node = requirement[k]
                    graph.add_node(next_node, [node.name])
                if type_require_set[k].get('relationship', None) in relations[1]:
                    if isinstance(requirement[k], dict):
                        next_node = requirement[k].get('node', None)
                    else:
                        next_node = requirement[k]
                    graph.add_node(next_node, [node.name])

    def get_substitution_mappings(self, tosca):
        """Summarize the template's substitution_mappings as a node dict.

        Falls back to the node_types definition for 'type' and 'properties'
        when present, and to template-level metadata when the mappings
        carry none.
        """
        node = {
            'properties': {},
            'requirements': {},
            'capabilities': {},
            'metadata': {}
        }
        metadata = None
        substitution_mappings = tosca.tpl['topology_template'].get('substitution_mappings', None)
        if substitution_mappings:
            nodeType = substitution_mappings['node_type']
            logger.debug("nodeType %s", nodeType)
            if "type" not in node or node['type'] == "":
                node['type'] = nodeType
            node['properties'] = substitution_mappings.get('properties', {})
            node['requirements'] = substitution_mappings.get('requirements', {})
            node['capabilities'] = substitution_mappings.get('capabilities', {})
            metadata = substitution_mappings.get('metadata', {})

            if "node_types" in tosca.tpl:
                node_types = tosca.tpl['node_types'].get(nodeType, None)
                derivedFrom = node_types.get('derived_from', "")
                node['type'] = derivedFrom
                node['properties'] = node_types.get('properties', {})

        node['metadata'] = metadata if metadata and metadata != {} else self.buildMetadata(tosca)
        return node
diff --git a/catalog/pub/utils/toscaparser/const.py b/catalog/pub/utils/toscaparser/const.py
new file mode 100644
index 0000000..9c61c48
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/const.py
@@ -0,0 +1,30 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Metadata keys expected in an ETSI NSD template's metadata section.
NS_METADATA_SECTIONS = (NS_UUID, NS_INVARIANTUUID, NS_NAME, NS_VERSION, NS_DESIGNER, NSD_RELEASE_DATE) =\
    ("nsd_id", "nsd_invariant_id", "nsd_name", "nsd_file_structure_version", "nsd_designer", "nsd_release_date_time")
# ("id", "invariant_id", "name", "version", "designer", "description")

# Metadata keys identifying an SDC/ECOMP service template.
SDC_SERVICE_METADATA_SECTIONS = (SRV_UUID, SRV_INVARIANTUUID, SRV_NAME) = ('UUID', 'invariantUUID', 'name')

# PNFD metadata keys and the keys of the normalized PNF dict.
PNF_METADATA_SECTIONS = (PNF_UUID, PNF_INVARIANTUUID, PNF_NAME, PNF_METADATA_DESCRIPTION, PNF_VERSION, PNF_PROVIDER) = \
    ("descriptor_id", "descriptor_invariant_id", "name", "description", "version", "provider")
PNF_SECTIONS = (PNF_ID, PNF_METADATA, PNF_PROPERTIES, PNF_DESCRIPTION) = \
    ("pnf_id", "metadata", "properties", "description")

# Keys of the normalized VNF dict.
VNF_SECTIONS = (VNF_ID, VNF_METADATA, VNF_PROPERTIES, VNF_DESCRIPTION) = \
    ("vnf_id", "metadata", "properties", "description")

# Keys of the normalized virtual-link dict.
VL_SECTIONS = (VL_ID, VL_METADATA, VL_PROPERTIES, VL_DESCRIPTION) = \
    ("vl_id", "metadata", "properties", "description")
diff --git a/catalog/pub/utils/toscaparser/dataentityext.py b/catalog/pub/utils/toscaparser/dataentityext.py
new file mode 100644
index 0000000..825e93b
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/dataentityext.py
@@ -0,0 +1,33 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from toscaparser.dataentity import DataEntity
+from toscaparser.elements.constraints import Schema
+from toscaparser.common.exception import ExceptionCollector
+
+
class DataEntityExt(object):
    '''Datatype validation extension over toscaparser's DataEntity.

    Adds explicit coercion for string and float schemas before delegating
    to the stock validator; falsy values are returned untouched.
    '''

    @staticmethod
    def validate_datatype(type, value, entry_schema=None, custom_def=None):
        # Falsy values (None, '', 0, ...) bypass validation entirely.
        if not value:
            return value
        if type == Schema.STRING:
            return str(value)
        if type == Schema.FLOAT:
            try:
                return float(value)
            except Exception:
                # Record the failure and fall through to the base validator.
                ExceptionCollector.appendException(ValueError(('"%s" is not an float.') % value))
        return DataEntity.validate_datatype(type, value, entry_schema, custom_def)
diff --git a/catalog/pub/utils/toscaparser/graph.py b/catalog/pub/utils/toscaparser/graph.py
new file mode 100644
index 0000000..0af2a14
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/graph.py
@@ -0,0 +1,74 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import deque
+from collections import OrderedDict
+
+
class Graph(object):
    """Directed graph keyed by node name; edges point at dependency nodes.

    Node insertion order is preserved (OrderedDict), so traversals and the
    topological sort are deterministic for a given build sequence.
    """

    def __init__(self, graph_dict=None):
        self.graph = OrderedDict()
        if graph_dict:
            for vertex, deps in list(graph_dict.items()):
                self.add_node(vertex, deps)

    def add_node(self, node, dep_nodes):
        """Register *node*; when *dep_nodes* is a list, also register each
        dependency and add an edge node -> dependency."""
        if node not in self.graph:
            self.graph[node] = set()
        if isinstance(dep_nodes, list):
            for dep in dep_nodes:
                if dep not in self.graph:
                    self.graph[dep] = set()
                self.graph[node].add(dep)

    def get_pre_nodes(self, node):
        """All nodes that have an edge pointing at *node*."""
        return [vertex for vertex in self.graph if node in self.graph[vertex]]

    def topo_sort(self):
        """Kahn's algorithm; returns the order, or None if a cycle exists."""
        indegree = {}
        for vertex in self.graph:
            indegree[vertex] = 0
        for vertex in self.graph:
            for dep in self.graph[vertex]:
                indegree[dep] += 1

        # Seed with every node nothing points at, in insertion order.
        ready = deque(vertex for vertex in self.graph if indegree[vertex] == 0)

        ordered = []
        while ready:
            vertex = ready.popleft()
            ordered.append(vertex)
            for dep in self.graph[vertex]:
                indegree[dep] -= 1
                if indegree[dep] == 0:
                    ready.append(dep)

        # A cycle leaves some nodes with positive in-degree forever.
        return ordered if len(ordered) == len(self.graph) else None

    def to_dict(self):
        """Plain-dict view: node -> list of its dependency nodes."""
        return {vertex: [dep for dep in deps] for vertex, deps in self.graph.items()}
diff --git a/catalog/pub/utils/toscaparser/nsdmodel.py b/catalog/pub/utils/toscaparser/nsdmodel.py
new file mode 100644
index 0000000..f742640
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/nsdmodel.py
@@ -0,0 +1,220 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+from catalog.pub.utils.toscaparser.basemodel import BaseInfoModel
+from catalog.pub.utils.toscaparser.const import SDC_SERVICE_METADATA_SECTIONS
+from catalog.pub.utils.toscaparser.servicemodel import SdcServiceModel
+
+logger = logging.getLogger(__name__)
+
# TOSCA node/group type names recognized inside an ETSI NSD topology.
SECTIONS = (NS_TYPE, NS_VNF_TYPE, NS_VL_TYPE, NS_PNF_TYPE, NS_NFP_TYPE, NS_VNFFG_TYPE) = \
    ('tosca.nodes.nfv.NS',
     'tosca.nodes.nfv.VNF',
     'tosca.nodes.nfv.NsVirtualLink',
     'tosca.nodes.nfv.PNF',
     'tosca.nodes.nfv.NFP',
     'tosca.nodes.nfv.VNFFG')

# Relationship/capability name buckets passed to
# BaseInfoModel.get_deploy_graph when building the NS deploy graph.
NFV_NS_RELATIONSHIPS = [["tosca.relationships.nfv.VirtualLinksTo", "tosca.relationships.DependsOn"], []]
+
+
class NsdInfoModel(BaseInfoModel):
    """Dispatcher that inspects template metadata and delegates parsing to
    either the ETSI NSD model or the SDC/ECOMP service model.

    ``self.model`` ends up as the chosen model instance, or {} when the
    metadata matches neither convention.
    """

    def __init__(self, path, params):
        super(NsdInfoModel, self).__init__(path, params)

    def parseModel(self, tosca):
        metadata = self.buildMetadata(tosca)
        self.model = {}
        if self._is_etsi(metadata):
            self.model = EtsiNsdInfoModel(tosca)
        elif self._is_ecomp(metadata):
            self.model = SdcServiceModel(tosca)

    def _is_etsi(self, metadata):
        """True when every mandatory ETSI NSD metadata key is present."""
        required = ["nsd_invariant_id", "nsd_name", "nsd_file_structure_version", "nsd_designer", "nsd_release_date_time"]
        return all(key in metadata for key in required)

    def _is_ecomp(self, metadata):
        """True when every SDC service metadata key is present."""
        return all(key in metadata for key in SDC_SERVICE_METADATA_SECTIONS)
+
+
class EtsiNsdInfoModel(BaseInfoModel):
    """Parsed-model wrapper for an ETSI NSD TOSCA template.

    ``parseModel`` populates: metadata, ns (from substitution_mappings),
    inputs, vnfs, pnfs, vls, fps, vnffgs, ns_exposed (external/forward
    connection points), nested_ns and the deploy graph.
    """

    def __init__(self, tosca):
        super(EtsiNsdInfoModel, self).__init__(tosca=tosca)

    def parseModel(self, tosca):
        self.metadata = self.buildMetadata(tosca)
        self.ns = self._build_ns(tosca)
        self.inputs = self.buildInputs(tosca)
        # Normalize every node template once; all _get_all_* helpers work
        # off these plain dicts.
        nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
        types = tosca.topology_template.custom_defs
        self.basepath = self.get_base_path(tosca)
        self.vnfs = self._get_all_vnf(nodeTemplates, types)
        self.pnfs = self._get_all_pnf(nodeTemplates, types)
        self.vls = self._get_all_vl(nodeTemplates, types)
        self.fps = self._get_all_fp(nodeTemplates, types)
        self.vnffgs = self._get_all_vnffg(tosca.topology_template.groups, types)
        self.ns_exposed = self._get_all_endpoint_exposed(tosca.topology_template)
        self.nested_ns = self._get_all_nested_ns(nodeTemplates, types)
        self.graph = self.get_deploy_graph(tosca, NFV_NS_RELATIONSHIPS)

    def _get_all_vnf(self, nodeTemplates, node_types):
        """Collect nodes deriving from tosca.nodes.nfv.VNF.

        NOTE(review): 'dependencies' and 'networks' are set to the same
        _get_networks result (computed twice) — presumably intentional
        duplication for consumers; confirm.
        """
        vnfs = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, NS_VNF_TYPE):
                vnf = {}
                vnf['vnf_id'] = node['name']
                vnf['description'] = node['description']
                vnf['properties'] = node['properties']
                # Backfill 'id' from ETSI 'descriptor_id' when absent.
                if not vnf['properties'].get('id', None):
                    vnf['properties']['id'] = vnf['properties'].get('descriptor_id', None)
                vnf['dependencies'] = self._get_networks(node, node_types)
                vnf['networks'] = self._get_networks(node, node_types)
                vnfs.append(vnf)
        return vnfs

    def _get_all_pnf(self, nodeTemplates, node_types):
        """Collect nodes deriving from tosca.nodes.nfv.PNF."""
        pnfs = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, NS_PNF_TYPE):
                pnf = {}
                pnf['pnf_id'] = node['name']
                pnf['description'] = node['description']
                pnf['properties'] = node['properties']
                pnf['networks'] = self._get_networks(node, node_types)
                pnfs.append(pnf)
        return pnfs

    def _get_all_vl(self, nodeTemplates, node_types):
        """Collect nodes deriving from tosca.nodes.nfv.NsVirtualLink."""
        vls = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, NS_VL_TYPE):
                vl = dict()
                vl['vl_id'] = node['name']
                vl['description'] = node['description']
                vl['properties'] = node['properties']
                vls.append(vl)
        return vls

    def _get_all_fp(self, nodeTemplates, node_types):
        """Collect forwarding-path nodes (tosca.nodes.nfv.NFP)."""
        fps = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, NS_NFP_TYPE):
                fp = {}
                fp['fp_id'] = node['name']
                fp['description'] = node['description']
                fp['properties'] = node['properties']
                fp['forwarder_list'] = self._getForwarderList(node, nodeTemplates, node_types)
                fps.append(fp)
        return fps

    def _getForwarderList(self, node, node_templates, node_types):
        """Resolve a forwarding path's 'forwarder' requirements into
        {type: vnf|pnf, node_name, capability} entries."""
        forwarderList = []
        if 'requirements' in node:
            for item in node['requirements']:
                for key, value in list(item.items()):
                    if key == 'forwarder':
                        tmpnode = self.get_node_by_req(node_templates, value)
                        type = 'pnf' if self.isNodeTypeX(tmpnode, node_types, NS_PNF_TYPE) else 'vnf'
                        req_node_name = self.get_requirement_node_name(value)
                        if isinstance(value, dict) and 'capability' in value:
                            forwarderList.append(
                                {"type": type, "node_name": req_node_name, "capability": value['capability']})
                        else:
                            forwarderList.append({"type": type, "node_name": req_node_name, "capability": ""})
        return forwarderList

    def _get_all_vnffg(self, groups, group_types):
        """Collect VNF forwarding graphs from the topology groups."""
        vnffgs = []
        for group in groups:
            if self.isGroupTypeX(group, group_types, NS_VNFFG_TYPE):
                vnffg = {}
                vnffg['vnffg_id'] = group.name
                vnffg['description'] = group.description
                if 'properties' in group.tpl:
                    vnffg['properties'] = group.tpl['properties']
                vnffg['members'] = group.members
                vnffgs.append(vnffg)
        return vnffgs

    def _get_all_endpoint_exposed(self, topo_tpl):
        """Summarize substitution_mappings into external/forward CP lists
        ({} when the template has no substitution_mappings)."""
        if 'substitution_mappings' in topo_tpl.tpl:
            external_cps = self._get_external_cps(topo_tpl.tpl['substitution_mappings'])
            forward_cps = self._get_forward_cps(topo_tpl.tpl['substitution_mappings'])
            return {"external_cps": external_cps, "forward_cps": forward_cps}
        return {}

    def _get_external_cps(self, subs_mappings):
        """Requirement entries -> [{key_name, cpd_id}]; list values use
        their first element as the cpd_id."""
        external_cps = []
        if 'requirements' in subs_mappings:
            for key, value in list(subs_mappings['requirements'].items()):
                if isinstance(value, list) and len(value) > 0:
                    external_cps.append({"key_name": key, "cpd_id": value[0]})
                else:
                    external_cps.append({"key_name": key, "cpd_id": value})
        return external_cps

    def _get_forward_cps(self, subs_mappings):
        """Capability entries -> [{key_name, cpd_id}]; same shape as
        _get_external_cps."""
        forward_cps = []
        if 'capabilities' in subs_mappings:
            for key, value in list(subs_mappings['capabilities'].items()):
                if isinstance(value, list) and len(value) > 0:
                    forward_cps.append({"key_name": key, "cpd_id": value[0]})
                else:
                    forward_cps.append({"key_name": key, "cpd_id": value})
        return forward_cps

    def _get_all_nested_ns(self, nodes, node_types):
        """Collect nested NS nodes (tosca.nodes.nfv.NS)."""
        nss = []
        for node in nodes:
            if self.isNodeTypeX(node, node_types, NS_TYPE):
                ns = {}
                ns['ns_id'] = node['name']
                ns['description'] = node['description']
                ns['properties'] = node['properties']
                ns['networks'] = self._get_networks(node, node_types)
                nss.append(ns)
        return nss

    def _get_networks(self, node, node_types):
        """For NS/VNF nodes, map every requirement to
        {key_name, vl_id: target node name}."""
        rets = []
        if 'requirements' in node and (self.isNodeTypeX(node, node_types, NS_TYPE) or self.isNodeTypeX(node, node_types, NS_VNF_TYPE)):
            for item in node['requirements']:
                for key, value in list(item.items()):
                    rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)})
        return rets

    def _build_ns(self, tosca):
        """Build the top-level NS dict from substitution_mappings, filling
        missing properties from template metadata.

        NOTE(review): the property key "verison" is a typo for "version",
        but it is written twice consistently and may be depended on by
        consumers of this model — do not rename without checking callers.
        """
        ns = self.get_substitution_mappings(tosca)
        properties = ns.get("properties", {})
        metadata = ns.get("metadata", {})
        if properties.get("descriptor_id", "") == "":
            descriptor_id = metadata.get("nsd_id", "")
            properties["descriptor_id"] = descriptor_id
        if properties.get("verison", "") == "":
            version = metadata.get("nsd_file_structure_version", "")
            properties["verison"] = version
        if properties.get("designer", "") == "":
            author = metadata.get("nsd_designer", "")
            properties["designer"] = author
        if properties.get("name", "") == "":
            template_name = metadata.get("nsd_name", "")
            properties["name"] = template_name
        if properties.get("invariant_id", "") == "":
            nsd_invariant_id = metadata.get("nsd_invariant_id", "")
            properties["invariant_id"] = nsd_invariant_id
        return ns
diff --git a/catalog/pub/utils/toscaparser/pnfmodel.py b/catalog/pub/utils/toscaparser/pnfmodel.py
new file mode 100644
index 0000000..9ad8686
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/pnfmodel.py
@@ -0,0 +1,53 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+from catalog.pub.utils.toscaparser.basemodel import BaseInfoModel
+logger = logging.getLogger(__name__)
+
+
+class PnfdInfoModel(BaseInfoModel):
+ # Parses a PNF descriptor (PNFD) CSAR into a flat dict model:
+ # self.pnf = {'type', 'properties', 'ExtPorts'}.
+
+ def __init__(self, path, params):
+ super(PnfdInfoModel, self).__init__(path, params)
+
+ def parseModel(self, tosca):
+ # Entry point invoked by BaseInfoModel; fills metadata, inputs and pnf.
+ self.metadata = self.buildMetadata(tosca)
+ self.inputs = self.buildInputs(tosca)
+ # NOTE: map() is lazy here — the iterator is consumed exactly once,
+ # by get_all_cp() below; do not iterate it a second time.
+ nodeTemplates = map(functools.partial(self.buildNode, tosca=tosca),
+ tosca.nodetemplates)
+ self.basepath = self.get_base_path(tosca)
+ self.pnf = {}
+ self.get_substitution_mappings(tosca)
+ self.get_all_cp(nodeTemplates)
+
+ def get_substitution_mappings(self, tosca):
+ # Copy node_type and properties from topology_template.substitution_mappings
+ # into self.pnf; leaves self.pnf untouched when the section is absent.
+ pnf_substitution_mappings = tosca.tpl['topology_template'].get('substitution_mappings', None)
+ if pnf_substitution_mappings:
+ self.pnf['type'] = pnf_substitution_mappings['node_type']
+ self.pnf['properties'] = pnf_substitution_mappings.get('properties', {})
+
+ def get_all_cp(self, nodeTemplates):
+ # Gather every external port (PnfExtPort) node into self.pnf['ExtPorts'].
+ self.pnf['ExtPorts'] = []
+ for node in nodeTemplates:
+ if self.isPnfExtPort(node):
+ cp = {}
+ cp['id'] = node['name']
+ cp['type'] = node['nodeType']
+ cp['properties'] = node['properties']
+ self.pnf['ExtPorts'].append(cp)
+
+ def isPnfExtPort(self, node):
+ # Substring match, so derived/vendor-prefixed type names also qualify.
+ return node['nodeType'].find('tosca.nodes.nfv.PnfExtPort') >= 0
diff --git a/catalog/pub/utils/toscaparser/sdmodel.py b/catalog/pub/utils/toscaparser/sdmodel.py
new file mode 100644
index 0000000..05e0caf
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/sdmodel.py
@@ -0,0 +1,93 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+from catalog.pub.utils.toscaparser.basemodel import BaseInfoModel
+from catalog.pub.utils.toscaparser.servicemodel import SdcServiceModel
+
+logger = logging.getLogger(__name__)
+
+
+class SdInfoModel(BaseInfoModel):
+ # Service-descriptor model: wraps SdcServiceModel and adds recursive
+ # resolution of complex (list/map) input definitions.
+ def __init__(self, path, params):
+ super(SdInfoModel, self).__init__(path, params)
+
+ def parseModel(self, tosca):
+ self.metadata = self.buildMetadata(tosca)
+ self.inputs = self.build_inputs(tosca)
+
+ # NOTE(review): SdcServiceModel() always returns an object, so this
+ # 'if' is always true; kept as-is (doc-only change).
+ sdcModle = SdcServiceModel(tosca)
+ if sdcModle:
+ self.service = sdcModle.ns
+ if hasattr(tosca, 'nodetemplates'):
+ self.basepath = sdcModle.basepath
+ self.vnfs = sdcModle.vnfs
+ self.pnfs = sdcModle.pnfs
+ self.vls = sdcModle.vls
+ self.graph = sdcModle.graph
+
+ def build_inputs(self, tosca):
+ """ Get all the inputs for complex type"""
+ # Simple inputs become {type, description, required}; list/map inputs
+ # are expanded recursively through their entry_schema definitions.
+ result_inputs = {}
+
+ if not tosca.inputs:
+ return {}
+
+ for input in tosca.inputs:
+ type = input.schema.type
+ if type.__eq__('list') or type.__eq__('map'):
+ complex_input = []
+ entry_schema = self.get_entry_schema(input.schema.schema['entry_schema'])
+ self.get_child_input_repeat(complex_input, entry_schema, input)
+ result_inputs[input.schema.name] = complex_input
+
+ else:
+ simple_input = {
+ "type": input.schema.type,
+ "description": input.schema.description,
+ "required": input.schema.required,
+ }
+ result_inputs[input.schema.name] = simple_input
+ return result_inputs
+
+ def get_child_input_repeat(self, complex_input, entry_schema, input):
+ # Recursively expand a complex input: each list-typed property recurses
+ # one level deeper; scalar properties are appended as flat dicts.
+ custom_defs = input.custom_defs
+ properties = custom_defs[entry_schema]['properties']
+ for key, value in properties.items():
+ if value['type'].__eq__('list'):
+ child_complex_input = []
+ child_entry_schema = self.get_entry_schema(value['entry_schema'])
+ self.get_child_input_repeat(child_complex_input, child_entry_schema, input)
+ complex_input.append({key: child_complex_input})
+ else:
+ # 'description' is optional in the property definition.
+ if 'description' in list(value.keys()):
+ simple_input = {
+ key: "",
+ "type": value['type'],
+ "required": value['required'],
+ "description": value['description'],
+ }
+ else:
+ simple_input = {
+ key: "",
+ "type": value['type'],
+ "required": value['required'],
+ }
+ complex_input.append(simple_input)
+
+ def get_entry_schema(self, entry_schema):
+ # entry_schema may be either a bare type name or a {'type': ...} dict;
+ # normalize it to the type-name string.
+ if isinstance(entry_schema, dict):
+ if 'type' in list(entry_schema.keys()):
+ entry_schema = entry_schema['type']
+ return entry_schema
diff --git a/catalog/pub/utils/toscaparser/servicemodel.py b/catalog/pub/utils/toscaparser/servicemodel.py
new file mode 100644
index 0000000..47d6630
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/servicemodel.py
@@ -0,0 +1,188 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+from catalog.pub.utils.toscaparser.const import NS_METADATA_SECTIONS, PNF_METADATA_SECTIONS, VNF_SECTIONS, PNF_SECTIONS, VL_SECTIONS
+from catalog.pub.utils.toscaparser.basemodel import BaseInfoModel
+
+logger = logging.getLogger(__name__)
+
+# Section/metadata key constants for mapping SDC (ONAP Service Design &
+# Creation) exported templates onto the internal model fields.
+SDC_SERVICE_SECTIONS = (SERVICE_TYPE, SRV_DESCRIPTION) = (
+ 'org.openecomp.resource.abstract.nodes.service', 'description')
+
+SDC_SERVICE_METADATA_SECTIONS = (SRV_UUID, SRV_INVARIANTUUID, SRV_NAME) = (
+ 'UUID', 'invariantUUID', 'name')
+
+# NOTE(review): '(VL_TYPE) = (...)' is NOT a tuple — parentheses around a
+# single item bind it as a plain string. That is what the isNodeTypeX
+# comparisons below expect, so the shape is kept.
+SDC_VL = (VL_TYPE) = ('tosca.nodes.nfv.ext.zte.VL')
+SDC_VL_SECTIONS = (VL_ID, VL_METADATA, VL_PROPERTIES, VL_DESCRIPTION) = \
+ ("name", "metadata", "properties", "description")
+
+SDC_VF = (VF_TYPE, VF_UUID) = \
+ ('org.openecomp.resource.abstract.nodes.VF', 'UUID')
+SDC_VF_SECTIONS = (VF_ID, VF_METADATA, VF_PROPERTIES, VF_DESCRIPTION) = \
+ ("name", "metadata", "properties", "description")
+
+SDC_PNF = (PNF_TYPE) = \
+ ('org.openecomp.resource.abstract.nodes.PNF')
+SDC_PNF_METADATA_SECTIONS = (SDC_PNF_UUID, SDC_PNF_INVARIANTUUID, SDC_PNF_NAME, SDC_PNF_METADATA_DESCRIPTION, SDC_PNF_VERSION) = \
+ ("UUID", "invariantUUID", "name", "description", "version")
+SDC_PNF_SECTIONS = (SDC_PNF_ID, SDC_PNF_METADATA, SDC_PNF_PROPERTIES, SDC_PNF_DESCRIPTION) = \
+ ("name", "metadata", "properties", "description")
+
+# Relationship type names used to build the deploy graph:
+# [forward-link relationship types, reverse-link relationship types].
+SERVICE_RELATIONSHIPS = [["tosca.relationships.network.LinksTo", "tosca.relationships.nfv.VirtualLinksTo", "tosca.capabilities.nfv.VirtualLinkable", "tosca.relationships.DependsOn"], []]
+
+
+class SdcServiceModel(BaseInfoModel):
+ # Parses an SDC-exported service template into the internal model:
+ # metadata, ns header, inputs, vnfs, pnfs, vls and the deploy graph.
+
+ def __init__(self, tosca):
+ super(SdcServiceModel, self).__init__(tosca=tosca)
+
+ def parseModel(self, tosca):
+ # Entry point invoked by BaseInfoModel. Node-level fields are only
+ # built when the template actually contains node templates.
+ self.metadata = self._buildServiceMetadata(tosca)
+ self.ns = self._build_ns(tosca)
+ self.inputs = self.buildInputs(tosca)
+ if hasattr(tosca, 'nodetemplates'):
+ nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
+ types = tosca.topology_template.custom_defs
+ self.basepath = self.get_base_path(tosca)
+ self.vnfs = self._get_all_vnf(nodeTemplates, types)
+ self.pnfs = self._get_all_pnf(nodeTemplates, types)
+ self.vls = self._get_all_vl(nodeTemplates, types)
+ self.graph = self.get_deploy_graph(tosca, SERVICE_RELATIONSHIPS)
+
+ def _buildServiceMetadata(self, tosca):
+ """ SDC service Meta Format
+ invariantUUID: e2618ee1 - a29a - 44c4 - a52a - b718fe1269f4
+ UUID: 2362d14a - 115f - 4a2b - b449 - e2f93c0b7c89
+ name: demoVLB
+ description: catalogservicedescription
+ type: Service
+ category: NetworkL1 - 3
+ serviceType: ''
+ serviceRole: ''
+ serviceEcompNaming: true
+ ecompGeneratedNaming: true
+ namingPolicy: ''
+ """
+ # Project only the NS-relevant keys out of the raw SDC metadata.
+ metadata_temp = self.buildMetadata(tosca)
+ metadata = {}
+ return self.setTargetValues(metadata, NS_METADATA_SECTIONS, metadata_temp, SDC_SERVICE_METADATA_SECTIONS)
+
+ def _get_all_vnf(self, nodeTemplates, node_types):
+ """ SDC Resource Metadata
+ invariantUUID: 9ed46ddc-8eb7-4cb0-a1b6-04136c921af4
+ UUID: b56ba35d-45fb-41e3-b6b8-b4f66917baa1
+ customizationUUID: af0a6e64-967b-476b-87bc-959dcf59c305
+ version: '1.0'
+ name: b7d2fceb-dd11-43cd-a3fa
+ description: vendor software product
+ type: VF
+ category: Generic
+ subcategory: Abstract
+ resourceVendor: b9d9f9f7-7994-4f0d-8104
+ resourceVendorRelease: '1.0'
+ resourceVendorModelNumber: ''
+ """
+ vnfs = []
+ for node in nodeTemplates:
+ if self.isNodeTypeX(node, node_types, VF_TYPE):
+ vnf = {}
+ self.setTargetValues(vnf, VNF_SECTIONS, node, SDC_VF_SECTIONS)
+ # Fall back to the SDC UUID when the VF declares no id property.
+ if not vnf['properties'].get('id', None) and node['metadata']:
+ vnf['properties']['id'] = node['metadata'].get('UUID', None)
+ vnf['properties']['vnfm_info'] = vnf['properties'].get('nf_type', None)
+ # NOTE(review): dependencies and networks are built from the same
+ # _get_networks() call — presumably intentional aliasing; confirm.
+ vnf['dependencies'] = self._get_networks(node, node_types)
+ vnf['networks'] = self._get_networks(node, node_types)
+ vnfs.append(vnf)
+ return vnfs
+
+ def _get_all_pnf(self, nodeTemplates, node_types):
+ # Build PNF entries: section values from the node itself, plus the
+ # SDC metadata keys folded into the PNF properties.
+ pnfs = []
+ for node in nodeTemplates:
+ if self.isNodeTypeX(node, node_types, PNF_TYPE):
+ pnf = {}
+ self.setTargetValues(pnf, PNF_SECTIONS, node, SDC_PNF_SECTIONS)
+ self.setTargetValues(pnf['properties'], PNF_METADATA_SECTIONS, node['metadata'], SDC_PNF_METADATA_SECTIONS)
+ pnf['networks'] = self._get_networks(node, node_types)
+ pnfs.append(pnf)
+ return pnfs
+
+ def _get_all_vl(self, nodeTemplates, node_types):
+ # Build virtual-link entries and flatten selected properties into the
+ # 'vl_profile' dict expected by downstream consumers.
+ vls = []
+ for node in nodeTemplates:
+ if self.isNodeTypeX(node, node_types, VL_TYPE):
+ vl = {}
+ self.setTargetValues(vl, VL_SECTIONS, node, SDC_VL_SECTIONS)
+ vl_profile = {}
+ if 'segmentation_id' in vl['properties']:
+ vl_profile['segmentationId'] = vl['properties'].get('segmentation_id')
+ if 'network_name' in vl['properties']:
+ vl_profile['networkName'] = vl['properties'].get('network_name')
+ if 'cidr' in vl['properties']:
+ vl_profile['cidr'] = vl['properties'].get('cidr')
+ # NOTE(review): duplicate of the network_name check above — harmless
+ # (same value rewritten) but could be dropped in a code change.
+ if 'network_name' in vl['properties']:
+ vl_profile['networkName'] = vl['properties'].get('network_name')
+ if 'start_ip' in vl['properties']:
+ vl_profile['startIp'] = vl['properties'].get('start_ip', '')
+ if 'end_ip' in vl['properties']:
+ vl_profile['endIp'] = vl['properties'].get('end_ip', '')
+ if 'gateway_ip' in vl['properties']:
+ vl_profile['gatewayIp'] = vl['properties'].get('gateway_ip', '')
+ if 'physical_network' in vl['properties']:
+ vl_profile['physicalNetwork'] = vl['properties'].get('physical_network', '')
+ if 'network_type' in vl['properties']:
+ vl_profile['networkType'] = vl['properties'].get('network_type', '')
+ if 'dhcp_enabled' in vl['properties']:
+ vl_profile['dhcpEnabled'] = vl['properties'].get('dhcp_enabled', '')
+ if 'vlan_transparent' in vl['properties']:
+ vl_profile['vlanTransparent'] = vl['properties'].get('vlan_transparent', '')
+ if 'mtu' in vl['properties']:
+ vl_profile['mtu'] = vl['properties'].get('mtu', '')
+ if 'ip_version' in vl['properties']:
+ vl_profile['ip_version'] = vl['properties'].get('ip_version', '')
+ if 'dns_nameservers' in vl['properties']:
+ vl_profile['dns_nameservers'] = vl['properties'].get('dns_nameservers', [])
+ if 'host_routes' in vl['properties']:
+ vl_profile['host_routes'] = vl['properties'].get('host_routes', [])
+ if 'network_id' in vl['properties']:
+ vl_profile['network_id'] = vl['properties'].get('network_id', '')
+ vl['properties']['vl_profile'] = vl_profile
+ vls.append(vl)
+ return vls
+
+ def _get_networks(self, node, node_types):
+ # Map each requirement of a VF node to its target virtual link:
+ # [{"key_name": <requirement name>, "vl_id": <target node name>}].
+ rets = []
+ if 'requirements' in node and self.isNodeTypeX(node, node_types, VF_TYPE):
+ for item in node['requirements']:
+ for key, value in list(item.items()):
+ rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)})
+ return rets
+
+ def _build_ns(self, tosca):
+ # Build the NS header from substitution_mappings, backfilling empty
+ # properties from the SDC service metadata keys.
+ ns = self.get_substitution_mappings(tosca)
+ properties = ns.get("properties", {})
+ metadata = ns.get("metadata", {})
+ if properties.get("descriptor_id", "") == "":
+ descriptor_id = metadata.get(SRV_UUID, "")
+ properties["descriptor_id"] = descriptor_id
+ # 'verison' (sic) matches the key used by the NSD model — do not
+ # rename here without changing consumers. SDC metadata has no source
+ # for version/designer, so they are blanked unconditionally.
+ properties["verison"] = ""
+ properties["designer"] = ""
+ if properties.get("name", "") == "":
+ template_name = metadata.get(SRV_NAME, "")
+ properties["name"] = template_name
+ if properties.get("invariant_id", "") == "":
+ nsd_invariant_id = metadata.get(SRV_INVARIANTUUID, "")
+ properties["invariant_id"] = nsd_invariant_id
+ return ns
diff --git a/catalog/pub/utils/toscaparser/testdata/ns/ran.csar b/catalog/pub/utils/toscaparser/testdata/ns/ran.csar
new file mode 100644
index 0000000..9ea868c
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/ns/ran.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/ns/service-vIMS.csar b/catalog/pub/utils/toscaparser/testdata/ns/service-vIMS.csar
new file mode 100644
index 0000000..0aeed58
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/ns/service-vIMS.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/pnf/ran-du.csar b/catalog/pub/utils/toscaparser/testdata/pnf/ran-du.csar
new file mode 100644
index 0000000..45168a9
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/pnf/ran-du.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vSBC.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vSBC.csar
new file mode 100644
index 0000000..921eafd
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vSBC.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/infra.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/infra.csar
new file mode 100644
index 0000000..5c9fbcf
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/infra.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbng.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbng.csar
new file mode 100644
index 0000000..b11a6ef
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbng.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbrgemu.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbrgemu.csar
new file mode 100644
index 0000000..730ea8d
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbrgemu.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgmux.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgmux.csar
new file mode 100644
index 0000000..b0f37a7
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgmux.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgw.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgw.csar
new file mode 100644
index 0000000..ca652bf
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgw.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/infra.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/infra.csar
new file mode 100644
index 0000000..c91c034
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/infra.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbng.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbng.csar
new file mode 100644
index 0000000..5011563
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbng.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbrgemu.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbrgemu.csar
new file mode 100644
index 0000000..0f99199
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbrgemu.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgmux.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgmux.csar
new file mode 100644
index 0000000..3d2dbf7
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgmux.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgw.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgw.csar
new file mode 100644
index 0000000..79e0d20
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgw.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/tests.py b/catalog/pub/utils/toscaparser/tests.py
new file mode 100644
index 0000000..285d970
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/tests.py
@@ -0,0 +1,101 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+import os
+import logging
+import tempfile
+import shutil
+
+from django.test import TestCase
+
+from catalog.pub.utils.toscaparser import parse_vnfd, parse_pnfd, parse_nsd
+from catalog.pub.utils.toscaparser.graph import Graph
+
+logger = logging.getLogger(__name__)
+
+
+class TestToscaparser(TestCase):
+ # End-to-end parser tests driven by the CSAR fixtures under testdata/.
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def test_vnfd_parse(self):
+ # Parse the same vCPE part from both the SR-IOV and DPDK fixture sets
+ # and check the template_name metadata round-trips.
+ self.remove_temp_dir()
+ input_parameters = [{"value": "222222", "key": "sdncontroller"}]
+ # vcpe = ["vgw", "infra", "vbng", "vbrgemu", "vgmux"]
+ vcpe_part = 'vgw'
+ sriov_path = os.path.dirname(os.path.abspath(__file__)) + "/testdata/vnf/vcpesriov"
+ csar_file = ("%s/%s.csar" % (sriov_path, vcpe_part))
+ logger.debug("csar_file:%s", csar_file)
+ vnfd_json = parse_vnfd(csar_file, input_parameters)
+ metadata = json.loads(vnfd_json).get("metadata")
+ logger.debug("sriov metadata:%s", metadata)
+ self.assertEqual(("vCPE_%s" % vcpe_part), metadata.get("template_name", ""))
+ # Dead branch while vcpe_part == 'vgw'; only runs if switched to 'infra'.
+ if vcpe_part == "infra":
+ self.assertEqual("b1bb0ce7-1111-4fa7-95ed-4840d70a1177", json.loads(vnfd_json)["vnf"]["properties"]["descriptor_id"])
+
+ dpdk_path = os.path.dirname(os.path.abspath(__file__)) + "/testdata/vnf/vcpedpdk"
+ csar_file = ("%s/%s.csar" % (dpdk_path, vcpe_part))
+ logger.debug("csar_file:%s", csar_file)
+ vnfd_json = parse_vnfd(csar_file, input_parameters)
+ metadata = json.loads(vnfd_json).get("metadata")
+ logger.debug("dpdk metadata:%s", metadata)
+ self.assertEqual(("vCPE_%s" % vcpe_part), metadata.get("template_name", ""))
+
+ def test_pnfd_parse(self):
+ self.remove_temp_dir()
+ csar_path = os.path.dirname(os.path.abspath(__file__)) + "/testdata/pnf/ran-du.csar"
+ pnfd_json = parse_pnfd(csar_path)
+ pnfd_dict = json.loads(pnfd_json)
+ metadata = pnfd_dict.get("metadata")
+ self.assertEqual("RAN_DU", metadata.get("template_name", ""))
+ descriptor_id = pnfd_dict["pnf"]["properties"]["descriptor_id"]
+ self.assertEqual(1, descriptor_id)
+
+ def test_nsd_parse(self):
+ # NOTE(review): the actual NSD assertions are commented out, so this
+ # test only clears temp dirs — effectively a no-op placeholder.
+ self.remove_temp_dir()
+ # ran_csar = os.path.dirname(os.path.abspath(__file__)) + "/testdata/ns/ran.csar"
+ # nsd_json = parse_nsd(ran_csar, [])
+ # logger.debug("NS ran json: %s" % nsd_json)
+ # metadata = json.loads(nsd_json).get("metadata")
+ # self.assertEqual("RAN-NS", metadata.get("nsd_name", ""))
+
+ def test_service_descriptor_parse(self):
+ self.remove_temp_dir()
+ service_test_csar = os.path.dirname(os.path.abspath(__file__)) + "/testdata/ns/service-vIMS.csar"
+ test_json = parse_nsd(service_test_csar, [])
+ logger.debug("service-vIMS json: %s" % test_json)
+ metadata = json.loads(test_json).get("metadata")
+ self.assertEqual("vIMS_v2", metadata.get("nsd_name", ""))
+
+ def remove_temp_dir(self):
+ # Best-effort cleanup of tmp* dirs left by previous parser runs.
+ # gettempdir() also populates tempfile.tempdir used below.
+ tempdir = tempfile.gettempdir()
+ for dir in os.listdir(tempdir):
+ if dir.startswith("tmp"):
+ path = tempfile.tempdir + "/" + dir
+ if (not os.path.isfile(path)) and os.path.exists(path):
+ shutil.rmtree(tempfile.tempdir + "/" + dir)
+
+ def test_graph(self):
+ data = {
+ "cucp": [],
+ "du": [],
+ "vl_flat_net": ["cucp", "cuup"],
+ "vl_ext_net": ["cucp", "cuup"],
+ "cuup": []
+ }
+ graph = Graph(data)
+ # BUG(review): list.sort() returns None, so this compares None == None
+ # and always passes. Should be sorted(...) == sorted(...) to actually
+ # assert the predecessor set.
+ self.assertEqual(['vl_ext_net', 'vl_flat_net'].sort(), graph.get_pre_nodes("cucp").sort())
diff --git a/catalog/pub/utils/toscaparser/vnfdmodel.py b/catalog/pub/utils/toscaparser/vnfdmodel.py
new file mode 100644
index 0000000..1ed0659
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/vnfdmodel.py
@@ -0,0 +1,48 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+from catalog.pub.config.config import VNFD_SCHEMA_VERSION_DEFAULT
+from catalog.pub.utils.toscaparser.basemodel import BaseInfoModel
+from catalog.pub.utils.toscaparser.vnfdparser import CreateVnfdSOLParser
+
+
+logger = logging.getLogger(__name__)
+
+# Relationship type names used to build the VNF deploy graph:
+# [forward-link relationship types, reverse (virtual-binding) types].
+NFV_VNF_RELATIONSHIPS = [["tosca.relationships.nfv.VirtualLinksTo", "tosca.relationships.nfv.VduAttachesTo", "tosca.relationships.nfv.AttachesTo", "tosca.relationships.nfv.Vdu.AttachedTo", "tosca.relationships.DependsOn"],
+ ["tosca.nodes.relationships.VirtualBindsTo", "tosca.relationships.nfv.VirtualBindsTo"]]
+
+
+class EtsiVnfdInfoModel(BaseInfoModel):
+ # ETSI VNFD model: delegates SOL001-version-specific extraction to the
+ # parser chosen by CreateVnfdSOLParser.
+
+ def __init__(self, path, params):
+ self.vnf = {}
+ super(EtsiVnfdInfoModel, self).__init__(path, params)
+
+ def parseModel(self, tosca):
+ # Entry point invoked by BaseInfoModel; fills vnf, vdus, vls, cps, etc.
+ self.metadata = self.buildMetadata(tosca)
+ self.inputs = self.buildInputs(tosca)
+ nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
+ self.basepath = self.get_base_path(tosca)
+ node_types = tosca.topology_template.custom_defs
+ # Pick the SOL parser by the template's declared schema version,
+ # falling back to the configured default when metadata is not a dict.
+ sol_version = self.metadata.get("VNFD_SCHEMA_VERSION", VNFD_SCHEMA_VERSION_DEFAULT) if isinstance(self.metadata, dict) else VNFD_SCHEMA_VERSION_DEFAULT
+ vnfd_sol_parser = CreateVnfdSOLParser(sol_version, self)
+ self.vnf = vnfd_sol_parser.build_vnf(tosca)
+ self.volume_storages = vnfd_sol_parser.get_all_volume_storage(nodeTemplates, node_types)
+ self.vdus = vnfd_sol_parser.get_all_vdu(nodeTemplates, node_types)
+ self.vls = vnfd_sol_parser.get_all_vl(nodeTemplates, node_types)
+ self.cps = vnfd_sol_parser.get_all_cp(nodeTemplates, node_types)
+ self.vnf_exposed = vnfd_sol_parser.get_all_endpoint_exposed()
+ self.graph = self.get_deploy_graph(tosca, NFV_VNF_RELATIONSHIPS)
diff --git a/catalog/pub/utils/toscaparser/vnfdparser/__init__.py b/catalog/pub/utils/toscaparser/vnfdparser/__init__.py
new file mode 100644
index 0000000..911de2c
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/vnfdparser/__init__.py
@@ -0,0 +1,23 @@
+# Copyright 2019 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from catalog.pub.utils.toscaparser.vnfdparser.vnfd_sol_base import VnfdSOLBase
+from catalog.pub.utils.toscaparser.vnfdparser.vnfd_sol_251 import VnfdSOL251
+
+
+def CreateVnfdSOLParser(sol_version, etsi_vnfd_model):
+ # Factory: return the SOL001-version-specific VNFD parser.
+ # NOTE(review): both parser objects are constructed eagerly on every call,
+ # and an unknown version returns a lambda (not a parser) instead of
+ # raising — callers then fail later with AttributeError; confirm intent.
+ switcher = {
+ "base": VnfdSOLBase(etsi_vnfd_model),
+ "2.5.1+1": VnfdSOL251(etsi_vnfd_model)
+ }
+ return switcher.get(sol_version, lambda: "Invalid Version")
diff --git a/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_251.py b/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_251.py
new file mode 100644
index 0000000..d1b0d14
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_251.py
@@ -0,0 +1,300 @@
+# Copyright 2019 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import base64
+
+
+logger = logging.getLogger(__name__)
+
+# SOL001 2.5.1 node-type names matched by the extraction methods below.
+SECTIONS = (VDU_COMPUTE_TYPE, VNF_VL_TYPE, VDU_CP_TYPE, VDU_STORAGE_TYPE) = \
+ ('tosca.nodes.nfv.Vdu.Compute', 'tosca.nodes.nfv.VnfVirtualLink', 'tosca.nodes.nfv.VduCp', 'tosca.nodes.nfv.Vdu.VirtualStorage')
+
+
+class VnfdSOL251():
+ # SOL001 v2.5.1 extraction strategy; 'model' is the owning
+ # EtsiVnfdInfoModel, used for node-type checks and shared helpers.
+
+ def __init__(self, model):
+ self.model = model
+
+ def build_vnf(self, tosca):
+ # Build the top-level VNF dict from substitution_mappings: collapse
+ # dict-typed property declarations to their defaults, then backfill
+ # missing/empty properties from metadata via _get_property.
+ vnf = self.model.get_substitution_mappings(tosca)
+ properties = vnf.get("properties", {})
+ metadata = vnf.get("metadata", {})
+
+ for key, value in list(properties.items()):
+ if isinstance(value, dict):
+ if value["type"] == "string":
+ properties[key] = value.get("default", "")
+ elif value["type"] == "list":
+ # NOTE(review): default {} (a dict) for list-typed
+ # properties — presumably intentional; confirm.
+ properties[key] = value.get("default", {})
+ else:
+ properties[key] = value.get("default", "")
+ # Each pair below names a target property and the metadata keys that
+ # may supply it, in priority order.
+ ptype = "descriptor_id"
+ meta_types = ["descriptor_id", "id", "UUID"]
+ self._get_property(properties, metadata, ptype, meta_types)
+
+ ptype = "descriptor_version"
+ meta_types = ["template_version", "version"]
+ self._get_property(properties, metadata, ptype, meta_types)
+
+ ptype = "provider"
+ meta_types = ["template_author", "provider"]
+ self._get_property(properties, metadata, ptype, meta_types)
+
+ ptype = "template_name"
+ meta_types = ["template_name"]
+ self._get_property(properties, metadata, ptype, meta_types)
+
+ ptype = "software_version"
+ meta_types = ["software_version"]
+ self._get_property(properties, metadata, ptype, meta_types)
+
+ ptype = "product_name"
+ meta_types = ["product_name"]
+ self._get_property(properties, metadata, ptype, meta_types)
+
+ ptype = "flavour_description"
+ meta_types = ["flavour_description"]
+ self._get_property(properties, metadata, ptype, meta_types)
+
+ ptype = "vnfm_info"
+ meta_types = ["vnfm_info"]
+ self._get_property(properties, metadata, ptype, meta_types)
+
+ ptype = "flavour_id"
+ meta_types = ["flavour_id"]
+ self._get_property(properties, metadata, ptype, meta_types)
+
+ logger.debug("vnf:%s", vnf)
+
+ return vnf
+
+ def get_all_vl(self, nodeTemplates, node_types):
+ # Extract VnfVirtualLink nodes, renaming SOL 2.5.1 plural keys to the
+ # singular forms the internal model uses, and flattening the first
+ # virtual_link_protocol_data entry into vl_profile.
+ vls = []
+ for node in nodeTemplates:
+ if self.model.isNodeTypeX(node, node_types, VNF_VL_TYPE):
+ vl = dict()
+ vl['vl_id'] = node['name']
+ vl['description'] = node['description']
+ vl['properties'] = node['properties']
+ vlp = vl['properties']
+ nodep = node['properties']
+ # layer_protocols (list) -> layer_protocol (first entry).
+ vlp['connectivity_type']['layer_protocol'] = nodep['connectivity_type']['layer_protocols'][0]
+ vlp['vl_profile']['max_bit_rate_requirements'] = nodep['vl_profile']['max_bitrate_requirements']
+ vlp['vl_profile']['min_bit_rate_requirements'] = nodep['vl_profile']['min_bitrate_requirements']
+ if 'virtual_link_protocol_data' in nodep['vl_profile']:
+ # Only the first protocol_data entry is honoured.
+ protocol_data = nodep['vl_profile']['virtual_link_protocol_data'][0]
+ vlp['vl_profile']['associated_layer_protocol'] = protocol_data['associated_layer_protocol']
+ if 'l3_protocol_data' in protocol_data:
+ l3 = protocol_data['l3_protocol_data']
+ vlp['vl_profile']['networkName'] = l3.get("name", "")
+ vlp['vl_profile']['cidr'] = l3.get("cidr", "")
+ vlp['vl_profile']['dhcpEnabled'] = l3.get("dhcp_enabled", "")
+ vlp['vl_profile']['ip_version'] = l3.get("ip_version", "")
+ if 'l2_protocol_data' in protocol_data:
+ l2 = protocol_data['l2_protocol_data']
+ vlp['vl_profile']['physicalNetwork'] = l2.get("physical_network", "")
+ vls.append(vl)
+ return vls
+
+ def get_all_cp(self, nodeTemplates, node_types):
+ # Extract VduCp nodes into connection-point dicts, linking each CP to
+ # its virtual link and owning VDU.
+ cps = []
+ for node in nodeTemplates:
+ if self.model.isNodeTypeX(node, node_types, VDU_CP_TYPE):
+ cp = {}
+ cp['cp_id'] = node['name']
+ cp['cpd_id'] = node['name']
+ cp['description'] = node['description']
+ cp['properties'] = {}
+ nodep = node['properties']
+ cp['properties']['trunk_mode'] = nodep.get("trunk_mode", "")
+ cp['properties']['layer_protocol'] = nodep.get("layer_protocols", "")
+ if 'vnic_type' in nodep:
+ cp['properties']['vnic_type'] = nodep.get("vnic_type", "normal")
+ if 'virtual_network_interface_requirements' in nodep:
+ cp['properties']['virtual_network_interface_requirements'] = nodep.get("virtual_network_interface_requirements", "")
+ if "protocol" in nodep:
+ node_protocol = nodep['protocol'][0]
+ cp['properties']['protocol_data'] = nodep['protocol']
+ cp_protocol = cp['properties']['protocol_data'][0]
+ # NOTE(review): output key is spelled 'asscociated_layer_protocol'
+ # (sic); it is a runtime key consumers may depend on — confirm
+ # before correcting the spelling.
+ cp_protocol['asscociated_layer_protocol'] = node_protocol['associated_layer_protocol']
+ if "address_data" in node_protocol:
+ cp_protocol['address_data'] = node_protocol['address_data'][0]
+
+ cp['vl_id'] = self._get_node_vl_id(node)
+ cp['vdu_id'] = self._get_node_vdu_id(node)
+ vls = self._buil_cp_vls(node)
+ # 'vls' is only attached when the CP links to multiple VLs.
+ if len(vls) > 1:
+ cp['vls'] = vls
+ cps.append(cp)
+ return cps
+
+ def get_all_volume_storage(self, nodeTemplates, node_types):
+ # Extract Vdu.VirtualStorage nodes; 'description' is optional.
+ rets = []
+ for node in nodeTemplates:
+ if self.model.isNodeTypeX(node, node_types, VDU_STORAGE_TYPE):
+ ret = {}
+ ret['volume_storage_id'] = node['name']
+ if 'description' in node:
+ ret['description'] = node['description']
+ ret['properties'] = node['properties']
+ rets.append(ret)
+ return rets
+
+ def get_all_vdu(self, nodeTemplates, node_types):
+ # Extract Vdu.Compute nodes: rename boot_data -> user_data, embed any
+ # inject_files contents as base64, and attach compute capabilities,
+ # linked VLs/CPs, dependencies and artifacts.
+ rets = []
+ inject_files = []
+ for node in nodeTemplates:
+ logger.debug("nodeTemplates :%s", node)
+ if self.model.isNodeTypeX(node, node_types, VDU_COMPUTE_TYPE):
+ ret = {}
+ ret['vdu_id'] = node['name']
+ ret['type'] = node['nodeType']
+ if 'description' in node:
+ ret['description'] = node['description']
+ ret['properties'] = node['properties']
+ if 'boot_data' in node['properties']:
+ ret['properties']['user_data'] = node['properties']['boot_data']
+ del ret['properties']['boot_data']
+ if 'inject_files' in node['properties']:
+ inject_files = node['properties']['inject_files']
+ if inject_files is not None:
+ # inject_files may be a list of entries or a single dict;
+ # each entry's source file (relative to the CSAR base path)
+ # is read and embedded as source_data_base64.
+ if isinstance(inject_files, list):
+ for inject_file in inject_files:
+ source_path = os.path.join(self.model.basepath, inject_file['source_path'])
+ with open(source_path, "rb") as f:
+ source_data = f.read()
+ source_data_base64 = base64.b64encode(source_data)
+ inject_file["source_data_base64"] = source_data_base64.decode()
+ if isinstance(inject_files, dict):
+ source_path = os.path.join(self.model.basepath, inject_files['source_path'])
+ with open(source_path, "rb") as f:
+ source_data = f.read()
+ source_data_base64 = base64.b64encode(source_data)
+ inject_files["source_data_base64"] = source_data_base64.decode()
+ ret['dependencies'] = [self.model.get_requirement_node_name(x) for x in self.model.getNodeDependencys(node)]
+ virtual_compute = self.model.getCapabilityByName(node, 'virtual_compute')
+ if virtual_compute is not None and 'properties' in virtual_compute:
+ vc = {}
+ vc['virtual_cpu'] = virtual_compute['properties']['virtual_cpu']
+ vc['virtual_memory'] = virtual_compute['properties']['virtual_memory']
+ vc['virtual_storages'] = virtual_compute['properties'].get("virtual_local_storage", {})
+ ret['virtual_compute'] = vc
+ ret['vls'] = self._get_linked_vl_ids(node, nodeTemplates)
+ ret['cps'] = self._get_virtal_binding_cp_ids(node, nodeTemplates)
+ ret['artifacts'] = self.model.build_artifacts(node)
+ rets.append(ret)
+ logger.debug("rets:%s", rets)
+ return rets
+
+    def get_all_endpoint_exposed(self):
+        """Return the VNF's exposed endpoints, or {} when no vnf was parsed.
+
+        external_cps come from the vnf requirements, forward_cps from its
+        capabilities; both are lists of {key_name, cpd_id} dicts.
+        """
+        if self.model.vnf:
+            external_cps = self._get_external_cps(self.model.vnf.get('requirements', None))
+            forward_cps = self._get_forward_cps(self.model.vnf.get('capabilities', None))
+            return {"external_cps": external_cps, "forward_cps": forward_cps}
+        return {}
+
+    def _get_property(self, properties, metadata, ptype, meta_types):
+        """Back-fill properties[ptype] from metadata when missing or empty.
+
+        meta_types is an ordered list of candidate metadata keys; note the
+        loop does not break on the first hit, so the LAST non-empty
+        candidate wins.
+        """
+        if ptype not in properties or properties[ptype] == "":
+            for mtype in meta_types:
+                data = metadata.get(mtype, "")
+                if data != "":
+                    properties[ptype] = data
+
+    def _trans_virtual_storage(self, virtual_storage):
+        """Normalize a virtual_storage requirement to {"virtual_storage_id": ...}.
+
+        The requirement may be a plain node-name string or a structured
+        requirement object resolved via the model helper.
+        """
+        if isinstance(virtual_storage, str):
+            return {"virtual_storage_id": virtual_storage}
+        else:
+            ret = {}
+            ret['virtual_storage_id'] = self.model.get_requirement_node_name(virtual_storage)
+            return ret
+
+    def _get_linked_vl_ids(self, node, node_templates):
+        """Return the virtual-link node names reachable from this VDU.
+
+        Walks the CPs bound to the node, then each CP's 'virtual_link'
+        requirements. Duplicates are not filtered.
+        """
+        vl_ids = []
+        cps = self._get_virtal_binding_cps(node, node_templates)
+        for cp in cps:
+            vl_reqs = self.model.getRequirementByName(cp, 'virtual_link')
+            for vl_req in vl_reqs:
+                vl_ids.append(self.model.get_requirement_node_name(vl_req))
+        return vl_ids
+
+    def _get_virtal_binding_cp_ids(self, node, nodeTemplates):
+        # Names of the CP nodes virtually bound to this VDU node.
+        return [x['name'] for x in self._get_virtal_binding_cps(node, nodeTemplates)]
+
+    def _get_virtal_binding_cps(self, node, nodeTemplates):
+        """Return the CP node templates whose VIRTUAL_BINDING requirement
+        points back at this node (matched case-insensitively by key prefix)."""
+        cps = []
+        for tmpnode in nodeTemplates:
+            if 'requirements' in tmpnode:
+                for item in tmpnode['requirements']:
+                    for key, value in list(item.items()):
+                        if key.upper().startswith('VIRTUAL_BINDING'):
+                            req_node_name = self.model.get_requirement_node_name(value)
+                            if req_node_name is not None and req_node_name == node['name']:
+                                cps.append(tmpnode)
+        return cps
+
+    def _get_node_vdu_id(self, node):
+        """Return the first VDU this CP is bound to, or "" if none."""
+        vdu_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_binding')]
+        if len(vdu_ids) > 0:
+            return vdu_ids[0]
+        return ""
+
+    def _get_node_vl_id(self, node):
+        """Return the first virtual link this CP connects to, or "" if none."""
+        vl_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
+        if len(vl_ids) > 0:
+            return vl_ids[0]
+        return ""
+
+    def _buil_cp_vls(self, node):
+        # Build one vl dict per 'virtual_link' requirement of this CP.
+        # NOTE(review): method name is a typo for "_build_cp_vls"; kept as-is
+        # because sibling code in this file calls it by this name.
+        return [self._build_cp_vl(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
+
+    def _build_cp_vl(self, req):
+        """Flatten one virtual_link requirement into a dict.
+
+        Starts from {'vl_id': <target node>} and merges in any relationship
+        properties; a property literally named 'vl_id' would overwrite it.
+        """
+        cp_vl = {}
+        cp_vl['vl_id'] = self.model.get_prop_from_obj(req, 'node')
+        relationship = self.model.get_prop_from_obj(req, 'relationship')
+        if relationship is not None:
+            properties = self.model.get_prop_from_obj(relationship, 'properties')
+            if properties is not None and isinstance(properties, dict):
+                for key, value in list(properties.items()):
+                    cp_vl[key] = value
+        return cp_vl
+
+    def _get_external_cps(self, vnf_requirements):
+        """Map vnf requirements to [{"key_name": ..., "cpd_id": ...}].
+
+        Accepts either a dict (TOSCA short form) or a list of single-entry
+        dicts; when a value is a non-empty list only its first element is
+        used as the cpd_id.
+        """
+        external_cps = []
+        if vnf_requirements:
+            if isinstance(vnf_requirements, dict):
+                for key, value in list(vnf_requirements.items()):
+                    if isinstance(value, list) and len(value) > 0:
+                        external_cps.append({"key_name": key, "cpd_id": value[0]})
+                    else:
+                        external_cps.append({"key_name": key, "cpd_id": value})
+            elif isinstance(vnf_requirements, list):
+                for vnf_requirement in vnf_requirements:
+                    for key, value in list(vnf_requirement.items()):
+                        if isinstance(value, list) and len(value) > 0:
+                            external_cps.append({"key_name": key, "cpd_id": value[0]})
+                        else:
+                            external_cps.append({"key_name": key, "cpd_id": value})
+        return external_cps
+
+    def _get_forward_cps(self, vnf_capabilities):
+        """Map vnf capabilities (a dict) to [{"key_name": ..., "cpd_id": ...}];
+        list values contribute only their first element as the cpd_id."""
+        forward_cps = []
+        if vnf_capabilities:
+            for key, value in list(vnf_capabilities.items()):
+                if isinstance(value, list) and len(value) > 0:
+                    forward_cps.append({"key_name": key, "cpd_id": value[0]})
+                else:
+                    forward_cps.append({"key_name": key, "cpd_id": value})
+        return forward_cps
diff --git a/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_base.py b/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_base.py
new file mode 100644
index 0000000..7b3a1a0
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_base.py
@@ -0,0 +1,236 @@
+# Copyright 2019 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+import os
+import base64
+
+
+logger = logging.getLogger(__name__)
+
+SECTIONS = (VDU_COMPUTE_TYPE, VNF_VL_TYPE, VDU_CP_TYPE, VDU_STORAGE_TYPE) = \
+ ('tosca.nodes.nfv.Vdu.Compute', 'tosca.nodes.nfv.VnfVirtualLink', 'tosca.nodes.nfv.VduCp', 'tosca.nodes.nfv.Vdu.VirtualStorage')
+
+
+class VnfdSOLBase():
+
+ def __init__(self, model):
+ self.model = model
+
+ def build_vnf(self, tosca):
+ vnf = self.model.get_substitution_mappings(tosca)
+ properties = vnf.get("properties", {})
+ metadata = vnf.get("metadata", {})
+ if properties.get("descriptor_id", "") == "":
+ descriptor_id = metadata.get("descriptor_id", "")
+ if descriptor_id == "":
+ descriptor_id = metadata.get("id", "")
+ if descriptor_id == "":
+ descriptor_id = metadata.get("UUID", "")
+ properties["descriptor_id"] = descriptor_id
+
+ if properties.get("descriptor_version", "") == "":
+ version = metadata.get("template_version", "")
+ if version == "":
+ version = metadata.get("version", "")
+ properties["descriptor_version"] = version
+
+ if properties.get("provider", "") == "":
+ provider = metadata.get("template_author", "")
+ if provider == "":
+ provider = metadata.get("provider", "")
+ properties["provider"] = provider
+
+ if properties.get("template_name", "") == "":
+ template_name = metadata.get("template_name", "")
+ if template_name == "":
+ template_name = metadata.get("template_name", "")
+ properties["template_name"] = template_name
+ logger.debug("vnf:%s", vnf)
+ return vnf
+
+ def get_all_vl(self, nodeTemplates, node_types):
+ vls = []
+ for node in nodeTemplates:
+ if self.model.isNodeTypeX(node, node_types, VNF_VL_TYPE):
+ vl = dict()
+ vl['vl_id'] = node['name']
+ vl['description'] = node['description']
+ vl['properties'] = node['properties']
+ vls.append(vl)
+ return vls
+
+ def get_all_cp(self, nodeTemplates, node_types):
+ cps = []
+ for node in nodeTemplates:
+ if self.model.isNodeTypeX(node, node_types, VDU_CP_TYPE):
+ cp = {}
+ cp['cp_id'] = node['name']
+ cp['cpd_id'] = node['name']
+ cp['description'] = node['description']
+ cp['properties'] = node['properties']
+ cp['vl_id'] = self._get_node_vl_id(node)
+ cp['vdu_id'] = self._get_node_vdu_id(node)
+ vls = self._buil_cp_vls(node)
+ if len(vls) > 1:
+ cp['vls'] = vls
+ cps.append(cp)
+ return cps
+
+ def get_all_volume_storage(self, nodeTemplates, node_types):
+ rets = []
+ for node in nodeTemplates:
+ if self.model.isNodeTypeX(node, node_types, VDU_STORAGE_TYPE):
+ ret = {}
+ ret['volume_storage_id'] = node['name']
+ if 'description' in node:
+ ret['description'] = node['description']
+ ret['properties'] = node['properties']
+ rets.append(ret)
+ return rets
+
+ def get_all_vdu(self, nodeTemplates, node_types):
+ rets = []
+ inject_files = []
+ for node in nodeTemplates:
+ logger.debug("nodeTemplates :%s", node)
+ if self.model.isNodeTypeX(node, node_types, VDU_COMPUTE_TYPE):
+ ret = {}
+ ret['vdu_id'] = node['name']
+ ret['type'] = node['nodeType']
+ if 'description' in node:
+ ret['description'] = node['description']
+ ret['properties'] = node['properties']
+ if 'inject_files' in node['properties']:
+ inject_files = node['properties']['inject_files']
+ if inject_files is not None:
+ if isinstance(inject_files, list):
+ for inject_file in inject_files:
+ source_path = os.path.join(self.model.basepath, inject_file['source_path'])
+ with open(source_path, "rb") as f:
+ source_data = f.read()
+ source_data_base64 = base64.b64encode(source_data)
+ inject_file["source_data_base64"] = source_data_base64.decode()
+ if isinstance(inject_files, dict):
+ source_path = os.path.join(self.model.basepath, inject_files['source_path'])
+ with open(source_path, "rb") as f:
+ source_data = f.read()
+ source_data_base64 = base64.b64encode(source_data)
+ inject_files["source_data_base64"] = source_data_base64.decode()
+ virtual_storages = self.model.getRequirementByName(node, 'virtual_storage')
+ ret['virtual_storages'] = list(map(functools.partial(self._trans_virtual_storage), virtual_storages))
+ ret['dependencies'] = [self.model.get_requirement_node_name(x) for x in self.model.getNodeDependencys(node)]
+ virtual_compute = self.model.getCapabilityByName(node, 'virtual_compute')
+ if virtual_compute is not None and 'properties' in virtual_compute:
+ ret['virtual_compute'] = virtual_compute['properties']
+ ret['vls'] = self._get_linked_vl_ids(node, nodeTemplates)
+ ret['cps'] = self._get_virtal_binding_cp_ids(node, nodeTemplates)
+ ret['artifacts'] = self.model.build_artifacts(node)
+ rets.append(ret)
+ logger.debug("rets:%s", rets)
+ return rets
+
+ def get_all_endpoint_exposed(self):
+ if self.model.vnf:
+ external_cps = self._get_external_cps(self.model.vnf.get('requirements', None))
+ forward_cps = self._get_forward_cps(self.model.vnf.get('capabilities', None))
+ return {"external_cps": external_cps, "forward_cps": forward_cps}
+ return {}
+
+ def _trans_virtual_storage(self, virtual_storage):
+ if isinstance(virtual_storage, str):
+ return {"virtual_storage_id": virtual_storage}
+ else:
+ ret = {}
+ ret['virtual_storage_id'] = self.model.get_requirement_node_name(virtual_storage)
+ return ret
+
+ def _get_linked_vl_ids(self, node, node_templates):
+ vl_ids = []
+ cps = self._get_virtal_binding_cps(node, node_templates)
+ for cp in cps:
+ vl_reqs = self.model.getRequirementByName(cp, 'virtual_link')
+ for vl_req in vl_reqs:
+ vl_ids.append(self.model.get_requirement_node_name(vl_req))
+ return vl_ids
+
+ def _get_virtal_binding_cp_ids(self, node, nodeTemplates):
+ return [x['name'] for x in self._get_virtal_binding_cps(node, nodeTemplates)]
+
+ def _get_virtal_binding_cps(self, node, nodeTemplates):
+ cps = []
+ for tmpnode in nodeTemplates:
+ if 'requirements' in tmpnode:
+ for item in tmpnode['requirements']:
+ for key, value in list(item.items()):
+ if key.upper().startswith('VIRTUAL_BINDING'):
+ req_node_name = self.model.get_requirement_node_name(value)
+ if req_node_name is not None and req_node_name == node['name']:
+ cps.append(tmpnode)
+ return cps
+
+ def _get_node_vdu_id(self, node):
+ vdu_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_binding')]
+ if len(vdu_ids) > 0:
+ return vdu_ids[0]
+ return ""
+
+ def _get_node_vl_id(self, node):
+ vl_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
+ if len(vl_ids) > 0:
+ return vl_ids[0]
+ return ""
+
+ def _buil_cp_vls(self, node):
+ return [self._build_cp_vl(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
+
+ def _build_cp_vl(self, req):
+ cp_vl = {}
+ cp_vl['vl_id'] = self.model.get_prop_from_obj(req, 'node')
+ relationship = self.model.get_prop_from_obj(req, 'relationship')
+ if relationship is not None:
+ properties = self.model.get_prop_from_obj(relationship, 'properties')
+ if properties is not None and isinstance(properties, dict):
+ for key, value in list(properties.items()):
+ cp_vl[key] = value
+ return cp_vl
+
+ def _get_external_cps(self, vnf_requirements):
+ external_cps = []
+ if vnf_requirements:
+ if isinstance(vnf_requirements, dict):
+ for key, value in list(vnf_requirements.items()):
+ if isinstance(value, list) and len(value) > 0:
+ external_cps.append({"key_name": key, "cpd_id": value[0]})
+ else:
+ external_cps.append({"key_name": key, "cpd_id": value})
+ elif isinstance(vnf_requirements, list):
+ for vnf_requirement in vnf_requirements:
+ for key, value in list(vnf_requirement.items()):
+ if isinstance(value, list) and len(value) > 0:
+ external_cps.append({"key_name": key, "cpd_id": value[0]})
+ else:
+ external_cps.append({"key_name": key, "cpd_id": value})
+ return external_cps
+
+ def _get_forward_cps(self, vnf_capabilities):
+ forward_cps = []
+ if vnf_capabilities:
+ for key, value in list(vnf_capabilities.items()):
+ if isinstance(value, list) and len(value) > 0:
+ forward_cps.append({"key_name": key, "cpd_id": value[0]})
+ else:
+ forward_cps.append({"key_name": key, "cpd_id": value})
+ return forward_cps
diff --git a/catalog/pub/utils/values.py b/catalog/pub/utils/values.py
new file mode 100644
index 0000000..d02d544
--- /dev/null
+++ b/catalog/pub/utils/values.py
@@ -0,0 +1,33 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def ignore_case_get(args, key, def_val=""):
+    """Look up key in args case-insensitively, returning def_val when absent.
+
+    An exact-case hit is preferred; otherwise the first key whose upper-cased
+    form matches wins. Assumes args keys are strings (uses .upper()).
+    """
+    if not key:
+        return def_val
+    if key in args:
+        return args[key]
+    for old_key in args:
+        if old_key.upper() == key.upper():
+            return args[old_key]
+    return def_val
+
+
+def remove_none_key(data, none_list=None):
+    """Recursively drop dict entries / list items that count as "none".
+
+    Membership uses ==, so values equal to any item of none_list are removed.
+    Note the dict branch filters on the ORIGINAL value v, so a nested
+    container that only becomes empty after cleaning is still kept.
+    """
+    none_list = none_list if none_list else [None, '', 'NULL', 'None', [], {}]
+    if isinstance(data, dict):
+        data = dict([(k, remove_none_key(v, none_list)) for k, v in list(data.items()) if v not in none_list])
+    if isinstance(data, list):
+        data = [remove_none_key(s, none_list) for s in data if s not in none_list]
+    return data
diff --git a/catalog/samples/__init__.py b/catalog/samples/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/catalog/samples/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/samples/tests.py b/catalog/samples/tests.py
new file mode 100644
index 0000000..2be964a
--- /dev/null
+++ b/catalog/samples/tests.py
@@ -0,0 +1,45 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import json
+from django.test import Client
+from rest_framework import status
+
+
+class SampleViewTest(unittest.TestCase):
+    """GET /samples/ returns 200 with the static {"status": "active"} body."""
+    def setUp(self):
+        self.client = Client()
+
+    def tearDown(self):
+        pass
+
+    def test_sample(self):
+
+        response = self.client.get("/samples/")
+        self.assertEqual(status.HTTP_200_OK, response.status_code, response.content)
+        resp_data = json.loads(response.content)
+        self.assertEqual({"status": "active"}, resp_data)
+
+
+class CallbackSampleTest(unittest.TestCase):
+    """GET /api/catalog/v1/callback_sample returns 204 No Content."""
+    def setUp(self):
+        self.client = Client()
+
+    def tearDown(self):
+        pass
+
+    def test_callback(self):
+        response = self.client.get("/api/catalog/v1/callback_sample")
+        self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code, response.content)
diff --git a/catalog/samples/urls.py b/catalog/samples/urls.py
new file mode 100644
index 0000000..8e3483c
--- /dev/null
+++ b/catalog/samples/urls.py
@@ -0,0 +1,22 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.conf.urls import url
+from catalog.samples import views
+
+urlpatterns = [
+    # Maintenance helper: count / bulk-delete rows of dash-separated model names.
+    url(r'^api/catalog/v1/mandb/(?P<modelName>[a-zA-Z\-]+)$', views.TablesList.as_view()),
+    # Minimal callback target used by integration tests (always 204).
+    url(r'^api/catalog/v1/callback_sample$', views.CallbackSample.as_view()),
+    # Liveness-style sample endpoint.
+    url(r'^samples/$', views.SampleList.as_view())
+]
diff --git a/catalog/samples/views.py b/catalog/samples/views.py
new file mode 100644
index 0000000..006f0e5
--- /dev/null
+++ b/catalog/samples/views.py
@@ -0,0 +1,66 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import traceback
+
+from rest_framework import status
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+logger = logging.getLogger(__name__)
+
+
+class SampleList(APIView):
+    """
+    List all samples.
+    """
+    def get(self, request, format=None):
+        # Static payload; serves as a simple liveness check.
+        logger.debug("get")
+        return Response({"status": "active"})
+
+
+class CallbackSample(APIView):
+    """
+    Callback Sample.
+    """
+    def get(self, request, format=None):
+        # Always answers 204 with an empty body.
+        logger.debug("Callback Sample")
+        return Response(data={}, status=status.HTTP_204_NO_CONTENT)
+
+
+class TablesList(APIView):
+ def delete(self, request, modelName):
+ logger.debug("Start delete model %s", modelName)
+ try:
+ modelNames = modelName.split("-")
+ for name in modelNames:
+ model_obj = eval("models.%s.objects" % name)
+ model_obj.filter().delete()
+ logger.debug("End delete model %s", name)
+ except:
+ logger.error(traceback.format_exc())
+ return Response(data={"error": "failed"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+ return Response(data={}, status=status.HTTP_204_NO_CONTENT)
+
+ def get(self, request, modelName):
+ logger.debug("Get model %s", modelName)
+ count = 0
+ try:
+ model_obj = eval("models.%s.objects" % modelName)
+ count = len(model_obj.filter())
+ except:
+ logger.error(traceback.format_exc())
+ return Response(data={"error": "failed"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+ return Response(data={"count": count}, status=status.HTTP_200_OK)
diff --git a/catalog/settings.py b/catalog/settings.py
new file mode 100644
index 0000000..51c9a88
--- /dev/null
+++ b/catalog/settings.py
@@ -0,0 +1,197 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import platform
+
+import catalog.pub.redisco
+
+from catalog.pub.config.config import REDIS_HOST, REDIS_PORT, REDIS_PASSWD
+from catalog.pub.config.config import DB_NAME, DB_IP, DB_USER, DB_PASSWD, DB_PORT
+from catalog.pub.config import config as pub_config
+from logging import config as log_config
+from onaplogging import monkey
+monkey.patch_all()
+
+# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+# Quick-start development settings - unsuitable for production
+# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
+
+# SECURITY WARNING: keep the secret key used in production secret!
+SECRET_KEY = '3o-wney!99y)^h3v)0$j16l9=fdjxcb+a8g+q3tfbahcnu2b0o'
+
+# SECURITY WARNING: don't run with debug turned on in production!
+DEBUG = True
+
+ALLOWED_HOSTS = ['*']
+
+# Application definition
+
+INSTALLED_APPS = [
+ 'django.contrib.auth',
+ 'django.contrib.contenttypes',
+ 'django.contrib.sessions',
+ 'django.contrib.messages',
+ 'django.contrib.staticfiles',
+ 'django.contrib.admin',
+ 'rest_framework',
+ 'catalog.pub.database',
+ 'catalog.samples',
+ 'catalog.swagger',
+ 'drf_yasg',
+]
+
+# drf-yasg
+SWAGGER_SETTINGS = {
+ 'LOGIN_URL': '/admin/login',
+ 'LOGOUT_URL': '/admin/logout',
+ 'DEFAULT_INFO': 'catalog.swagger.urls.swagger_info'
+}
+
+TEMPLATES = [
+ {
+ 'BACKEND': 'django.template.backends.django.DjangoTemplates',
+ 'DIRS': [],
+ 'APP_DIRS': True,
+ 'OPTIONS': {
+ 'context_processors': [
+ 'django.template.context_processors.debug',
+ 'django.template.context_processors.request',
+ 'django.contrib.auth.context_processors.auth',
+ 'django.contrib.messages.context_processors.messages',
+ ],
+ },
+ },
+]
+
+MIDDLEWARE_CLASSES = [
+ 'django.middleware.security.SecurityMiddleware',
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ 'django.middleware.common.CommonMiddleware',
+ 'django.middleware.csrf.CsrfViewMiddleware',
+ 'django.contrib.auth.middleware.AuthenticationMiddleware',
+ 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
+ 'django.contrib.messages.middleware.MessageMiddleware',
+ 'django.middleware.clickjacking.XFrameOptionsMiddleware',
+ 'catalog.middleware.LogContextMiddleware',
+]
+
+ROOT_URLCONF = 'catalog.urls'
+
+WSGI_APPLICATION = 'catalog.wsgi.application'
+
+REST_FRAMEWORK = {
+ 'DEFAULT_RENDERER_CLASSES': (
+ 'rest_framework.renderers.JSONRenderer',
+ ),
+
+ 'DEFAULT_PARSER_CLASSES': (
+ 'rest_framework.parsers.JSONParser',
+ 'rest_framework.parsers.MultiPartParser',
+ )
+}
+
+DATABASES = {
+ 'default': {
+ 'ENGINE': 'django.db.backends.mysql',
+ 'NAME': DB_NAME,
+ 'HOST': DB_IP,
+ 'PORT': DB_PORT,
+ 'USER': DB_USER,
+ 'PASSWORD': DB_PASSWD,
+ },
+}
+
+catalog.pub.redisco.connection_setup(host=REDIS_HOST, port=REDIS_PORT, password=REDIS_PASSWD, db=0)
+# CACHE_BACKEND = 'redis_cache.cache://%s@%s:%s' % (REDIS_PASSWD, REDIS_HOST, REDIS_PORT)
+
+TIME_ZONE = 'UTC'
+
+# Static files (CSS, JavaScript, Images)
+# https://docs.djangoproject.com/en/1.6/howto/static-files/
+
+STATIC_URL = '/static/'
+
+STATICFILES_DIRS = [
+ os.path.join(BASE_DIR, "static")
+]
+
+pub_config.CATALOG_ROOT_PATH = os.path.join(STATICFILES_DIRS[0], "catalog")
+pub_config.CATALOG_URL_PATH = "static/catalog"
+pub_config.SDC_BASE_URL = "http://%s:%s/api" % (pub_config.MSB_SERVICE_IP, pub_config.MSB_SERVICE_PORT)
+
+if platform.system() == 'Windows' or 'test' in sys.argv:
+ LOGGING = {
+ 'version': 1,
+ 'disable_existing_loggers': True,
+ 'formatters': {
+ 'standard': {
+ 'format': '%(asctime)s:[%(name)s]:[%(filename)s]-[%(lineno)d] [%(levelname)s]:%(message)s',
+ },
+ },
+ 'filters': {
+ },
+ 'handlers': {
+ 'catalog_handler': {
+ 'level': 'DEBUG',
+ 'class': 'logging.handlers.RotatingFileHandler',
+ 'filename': os.path.join(BASE_DIR, 'logs/runtime_catalog.log'),
+ 'formatter': 'standard',
+ 'maxBytes': 1024 * 1024 * 50,
+ 'backupCount': 5,
+ },
+ },
+
+ 'loggers': {
+ 'catalog': {
+ 'handlers': ['catalog_handler'],
+ 'level': 'DEBUG',
+ 'propagate': False
+ },
+ 'tosca': {
+ 'handlers': ['catalog_handler'],
+ 'level': 'DEBUG',
+ 'propagate': False
+ },
+ }
+ }
+else:
+ LOGGING_CONFIG = None
+ # yaml configuration of logging
+ LOGGING_FILE = os.path.join(BASE_DIR, 'catalog/log.yml')
+ log_config.yamlConfig(filepath=LOGGING_FILE, watchDog=True)
+
+if 'test' in sys.argv:
+    # Test-run overrides: no MSB registration, in-memory sqlite instead of
+    # MySQL, and default DRF renderers/parsers.
+    pub_config.REG_TO_MSB_WHEN_START = False
+
+    DATABASES = {}
+    DATABASES['default'] = {
+        'ENGINE': 'django.db.backends.sqlite3',
+        'NAME': ':memory:',
+    }
+    REST_FRAMEWORK = {}
+
+    if platform.system() == 'Linux':
+        # Emit JUnit-style XML reports on CI (Linux only).
+        TEST_RUNNER = 'xmlrunner.extra.djangotestrunner.XMLTestRunner'
+        TEST_OUTPUT_VERBOSE = True
+        TEST_OUTPUT_DESCRIPTIONS = True
+        TEST_OUTPUT_DIR = 'test-reports'
+
+    # Stub the redis-backed id generator so tests never touch redis.
+    import mock
+    from catalog.pub.utils import idutil
+    idutil.get_auto_id = mock.Mock()
+    idutil.get_auto_id.return_value = 1
diff --git a/catalog/swagger/__init__.py b/catalog/swagger/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/catalog/swagger/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/swagger/management/__init__.py b/catalog/swagger/management/__init__.py
new file mode 100644
index 0000000..342c2a8
--- /dev/null
+++ b/catalog/swagger/management/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/swagger/management/commands/__init__.py b/catalog/swagger/management/commands/__init__.py
new file mode 100644
index 0000000..342c2a8
--- /dev/null
+++ b/catalog/swagger/management/commands/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/swagger/management/commands/export_swagger.py b/catalog/swagger/management/commands/export_swagger.py
new file mode 100644
index 0000000..bc5fd1a
--- /dev/null
+++ b/catalog/swagger/management/commands/export_swagger.py
@@ -0,0 +1,36 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+
+from django.core.management.base import BaseCommand
+from django.test import Client
+
+
+class Command(BaseCommand):
+    """Management command: dump the generated swagger JSON to a file.
+
+    Fetches /api/catalog/v1/swagger.json via the Django test client and
+    writes response.data (as JSON) to the file named by -f/--name.
+    """
+    def add_arguments(self, parser):
+        parser.add_argument(
+            '-f',
+            '--name',
+            action='store',
+            dest='name',
+            default='swagger.json',
+            help='name of swagger file.',
+        )
+
+    def handle(self, *args, **options):
+        self.client = Client()
+        response = self.client.get("/api/catalog/v1/swagger.json")
+        with open(options['name'], 'w') as swagger_file:
+            swagger_file.write(json.dumps(response.data))
+        print("swagger api is written to %s" % options['name'])
diff --git a/catalog/swagger/tests.py b/catalog/swagger/tests.py
new file mode 100644
index 0000000..fc51b62
--- /dev/null
+++ b/catalog/swagger/tests.py
@@ -0,0 +1,28 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+
+import unittest
+
+from django.test import Client
+from rest_framework import status
+
+
+class SwaggerViewTest(unittest.TestCase):
+    """The swagger.json endpoint serves a Swagger 2.0 schema with HTTP 200."""
+    def setUp(self):
+        self.client = Client()
+
+    def tearDown(self):
+        pass
+
+    def test_swagger(self):
+        response = self.client.get("/api/catalog/v1/swagger.json")
+        self.assertEqual(status.HTTP_200_OK, response.status_code, response.content)
+        self.assertEqual("2.0", response.data.get("swagger"))
diff --git a/catalog/swagger/urls.py b/catalog/swagger/urls.py
new file mode 100644
index 0000000..5437ee5
--- /dev/null
+++ b/catalog/swagger/urls.py
@@ -0,0 +1,43 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.conf.urls import url
+from drf_yasg import openapi
+from drf_yasg.views import get_schema_view
+from rest_framework import permissions
+
+# Add code for generating swagger automatically.
+# API metadata shown on the generated swagger/redoc pages.
+swagger_info = openapi.Info(
+    title="VFC Catalog API",
+    default_version='v1',
+    description="""
+
+The `swagger-ui` view can be found [here](/api/catalog/v1/swagger).
+The `ReDoc` view can be found [here](/api/catalog/v1/redoc).
+The swagger YAML document can be found [here](/api/catalog/v1/swagger.yaml).
+The swagger JSON document can be found [here](/api/catalog/v1/swagger.json)."""
+)
+
+# Public, unauthenticated schema view validated with ssv and flex.
+SchemaView = get_schema_view(
+    validators=['ssv', 'flex'],
+    public=True,
+    permission_classes=(permissions.AllowAny,),
+)
+
+urlpatterns = [
+    # url(r'^api/catalog/v1/swagger.json$', SwaggerJsonView.as_view()),
+    # Raw schema (json/yaml), interactive swagger-ui, and redoc renderings.
+    url(r'^api/catalog/v1/swagger(?P<format>.json|.yaml)$', SchemaView.without_ui(cache_timeout=0), name='schema-json'),
+    url(r'^api/catalog/v1/swagger$', SchemaView.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
+    url(r'^api/catalog/v1/redoc$', SchemaView.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
+]
diff --git a/catalog/swagger/vfc.catalog.swagger.json b/catalog/swagger/vfc.catalog.swagger.json
new file mode 100644
index 0000000..1327462
--- /dev/null
+++ b/catalog/swagger/vfc.catalog.swagger.json
@@ -0,0 +1,793 @@
+{
+ "swagger": "2.0",
+ "info": {
+ "version": "1.0.0",
+ "title": "ONAP VFC Catalog Rest API",
+ "description": "VFC Catalog Management API.",
+ "contact": {
+ "name": "ONAP VFC team",
+ "email": "onap-discuss@lists.onap.org",
+ "url": "https://gerrit.onap.org/r/#/admin/projects/vfc/nfvo/catalog"
+ }
+ },
+ "basePath": "/api/catalog/v1",
+ "schemes": [
+ "http",
+ "https"
+ ],
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "paths": {
+ "/nspackages": {
+ "get": {
+ "tags": [
+ "nspackage"
+ ],
+ "summary": "query ns packages info",
+ "description": "query ns packages info",
+ "operationId": "query_ns_packages",
+ "parameters": [],
+ "responses": {
+ "200": {
+ "description": "successful operation",
+ "schema": {
+ "$ref": "#/definitions/NsPkgListInfo"
+ }
+ },
+ "404": {
+ "description": "URL not found"
+ },
+ "500": {
+ "description": "internal error"
+ }
+ }
+ },
+ "post": {
+ "tags": [
+ "nspackage"
+ ],
+ "summary": "ns package distribute",
+ "description": "ns package distribute",
+ "operationId": "ns_pkg_distribute",
+ "parameters": [
+ {
+ "in": "body",
+ "name": "body",
+ "description": "distribute request param",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/NsPkgDistributeRequest"
+ }
+ }
+ ],
+ "responses": {
+ "202": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/NsPkgDistributeResponse"
+ }
+ },
+ "404": {
+ "description": "URL not found"
+ },
+ "500": {
+ "description": "internal error"
+ }
+ }
+ }
+ },
+ "/nspackages/{csarId}": {
+ "get": {
+ "tags": [
+ "nspackage"
+ ],
+ "summary": "query ns package info",
+ "description": "query ns package info via ns package csarId",
+ "operationId": "query_ns_package",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "csarId",
+ "in": "path",
+ "description": "csar id of ns package",
+ "required": true,
+ "type": "string"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "successful operation",
+ "schema": {
+ "$ref": "#/definitions/NsPkgDetailInfo"
+ }
+ },
+ "404": {
+ "description": "URL not found"
+ },
+ "500": {
+ "description": "internal error"
+ }
+ }
+ },
+ "delete": {
+ "tags": [
+ "nspackage"
+ ],
+ "summary": "delete ns pkg",
+ "description": "delete ns pkg",
+ "operationId": "delete_ns_pkg",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "csarId",
+ "in": "path",
+ "description": "csar id of ns package",
+ "required": true,
+ "type": "string"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Delete NS Package Response",
+ "schema": {
+ "$ref": "#/definitions/NsPkgDelResponse"
+ }
+ },
+ "404": {
+ "description": "URL not found"
+ },
+ "500": {
+ "description": "internal error"
+ }
+ }
+ }
+ },
+ "/parsernsd": {
+ "post": {
+ "tags": [
+ "model"
+ ],
+ "summary": "ns package model",
+ "description": "ns package model",
+      "operationId": "ns_model_parser",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "in": "body",
+ "name": "body",
+ "description": "distribute request param",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/modelParserRequest"
+ }
+ }
+ ],
+ "responses": {
+ "202": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/modelParserResponse"
+ }
+ },
+ "404": {
+ "description": "URL not found"
+ },
+ "500": {
+ "description": "internal error"
+ }
+ }
+ }
+ },
+ "/vnfpackages": {
+ "get": {
+ "tags": [
+ "vnfpackage"
+ ],
+ "summary": "query vnf packages info",
+ "description": "query vnf packages info",
+ "operationId": "query_vnf_packages",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "parameters": [],
+ "responses": {
+ "200": {
+ "description": "successful operation",
+ "schema": {
+ "$ref": "#/definitions/VnfPkgListInfo"
+ }
+ },
+ "404": {
+ "description": "URL not found"
+ },
+ "500": {
+ "description": "internal error"
+ }
+ }
+ },
+ "post": {
+ "tags": [
+ "vnfpackage"
+ ],
+ "summary": "vnf package distribute",
+ "description": "vnf package distribute",
+ "operationId": "vnf_pkg_distribute",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "in": "body",
+ "name": "body",
+ "description": "distribute request param",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/VnfPkgDistributeRequest"
+ }
+ }
+ ],
+ "responses": {
+ "202": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/VnfPkgDistributeResponse"
+ }
+ },
+ "404": {
+ "description": "URL not found"
+ },
+ "500": {
+ "description": "internal error"
+ }
+ }
+ }
+ },
+ "/vnfpackages/{csarId}": {
+ "get": {
+ "tags": [
+ "vnfpackage"
+ ],
+ "summary": "query vnf package info",
+ "description": "query one vnf package info via vnf package csarId",
+ "operationId": "query_vnf_package",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "csarId",
+ "in": "path",
+ "description": "csar id of vnf package",
+ "required": true,
+ "type": "string"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "successful operation",
+ "schema": {
+ "$ref": "#/definitions/VnfPkgDetailInfo"
+ }
+ },
+ "404": {
+ "description": "URL not found"
+ },
+ "500": {
+ "description": "internal error"
+ }
+ }
+ },
+ "delete": {
+ "tags": [
+ "vnfpackage"
+ ],
+ "summary": "delete vnf package",
+ "description": "delete vnf package",
+ "operationId": "delete_vnf_package",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "name": "csarId",
+ "in": "path",
+ "description": "csar id of vnf package",
+ "required": true,
+ "type": "string"
+ }
+ ],
+ "responses": {
+ "202": {
+ "description": "Delete VNF Pakcage Response",
+ "schema": {
+ "$ref": "#/definitions/VnfPkgDelResponse"
+ }
+ },
+ "404": {
+ "description": "URL not found"
+ },
+ "500": {
+ "description": "internal error"
+ }
+ }
+ }
+ },
+ "/parservnfd": {
+ "post": {
+ "tags": [
+ "model"
+ ],
+ "summary": "vnf package model",
+ "description": "vnf package model",
+ "operationId": "vnf_model_parser",
+ "consumes": [
+ "application/json"
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "parameters": [
+ {
+ "in": "body",
+ "name": "body",
+ "description": "distribute request param",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/modelParserRequest"
+ }
+ }
+ ],
+ "responses": {
+ "202": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/modelParserResponse"
+ }
+ },
+ "404": {
+ "description": "URL not found"
+ },
+ "500": {
+ "description": "internal error"
+ }
+ }
+ }
+ },
+ "/jobs/{jobId}": {
+ "get": {
+ "tags": [
+ "job"
+ ],
+ "summary": "jobstatus",
+ "description": "Get Job Status",
+ "operationId": "get_jobstatus",
+ "parameters": [
+ {
+ "required": true,
+ "type": "string",
+ "description": "job Id",
+ "name": "jobId",
+ "in": "path"
+ },
+ {
+ "required": true,
+ "type": "string",
+ "description": "job response message id",
+ "name": "responseId",
+ "in": "query"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/JobDetailInfo"
+ }
+ }
+ }
+ },
+ "post": {
+ "tags": [
+ "job"
+ ],
+ "summary": "Update Job Status",
+ "description": "Update Job Status",
+ "operationId": "post_jobstatus",
+ "parameters": [
+ {
+ "required": true,
+ "type": "string",
+ "description": "job Id",
+ "name": "jobId",
+ "in": "path"
+ },
+ {
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/PostJobRequest"
+ },
+ "description": "job status",
+ "name": "responseId",
+ "in": "body"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/JobDetailInfo"
+ }
+ }
+ }
+ }
+ }
+ },
+ "definitions": {
+ "NsPkgDistributeRequest": {
+ "type": "object",
+ "properties": {
+ "csarId": {
+ "type": "string",
+ "description": "network service package id, UUID"
+ }
+ }
+ },
+ "NsPkgDistributeResponse": {
+ "type": "object",
+ "properties": {
+ "status": {
+ "type": "string",
+ "description": "Operation status. value is success or failed"
+ },
+ "statusDescription": {
+ "type": "string",
+ "description": "description about the operation result"
+ },
+ "errorCode": {
+ "type": "string",
+ "description": "If the status is failed, the errorcode will be returned"
+ }
+ }
+ },
+ "NsPkgDelResponse": {
+ "type": "object",
+ "properties": {
+ "status": {
+ "type": "string",
+ "description": "Operation status. value is success or failed"
+ },
+ "statusDescription": {
+ "type": "string",
+ "description": "description about the operation result"
+ },
+ "errorCode": {
+ "type": "string",
+ "description": "If the status is failed, the errorcode will be returned"
+ }
+ }
+ },
+ "NsPkgListInfo": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/NsPkgDetailInfo"
+ }
+ },
+ "NsPkgDetailInfo": {
+ "type": "object",
+ "properties": {
+ "csarId": {
+ "type": "string"
+ },
+ "packageInfo": {
+ "$ref": "#/definitions/NsPkgInfo"
+ }
+ }
+ },
+ "NsPkgInfo": {
+ "type": "object",
+ "properties": {
+ "nsPackageId": {
+ "type": "string",
+ "description": "network service package id, UUID, csarId"
+ },
+ "nsdId": {
+ "type": "string",
+ "description": "network service descriptor ID"
+ },
+ "nsdProvider": {
+ "type": "string",
+ "description": "network service designer name"
+ },
+ "nsdVersion": {
+ "type": "string",
+ "description": "network service descriptor version"
+ },
+ "csarName": {
+ "type": "string",
+ "description": "network service package name"
+ },
+ "nsdModel": {
+ "type": "string",
+ "description": "ns JSON string parsed and transformed by parser"
+ },
+ "downloadUrl": {
+ "type": "string",
+ "description": "download url of network service package"
+ }
+ }
+ },
+ "NsInstListInfo": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/NsInstInfo"
+ }
+ },
+ "NsInstInfo": {
+ "type": "object",
+ "properties": {
+ "nsInstanceId": {
+ "type": "string",
+ "description": "network service instance ID"
+ },
+ "nsInstanceName": {
+ "type": "string",
+ "description": "network service instance name"
+ }
+ }
+ },
+ "VnfPkgDistributeRequest": {
+ "type": "object",
+ "properties": {
+ "csarId": {
+ "type": "string",
+ "description": "vnf package id, UUID"
+ }
+ }
+ },
+ "VnfPkgDistributeResponse": {
+ "type": "object",
+ "properties": {
+ "jobId": {
+ "type": "string",
+ "description": "VNF package distribute job ID"
+ }
+ }
+ },
+ "VnfPkgDelResponse": {
+ "type": "object",
+ "properties": {
+ "status": {
+ "type": "string",
+ "description": "Operation status. value is success or failed"
+ },
+ "statusDescription": {
+ "type": "string",
+ "description": "description about the operation result"
+ },
+ "errorCode": {
+ "type": "string",
+ "description": "If the status is failed, the errorcode will be returned"
+ }
+ }
+ },
+ "VnfPkgListInfo": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/VnfPkgDetailInfo"
+ }
+ },
+ "VnfPkgDetailInfo": {
+ "type": "object",
+ "properties": {
+ "csarId": {
+ "type": "string",
+ "description": "vnf package id, UUID"
+ },
+ "packageInfo": {
+ "$ref": "#/definitions/VnfPkgInfo"
+ },
+ "imageInfo": {
+ "$ref": "#/definitions/VnfPkgImgListInfo"
+ }
+ }
+ },
+ "VnfPkgInfo": {
+ "type": "object",
+      "description": "vnf package information",
+ "properties": {
+ "vnfPackageId": {
+ "type": "string",
+ "description": "vnf package id (csarId)"
+ },
+ "csarName": {
+ "type": "string",
+ "description": "The name of the csar"
+ },
+ "vnfdId": {
+ "type": "string",
+ "description": "VNF descriptor ID"
+ },
+ "vnfdProvider": {
+ "type": "string",
+ "description": "VNF descriptor vendor ID"
+ },
+ "vnfdModel": {
+ "type": "string",
+ "description": "The model of the VNF (JSON) encoded to string"
+ },
+ "vnfdVersion": {
+ "type": "string",
+ "description": "VNF descriptor version"
+ },
+ "vnfVersion": {
+ "type": "string",
+ "description": "VNF Software version"
+ },
+ "downloadUrl":{
+ "type": "string",
+ "description": "The URL from which the VNF package can be downloaded"
+ }
+ }
+ },
+ "VnfInstListInfo": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/VnfInstInfo"
+ }
+ },
+ "VnfInstInfo": {
+ "type": "object",
+ "properties": {
+ "vnfInstanceId": {
+ "type": "string",
+ "description": "VNF instance ID"
+ },
+ "vnfInstanceName": {
+ "type": "string",
+ "description": "VNF instance name"
+ }
+ }
+ },
+ "VnfPkgImgListInfo": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/VnfPkgImgInfo"
+ }
+ },
+ "VnfPkgImgInfo": {
+ "type": "object",
+ "properties": {
+ "fileName": {
+ "type": "string",
+ "description": "image file name"
+ },
+ "imageUrl": {
+ "type": "string",
+ "description": "image file path in the csar or image url in external repository"
+ }
+ }
+ },
+ "modelParserRequest":{
+ "type": "object",
+ "properties": {
+ "csarId": {
+ "type": "string",
+ "description": "csar Package Id"
+ },
+ "inputs": {
+ "type": "object",
+ "description": "csar package json inputs"
+ }
+ }
+ },
+ "modelParserResponse":{
+ "type": "object",
+ "properties": {
+ "model": {
+ "type": "object",
+ "description": "csar model json data"
+ }
+ }
+ },
+ "jobResponseInfo": {
+ "type": "object",
+ "properties": {
+ "status": {
+ "type": "string"
+ },
+ "progress":{
+ "type": "string"
+ },
+ "statusDescription": {
+ "type": "string"
+ },
+ "errorCode": {
+ "type": "string"
+ },
+ "responseId": {
+ "type": "string"
+ }
+ }
+ },
+ "PostJobRequest": {
+ "type": "object",
+ "properties": {
+ "progress": {
+ "type": "string"
+ },
+ "desc": {
+ "type": "string"
+ },
+ "errcode": {
+ "type": "string"
+ }
+ }
+ },
+ "JobDetailInfo":{
+ "type": "object",
+ "properties": {
+ "jobId": {
+ "type": "string"
+ },
+ "responseDescriptor":
+ {
+ "type":"object",
+ "properties": {
+ "status": {
+ "type": "string"
+ },
+ "progress":{
+ "type": "string"
+ },
+ "statusDescription": {
+ "type": "string"
+ },
+ "errorCode": {
+ "type": "string"
+ },
+ "responseId": {
+ "type": "string"
+ },
+ "responseHistoryList": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/jobResponseInfo"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/catalog/swagger/views.py b/catalog/swagger/views.py
new file mode 100644
index 0000000..33d0edb
--- /dev/null
+++ b/catalog/swagger/views.py
@@ -0,0 +1,28 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+
+class SwaggerJsonView(APIView):
+ def get(self, request):
+ json_file = os.path.join(os.path.dirname(__file__), 'vfc.catalog.swagger.json')
+ f = open(json_file)
+ json_data = json.JSONDecoder().decode(f.read())
+ f.close()
+ return Response(json_data)
diff --git a/catalog/urls.py b/catalog/urls.py
new file mode 100644
index 0000000..76c972b
--- /dev/null
+++ b/catalog/urls.py
@@ -0,0 +1,37 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.conf.urls import include, url
+from django.contrib import admin
+
+from catalog.pub.config.config import REG_TO_MSB_WHEN_START, REG_TO_MSB_REG_URL, REG_TO_MSB_REG_PARAM, \
+ MSB_SVC_CALALOG_URL, MSB_SVC_NSD_URL, MSB_SVC_VNFPKGM_URL
+
+urlpatterns = [
+ url(r'^api/catalog/v1/admin', admin.site.urls),
+ url(r'^', include('catalog.samples.urls')),
+ url(r'^', include('catalog.packages.urls')),
+ url(r'^', include('catalog.jobs.urls')),
+ url(r'^', include('catalog.swagger.urls')),
+]
+
+# register to MSB at startup
+if REG_TO_MSB_WHEN_START:
+ import json
+ from catalog.pub.utils.restcall import req_by_msb
+ req_by_msb(MSB_SVC_CALALOG_URL, "DELETE")
+ req_by_msb(MSB_SVC_NSD_URL, "DELETE")
+ req_by_msb(MSB_SVC_VNFPKGM_URL, "DELETE")
+ for reg_param in REG_TO_MSB_REG_PARAM:
+ req_by_msb(REG_TO_MSB_REG_URL, "POST", json.JSONEncoder().encode(reg_param))
diff --git a/catalog/wsgi.py b/catalog/wsgi.py
new file mode 100644
index 0000000..a0b4d5d
--- /dev/null
+++ b/catalog/wsgi.py
@@ -0,0 +1,21 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from django.core.wsgi import get_wsgi_application
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "catalog.settings")
+
+application = get_wsgi_application()