aboutsummaryrefslogtreecommitdiffstats
path: root/catalog/pub/utils
diff options
context:
space:
mode:
Diffstat (limited to 'catalog/pub/utils')
-rw-r--r--catalog/pub/utils/__init__.py13
-rw-r--r--catalog/pub/utils/fileutil.py78
-rw-r--r--catalog/pub/utils/idutil.py20
-rw-r--r--catalog/pub/utils/jobutil.py145
-rw-r--r--catalog/pub/utils/restcall.py114
-rw-r--r--catalog/pub/utils/syscomm.py19
-rw-r--r--catalog/pub/utils/tests.py221
-rw-r--r--catalog/pub/utils/timeutil.py19
-rw-r--r--catalog/pub/utils/toscaparser/__init__.py54
-rw-r--r--catalog/pub/utils/toscaparser/basemodel.py534
-rw-r--r--catalog/pub/utils/toscaparser/const.py30
-rw-r--r--catalog/pub/utils/toscaparser/dataentityext.py33
-rw-r--r--catalog/pub/utils/toscaparser/graph.py74
-rw-r--r--catalog/pub/utils/toscaparser/nsdmodel.py220
-rw-r--r--catalog/pub/utils/toscaparser/pnfmodel.py53
-rw-r--r--catalog/pub/utils/toscaparser/sdmodel.py93
-rw-r--r--catalog/pub/utils/toscaparser/servicemodel.py188
-rw-r--r--catalog/pub/utils/toscaparser/testdata/ns/ran.csarbin0 -> 3007 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/ns/service-vIMS.csarbin0 -> 47518 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/pnf/ran-du.csarbin0 -> 2688 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vSBC.csarbin0 -> 11516 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/infra.csarbin0 -> 15716 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbng.csarbin0 -> 15357 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbrgemu.csarbin0 -> 14527 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgmux.csarbin0 -> 14970 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgw.csarbin0 -> 15008 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/infra.csarbin0 -> 15432 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbng.csarbin0 -> 15410 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbrgemu.csarbin0 -> 14569 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgmux.csarbin0 -> 15023 bytes
-rw-r--r--catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgw.csarbin0 -> 23182 bytes
-rw-r--r--catalog/pub/utils/toscaparser/tests.py101
-rw-r--r--catalog/pub/utils/toscaparser/vnfdmodel.py48
-rw-r--r--catalog/pub/utils/toscaparser/vnfdparser/__init__.py23
-rw-r--r--catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_251.py300
-rw-r--r--catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_base.py236
-rw-r--r--catalog/pub/utils/values.py33
37 files changed, 2649 insertions, 0 deletions
diff --git a/catalog/pub/utils/__init__.py b/catalog/pub/utils/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/catalog/pub/utils/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/catalog/pub/utils/fileutil.py b/catalog/pub/utils/fileutil.py
new file mode 100644
index 0000000..6ddfc72
--- /dev/null
+++ b/catalog/pub/utils/fileutil.py
@@ -0,0 +1,78 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import shutil
+import logging
+import tempfile
+import traceback
+import urllib
+import zipfile
+
+
+logger = logging.getLogger(__name__)
+
+
def make_dirs(path):
    """Create directory *path* (including parents); no-op if it already exists."""
    if os.path.exists(path):
        return
    os.makedirs(path, 0o777)
+
+
def delete_dirs(path):
    """Recursively remove *path* if it exists; failures are logged, never raised."""
    try:
        if os.path.exists(path):
            shutil.rmtree(path)
    except Exception as exc:
        logger.error(traceback.format_exc())
        logger.error("Failed to delete %s:%s", path, exc.args[0])
+
+
def download_file_from_http(url, local_dir, file_name):
    """Download *url* into ``local_dir/file_name``.

    Returns a ``(is_download_ok, local_file_name)`` tuple; the path is
    returned even when the download failed.

    Fixes: the file is now written in binary mode when the response body is
    bytes (urlopen returns bytes on Python 3; the old text-mode 'w' write
    raised TypeError), the response and file handles are always released,
    and the bare ``except:`` is narrowed to ``Exception``.
    """
    import urllib.request  # plain "import urllib" does not load the request submodule
    local_file_name = os.path.join(local_dir, file_name)
    is_download_ok = False
    try:
        os.makedirs(local_dir, 0o777, exist_ok=True)
        req = urllib.request.urlopen(url)
        try:
            data = req.read()
        finally:
            req.close()
        # Tolerate str-returning stand-ins (e.g. test doubles) as well as real bytes.
        mode = 'wb' if isinstance(data, bytes) else 'w'
        with open(local_file_name, mode) as save_file:
            save_file.write(data)
        is_download_ok = True
    except Exception:
        logger.error(traceback.format_exc())
        logger.error("Failed to download %s to %s.", url, local_file_name)
    return is_download_ok, local_file_name
+
+
def unzip_file(zip_src, dst_dir, csar_path):
    """Extract archive *zip_src* into *dst_dir* and return the path of
    *csar_path* inside it, or "" when the archive does not exist.

    Fixes: the ZipFile handle is now closed (context manager); extraction
    uses extractall instead of a per-member loop.
    """
    if not os.path.exists(zip_src):
        return ""
    with zipfile.ZipFile(zip_src, 'r') as fz:
        fz.extractall(dst_dir)
    return os.path.join(dst_dir, csar_path)
+
+
def unzip_csar_to_tmp(zip_src):
    """Extract *zip_src* into a freshly created temp directory and return its path.

    The caller owns the returned directory and must delete it.
    Fixes: the ZipFile handle is now closed via a context manager.
    """
    dirpath = tempfile.mkdtemp()
    with zipfile.ZipFile(zip_src, 'r') as zip_ref:
        zip_ref.extractall(dirpath)
    return dirpath
+
+
def get_artifact_path(vnf_path, artifact_file):
    """Search *vnf_path* recursively for *artifact_file*.

    Returns the full path of the first match, or None when not found.
    """
    for base_dir, _sub_dirs, file_names in os.walk(vnf_path):
        if artifact_file in file_names:
            return os.path.join(base_dir, artifact_file)
    return None
diff --git a/catalog/pub/utils/idutil.py b/catalog/pub/utils/idutil.py
new file mode 100644
index 0000000..c2347c1
--- /dev/null
+++ b/catalog/pub/utils/idutil.py
@@ -0,0 +1,20 @@
+# Copyright 2016 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from catalog.pub.redisco import containers as cont
+
+
def get_auto_id(id_type, id_group="auto_id_hash"):
    """Increment and return the redis-backed counter for *id_type* in *id_group*."""
    counter_hash = cont.Hash(id_group)
    counter_hash.hincrby(id_type, 1)
    return counter_hash.hget(id_type)
diff --git a/catalog/pub/utils/jobutil.py b/catalog/pub/utils/jobutil.py
new file mode 100644
index 0000000..3d79c7a
--- /dev/null
+++ b/catalog/pub/utils/jobutil.py
@@ -0,0 +1,145 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import datetime
+import logging
+import uuid
+import traceback
+from functools import reduce
+
+from catalog.pub.database.models import JobStatusModel, JobModel
+from catalog.pub.utils import idutil
+
+logger = logging.getLogger(__name__)
+
+
def enum(**enums):
    """Build a lightweight enumeration class whose attributes are the kwargs."""
    return type('Enum', (), dict(enums))
+
+
# Overall job state persisted on JobModel.status (integer column).
JOB_STATUS = enum(PROCESSING=0, FINISH=1)
# Fine-grained per-step state persisted on JobStatusModel.status (string column).
JOB_MODEL_STATUS = enum(STARTED='started', PROCESSING='processing', FINISHED='finished', ERROR='error',
                        TIMEOUT='timeout')
# Human-readable job type labels used when composing job ids / records.
JOB_TYPE = enum(CREATE_VNF="create vnf", TERMINATE_VNF="terminate vnf", GRANT_VNF="grant vnf", MANUAL_SCALE_VNF="manual scale vnf",
                HEAL_VNF="heal vnf")
+
+
class JobUtil(object):
    """Static helpers to create, query, update and clean up jobs and their
    per-step status rows (JobModel / JobStatusModel)."""

    def __init__(self):
        pass

    @staticmethod
    def __gen_job_id(job_name):
        # uuid1 guarantees uniqueness even for identical job names.
        return "%s-%s" % (job_name if job_name else "UnknownJob", uuid.uuid1())

    @staticmethod
    def query_job_status(job_id, index_id=-1):
        """Return status rows of *job_id*, newest first.

        With a negative *index_id* only the single latest row is returned;
        otherwise all rows newer than *index_id* are returned.
        """
        if index_id < 0:
            row = JobStatusModel.objects.filter(jobid=job_id).order_by("-indexid").first()
            return [row] if row else []
        return [job for job in JobStatusModel.objects.filter(jobid=job_id).order_by("-indexid")
                if job.indexid > index_id]

    @staticmethod
    def is_job_exists(job_id):
        """True when a job with *job_id* is present.

        Uses QuerySet.exists() instead of materializing all rows with len().
        """
        return JobModel.objects.filter(jobid=job_id).exists()

    @staticmethod
    def create_job(inst_type, jobaction, inst_id, user='', job_id=None, res_name=''):
        """Persist a new job in PROCESSING state and return its job id.

        When *job_id* is None an id is generated from type/action/instance.
        """
        if job_id is None:
            job_id = JobUtil.__gen_job_id(
                '%s-%s-%s' % (str(inst_type).replace(' ', '_'), str(jobaction).replace(' ', '_'), str(inst_id)))
        job = JobModel()
        job.jobid = job_id
        job.jobtype = inst_type
        job.jobaction = jobaction
        job.resid = str(inst_id)
        job.status = JOB_STATUS.PROCESSING
        job.user = user
        job.starttime = datetime.datetime.now().strftime('%Y-%m-%d %X')
        job.progress = 0
        job.resname = res_name
        logger.debug("create a new job, jobid=%s, jobtype=%s, jobaction=%s, resid=%s, status=%d" %
                     (job.jobid, job.jobtype, job.jobaction, job.resid, job.status))
        job.save()
        return job_id

    @staticmethod
    def clear_job(job_id):
        """Delete all job rows matching *job_id* (per-instance delete keeps
        any model-level delete() overrides in effect)."""
        for job in JobModel.objects.filter(jobid=job_id):
            job.delete()
        logger.debug("Clear job, job_id=%s" % job_id)

    @staticmethod
    def add_job_status(job_id, progress, status_decs, error_code=""):
        """Append a status row for *job_id* and sync the parent job's progress.

        Progress semantics: 0 started, 1-99 processing, 100 finished,
        101 partly finished, >101 error; error_code "255" forces error.
        Raises Exception when the job does not exist.
        """
        jobs = JobModel.objects.filter(jobid=job_id)
        if not jobs:
            logger.error("Job[%s] is not exists, please create job first." % job_id)
            raise Exception("Job[%s] is not exists." % job_id)
        try:
            int_progress = int(progress)
            job_status = JobStatusModel()
            job_status.indexid = int(idutil.get_auto_id(job_id))
            job_status.jobid = job_id
            job_status.status = JOB_MODEL_STATUS.PROCESSING
            job_status.progress = int_progress

            if job_status.progress == 0:
                job_status.status = JOB_MODEL_STATUS.STARTED
            elif job_status.progress == 100:
                job_status.status = JOB_MODEL_STATUS.FINISHED
            elif job_status.progress == 101:
                # No JOB_MODEL_STATUS constant exists for this state; keep the literal.
                job_status.status = "partly_finished"
            elif job_status.progress > 101:
                job_status.status = JOB_MODEL_STATUS.ERROR

            if error_code == "255":
                job_status.status = JOB_MODEL_STATUS.ERROR

            job_status.descp = status_decs
            # Empty error codes are normalized to "0" (success).
            job_status.errcode = error_code if error_code else "0"
            job_status.addtime = datetime.datetime.now().strftime('%Y-%m-%d %X')
            job_status.save()
            logger.debug("Add a new job status, jobid=%s, indexid=%d,"
                         " status=%s, description=%s, progress=%d, errcode=%s, addtime=%r" %
                         (job_status.jobid, job_status.indexid, job_status.status, job_status.descp,
                          job_status.progress, job_status.errcode, job_status.addtime))

            job = jobs[0]
            job.progress = int_progress
            if job_status.progress >= 100:
                job.status = JOB_STATUS.FINISH
                job.endtime = datetime.datetime.now().strftime('%Y-%m-%d %X')
            job.save()
            logger.debug("update job, jobid=%s, progress=%d" % (job_status.jobid, int_progress))
        except Exception:
            # BUGFIX: was a bare "except:"; now only Exception subclasses are
            # swallowed (and logged) so KeyboardInterrupt/SystemExit propagate.
            logger.error(traceback.format_exc())

    @staticmethod
    def clear_job_status(job_id):
        """Delete all status rows belonging to *job_id*."""
        for job in JobStatusModel.objects.filter(jobid=job_id):
            job.delete()
        logger.debug("Clear job status, job_id=%s" % job_id)

    @staticmethod
    def get_unfinished_jobs(url_prefix, inst_id, inst_type):
        """Return "<url_prefix>/<jobid>" for every PROCESSING job of the
        given resource instance and job type."""
        jobs = JobModel.objects.filter(resid=inst_id, jobtype=inst_type, status=JOB_STATUS.PROCESSING)
        return [url_prefix + "/" + job.jobid for job in jobs]
diff --git a/catalog/pub/utils/restcall.py b/catalog/pub/utils/restcall.py
new file mode 100644
index 0000000..8f03259
--- /dev/null
+++ b/catalog/pub/utils/restcall.py
@@ -0,0 +1,114 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import traceback
+import logging
+import urllib
+import uuid
+import httplib2
+import base64
+
+from catalog.pub.config.config import MSB_SERVICE_IP, MSB_SERVICE_PORT
+
# Authentication modes accepted by call_req().
rest_no_auth, rest_oneway_auth, rest_bothway_auth = 0, 1, 2
# HTTP status codes are kept as strings because httplib2 returns
# resp['status'] as a string.
HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED = '200', '201', '204', '202'
status_ok_list = [HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED]
HTTP_404_NOTFOUND, HTTP_403_FORBIDDEN, HTTP_401_UNAUTHORIZED, HTTP_400_BADREQUEST = '404', '403', '401', '400'

logger = logging.getLogger(__name__)
+
+
def call_req(base_url, user, passwd, auth_type, resource, method, content='', additional_headers=None):
    """Invoke a REST request, retrying up to 3 times on not-ready responses.

    Returns ``[code, body, http_status]`` where code is 0 on 2xx success,
    1 on HTTP error or connection failure, 2 on URL errors, 3/4 on
    unexpected errors.

    Fixes: *additional_headers* defaulted to a shared mutable ``{}`` (now a
    None sentinel); ``urllib.error`` is imported explicitly — the module's
    top-level ``import urllib`` does not load that submodule, so evaluating
    the except clause could itself raise AttributeError.
    """
    import urllib.error  # ensure the submodule is loaded before the except clause needs it
    callid = str(uuid.uuid1())
    logger.debug("[%s]call_req('%s','%s','%s',%s,'%s','%s','%s')" % (
        callid, base_url, user, passwd, auth_type, resource, method, content))
    ret = None
    resp_status = ''
    full_url = ''
    try:
        full_url = combine_url(base_url, resource)
        headers = {'content-type': 'application/json', 'accept': 'application/json'}
        if user:
            headers['Authorization'] = 'Basic %s' % base64.b64encode(bytes('%s:%s' % (user, passwd), "utf-8")).decode()
        ca_certs = None
        if additional_headers:
            headers.update(additional_headers)
        for retry_times in range(3):
            http = httplib2.Http(ca_certs=ca_certs, disable_ssl_certificate_validation=(auth_type == rest_no_auth))
            http.follow_all_redirects = True
            try:
                resp, resp_content = http.request(full_url, method=method.upper(), body=content, headers=headers)
                resp_status, resp_body = resp['status'], resp_content
                logger.debug("[%s][%d]status=%s)" % (callid, retry_times, resp_status))
                if headers['accept'] == 'application/json':
                    resp_body = resp_content.decode('UTF-8')
                    logger.debug("resp_body=%s", resp_body)
                if resp_status in status_ok_list:
                    ret = [0, resp_body, resp_status]
                else:
                    ret = [1, resp_body, resp_status]
                break
            except Exception as ex:
                if 'httplib.ResponseNotReady' in str(sys.exc_info()):
                    # Server not ready yet: record the failure and retry.
                    logger.debug("retry_times=%d", retry_times)
                    logger.error(traceback.format_exc())
                    ret = [1, "Unable to connect to %s" % full_url, resp_status]
                    continue
                raise ex
    except urllib.error.URLError as err:
        ret = [2, str(err), resp_status]
    except Exception:
        logger.error(traceback.format_exc())
        logger.error("[%s]ret=%s" % (callid, str(sys.exc_info())))
        res_info = str(sys.exc_info())
        if 'httplib.ResponseNotReady' in res_info:
            res_info = "The URL[%s] request failed or is not responding." % full_url
        ret = [3, res_info, resp_status]
    except:  # noqa  NOTE(review): only reachable for BaseException (e.g. KeyboardInterrupt); kept for compatibility — confirm it should not propagate instead
        logger.error(traceback.format_exc())
        ret = [4, str(sys.exc_info()), resp_status]

    logger.debug("[%s]ret=%s" % (callid, str(ret)))
    return ret
+
+
def req_by_msb(resource, method, content=''):
    """Send an unauthenticated request through the MSB gateway; returns call_req's result list."""
    msb_base_url = "http://%s:%s/" % (MSB_SERVICE_IP, MSB_SERVICE_PORT)
    return call_req(msb_base_url, "", "", rest_no_auth, resource, method, content)
+
+
def upload_by_msb(resource, method, file_data=None):
    """Upload *file_data* as an octet-stream through the MSB gateway.

    Returns ``[0, body, status]`` on success, ``[1, msg, status]`` on failure.

    Fixes: *file_data* defaulted to a shared mutable ``{}``; a None sentinel
    now reproduces the same behavior without the shared-default pitfall.
    """
    if file_data is None:
        file_data = {}
    headers = {'Content-Type': 'application/octet-stream'}
    full_url = "http://%s:%s/%s" % (MSB_SERVICE_IP, MSB_SERVICE_PORT, resource)
    http = httplib2.Http()
    resp, resp_content = http.request(full_url, method=method.upper(), body=file_data, headers=headers)
    resp_status, resp_body = resp['status'], resp_content.decode('UTF-8')
    if resp_status not in status_ok_list:
        logger.error("Status code is %s, detail is %s.", resp_status, resp_body)
        return [1, "Failed to upload file.", resp_status]
    logger.debug("resp_body=%s", resp_body)
    return [0, resp_body, resp_status]
+
+
def combine_url(base_url, resource):
    """Join *base_url* and *resource* with exactly one '/' between them."""
    if base_url.endswith('/') and resource.startswith('/'):
        # Both sides carry a slash: drop one.
        return base_url[:-1] + resource
    if base_url.endswith('/') or resource.startswith('/'):
        # Exactly one side carries a slash: plain concatenation.
        return base_url + resource
    # Neither side carries a slash: insert one.
    return base_url + '/' + resource
diff --git a/catalog/pub/utils/syscomm.py b/catalog/pub/utils/syscomm.py
new file mode 100644
index 0000000..89219ec
--- /dev/null
+++ b/catalog/pub/utils/syscomm.py
@@ -0,0 +1,19 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import inspect
+
+
def fun_name():
    """Return the name of the function that called fun_name().

    Uses currentframe().f_back instead of inspect.stack(): stack() walks and
    materializes the entire call stack, which is needlessly expensive for a
    single-frame lookup.
    """
    caller = inspect.currentframe().f_back
    return caller.f_code.co_name
diff --git a/catalog/pub/utils/tests.py b/catalog/pub/utils/tests.py
new file mode 100644
index 0000000..0f02467
--- /dev/null
+++ b/catalog/pub/utils/tests.py
@@ -0,0 +1,221 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import platform
+import unittest
+import mock
+from . import fileutil
+import urllib
+from . import syscomm
+from . import timeutil
+from . import values
+
+from catalog.pub.database.models import JobStatusModel, JobModel
+from catalog.pub.utils.jobutil import JobUtil
+
+
class MockReq():
    """Minimal stand-in for the response object returned by urllib.request.urlopen."""

    def close(self):
        # Nothing to release for the fake response.
        pass

    def read(self):
        # Fixed payload consumed by fileutil.download_file_from_http in tests.
        return "1"
+
+
class UtilsTest(unittest.TestCase):
    """Unit tests for fileutil, jobutil, syscomm, timeutil and values helpers.

    The job-related tests write to the real JobModel / JobStatusModel tables,
    so each test clears its own rows before and after running.
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_create_and_delete_dir(self):
        # make_dirs must be idempotent; delete_dirs removes the whole tree.
        dirs = "abc/def/hij"
        fileutil.make_dirs(dirs)
        fileutil.make_dirs(dirs)
        fileutil.delete_dirs(dirs)

    @mock.patch.object(urllib.request, 'urlopen')
    def test_download_file_from_http(self, mock_urlopen):
        # MockReq stands in for the HTTP response object.
        mock_urlopen.return_value = MockReq()
        fileutil.delete_dirs("abc")
        is_ok, f_name = fileutil.download_file_from_http("1", "abc", "1.txt")
        self.assertTrue(is_ok)
        # The joined path uses the platform's separator.
        if 'Windows' in platform.system():
            self.assertTrue(f_name.endswith("abc\\1.txt"))
        else:
            self.assertTrue(f_name.endswith("abc/1.txt"))
        fileutil.delete_dirs("abc")

    def test_query_job_status(self):
        # Default query returns only the newest row; with an index, all newer rows.
        job_id = "1"
        JobStatusModel.objects.filter().delete()
        JobStatusModel(
            indexid=1,
            jobid=job_id,
            status="success",
            progress=10
        ).save()
        JobStatusModel(
            indexid=2,
            jobid=job_id,
            status="success",
            progress=50
        ).save()
        JobStatusModel(
            indexid=3,
            jobid=job_id,
            status="success",
            progress=100
        ).save()
        jobs = JobUtil.query_job_status(job_id)
        self.assertEqual(1, len(jobs))
        self.assertEqual(3, jobs[0].indexid)
        jobs = JobUtil.query_job_status(job_id, 1)
        self.assertEqual(2, len(jobs))
        self.assertEqual(3, jobs[0].indexid)
        self.assertEqual(2, jobs[1].indexid)
        JobStatusModel.objects.filter().delete()

    def test_is_job_exists(self):
        job_id = "1"
        JobModel.objects.filter().delete()
        JobModel(
            jobid=job_id,
            jobtype="1",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        self.assertTrue(JobUtil.is_job_exists(job_id))
        JobModel.objects.filter().delete()

    def test_create_job(self):
        # A caller-supplied job_id is used verbatim (no generated uuid suffix).
        job_id = "5"
        JobModel.objects.filter().delete()
        JobUtil.create_job(
            inst_type="1",
            jobaction="2",
            inst_id="3",
            user="4",
            job_id=5,
            res_name="6")
        self.assertEqual(1, len(JobModel.objects.filter(jobid=job_id)))
        JobModel.objects.filter().delete()

    def test_clear_job(self):
        job_id = "1"
        JobModel.objects.filter().delete()
        JobModel(
            jobid=job_id,
            jobtype="1",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        JobUtil.clear_job(job_id)
        self.assertEqual(0, len(JobModel.objects.filter(jobid=job_id)))

    def test_add_job_status_when_job_is_not_created(self):
        # Adding a status row for a job that was never created must raise.
        JobModel.objects.filter().delete()
        self.assertRaises(
            Exception,
            JobUtil.add_job_status,
            job_id="1",
            progress=1,
            status_decs="2",
            error_code="0"
        )

    def test_add_job_status_normal(self):
        job_id = "1"
        JobModel.objects.filter().delete()
        JobStatusModel.objects.filter().delete()
        JobModel(
            jobid=job_id,
            jobtype="1",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        JobUtil.add_job_status(
            job_id="1",
            progress=1,
            status_decs="2",
            error_code="0"
        )
        self.assertEqual(1, len(JobStatusModel.objects.filter(jobid=job_id)))
        JobStatusModel.objects.filter().delete()
        JobModel.objects.filter().delete()

    def test_clear_job_status(self):
        job_id = "1"
        JobStatusModel.objects.filter().delete()
        JobStatusModel(
            indexid=1,
            jobid=job_id,
            status="success",
            progress=10
        ).save()
        JobUtil.clear_job_status(job_id)
        self.assertEqual(0, len(JobStatusModel.objects.filter(jobid=job_id)))

    def test_get_unfinished_jobs(self):
        # Every PROCESSING job for the resource is reported as url_prefix/jobid.
        JobModel.objects.filter().delete()
        JobModel(
            jobid="11",
            jobtype="InstVnf",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        JobModel(
            jobid="22",
            jobtype="InstVnf",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        JobModel(
            jobid="33",
            jobtype="InstVnf",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        progresses = JobUtil.get_unfinished_jobs(
            url_prefix="/vnfinst",
            inst_id="3",
            inst_type="InstVnf"
        )
        expect_progresses = ['/vnfinst/11', '/vnfinst/22', '/vnfinst/33']
        self.assertEqual(expect_progresses, progresses)
        JobModel.objects.filter().delete()

    def test_fun_name(self):
        # fun_name reports the name of its caller, i.e. this test method.
        self.assertEqual("test_fun_name", syscomm.fun_name())

    def test_now_time(self):
        # The default format contains both date separators and time separators.
        self.assertIn(":", timeutil.now_time())
        self.assertIn("-", timeutil.now_time())

    def test_ignore_case_get(self):
        # Lookups are case-insensitive; the default is returned on a miss.
        data = {
            "Abc": "def",
            "HIG": "klm"
        }
        self.assertEqual("def", values.ignore_case_get(data, 'ABC'))
        self.assertEqual("def", values.ignore_case_get(data, 'abc'))
        self.assertEqual("klm", values.ignore_case_get(data, 'hig'))
        self.assertEqual("bbb", values.ignore_case_get(data, 'aaa', 'bbb'))
diff --git a/catalog/pub/utils/timeutil.py b/catalog/pub/utils/timeutil.py
new file mode 100644
index 0000000..1d97e9d
--- /dev/null
+++ b/catalog/pub/utils/timeutil.py
@@ -0,0 +1,19 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+
+
def now_time(fmt="%Y-%m-%d %H:%M:%S"):
    """Return the current local time rendered with strftime format *fmt*."""
    current = datetime.datetime.now()
    return current.strftime(fmt)
diff --git a/catalog/pub/utils/toscaparser/__init__.py b/catalog/pub/utils/toscaparser/__init__.py
new file mode 100644
index 0000000..62ead96
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/__init__.py
@@ -0,0 +1,54 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+
+from catalog.pub.utils.toscaparser.nsdmodel import NsdInfoModel
+from catalog.pub.utils.toscaparser.pnfmodel import PnfdInfoModel
+from catalog.pub.utils.toscaparser.sdmodel import SdInfoModel
+from catalog.pub.utils.toscaparser.vnfdmodel import EtsiVnfdInfoModel
+
+
def parse_nsd(path, input_parameters=[]):
    """Parse the NSD package at *path* and return its model as a JSON string."""
    model = NsdInfoModel(path, input_parameters).model
    raw_json = json.dumps(model, default=lambda obj: obj.__dict__)
    # Render JSON nulls as empty strings for downstream consumers.
    return raw_json.replace(': null', ': ""')
+
+
def parse_sd(path, input_parameters=[]):
    """Parse the SD package at *path* and return the model object as a JSON string."""
    model = SdInfoModel(path, input_parameters)
    raw_json = json.dumps(model, default=lambda obj: obj.__dict__)
    # Render JSON nulls as empty strings for downstream consumers.
    return raw_json.replace(': null', ': ""')
+
+
def parse_vnfd(path, input_parameters=[], isETSI=True):
    """Parse the VNFD package at *path*; non-ETSI packages yield an empty model."""
    model = EtsiVnfdInfoModel(path, input_parameters) if isETSI else {}
    raw_json = json.dumps(model, default=lambda obj: obj.__dict__)
    # Render JSON nulls as empty strings for downstream consumers.
    return raw_json.replace(': null', ': ""')
+
+
def parse_pnfd(path, input_parameters=[], isETSI=True):
    """Parse the PNFD package at *path*; non-ETSI packages yield an empty model."""
    model = PnfdInfoModel(path, input_parameters) if isETSI else {}
    raw_json = json.dumps(model, default=lambda obj: obj.__dict__)
    # Render JSON nulls as empty strings for downstream consumers.
    return raw_json.replace(': null', ': ""')
diff --git a/catalog/pub/utils/toscaparser/basemodel.py b/catalog/pub/utils/toscaparser/basemodel.py
new file mode 100644
index 0000000..6ed26aa
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/basemodel.py
@@ -0,0 +1,534 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ftplib
+import json
+import logging
+import os
+import re
+import shutil
+import urllib
+
+# import paramiko
+from toscaparser.tosca_template import ToscaTemplate
+from toscaparser.properties import Property
+from toscaparser.functions import Function, Concat, GetInput, get_function, function_mappings
+from catalog.pub.utils.toscaparser.graph import Graph
+
+from catalog.pub.utils.toscaparser.dataentityext import DataEntityExt
+
logger = logging.getLogger(__name__)

# Dictionary keys used when navigating parsed TOSCA template structures.
METADATA = "metadata"
PROPERTIES = "properties"
DESCRIPTION = "description"
REQUIREMENTS = "requirements"
INTERFACES = "interfaces"
TOPOLOGY_TEMPLATE = "topology_template"
INPUTS = "inputs"
CAPABILITIES = "capabilities"
ATTRIBUTES = "attributes"
ARTIFACTS = "artifacts"
DERIVED_FROM = "derived_from"

# Keys/values used in the plain-dict node representation built by buildNode().
NODE_NAME = "name"
NODE_TYPE = "nodeType"
NODE_ROOT = "tosca.nodes.Root"
GROUP_TYPE = "groupType"
GROUPS_ROOT = "tosca.groups.Root"
+
+
+class BaseInfoModel(object):
+
+ def __init__(self, path=None, params=None, tosca=None):
+ if tosca:
+ _tosca = tosca
+ else:
+ _tosca = self.buildToscaTemplate(path, params)
+ self.description = getattr(_tosca, "description", "")
+ self.parseModel(_tosca)
+
    def parseModel(self, tosca):
        # Intentionally empty: subclasses override this hook to populate
        # model attributes from the parsed ToscaTemplate.
        pass
+
+ def buildInputs(self, tosca):
+ topo = tosca.tpl.get(TOPOLOGY_TEMPLATE, None)
+ return topo.get(INPUTS, {}) if topo else {}
+
+ def buildToscaTemplate(self, path, params):
+ file_name = None
+ try:
+ file_name = self._check_download_file(path)
+ valid_params = self._validate_input_params(file_name, params)
+ return self._create_tosca_template(file_name, valid_params)
+ finally:
+ if file_name is not None and file_name != path and os.path.exists(file_name):
+ try:
+ os.remove(file_name)
+ except Exception as e:
+ logger.error("Failed to parse package, error: %s", e.args[0])
+
+ def _validate_input_params(self, path, params):
+ valid_params = {}
+ inputs = {}
+ if isinstance(params, list):
+ for param in params:
+ key = param.get('key', 'undefined')
+ value = param.get('value', 'undefined')
+ inputs[key] = value
+ params = inputs
+
+ if params:
+ tmp = self._create_tosca_template(path, None)
+ if isinstance(params, dict):
+ for key, value in list(params.items()):
+ if hasattr(tmp, 'inputs') and len(tmp.inputs) > 0:
+ for input_def in tmp.inputs:
+ if (input_def.name == key):
+ valid_params[key] = DataEntityExt.validate_datatype(input_def.type, value)
+ return valid_params
+
+ def _create_tosca_template(self, file_name, valid_params):
+ tosca_tpl = None
+ try:
+ tosca_tpl = ToscaTemplate(path=file_name,
+ parsed_params=valid_params,
+ no_required_paras_check=True,
+ debug_mode=True)
+ except Exception as e:
+ print(e.args[0])
+ finally:
+ if tosca_tpl is not None and hasattr(tosca_tpl, "temp_dir") and os.path.exists(tosca_tpl.temp_dir):
+ try:
+ shutil.rmtree(tosca_tpl.temp_dir)
+ except Exception as e:
+ logger.error("Failed to create tosca template, error: %s", e.args[0])
+ print("-----------------------------")
+ print('\n'.join(['%s:%s' % item for item in list(tosca_tpl.__dict__.items())]))
+ print("-----------------------------")
+ return tosca_tpl
+
+ def _check_download_file(self, path):
+ if (path.startswith("ftp") or path.startswith("sftp")):
+ return self.downloadFileFromFtpServer(path)
+ elif (path.startswith("http")):
+ return self.download_file_from_httpserver(path)
+ return path
+
+ def download_file_from_httpserver(self, path):
+ path = path.encode("utf-8")
+ tmps = str.split(path, '/')
+ localFileName = tmps[len(tmps) - 1]
+ urllib.request.urlretrieve(path, localFileName)
+ return localFileName
+
+ def downloadFileFromFtpServer(self, path):
+ path = path.encode("utf-8")
+ tmp = str.split(path, '://')
+ protocol = tmp[0]
+ tmp = str.split(tmp[1], ':')
+ if len(tmp) == 2:
+ userName = tmp[0]
+ tmp = str.split(tmp[1], '@')
+ userPwd = tmp[0]
+ index = tmp[1].index('/')
+ hostIp = tmp[1][0:index]
+ remoteFileName = tmp[1][index:len(tmp[1])]
+ if protocol.lower() == 'ftp':
+ hostPort = 21
+ else:
+ hostPort = 22
+
+ if len(tmp) == 3:
+ userName = tmp[0]
+ userPwd = str.split(tmp[1], '@')[0]
+ hostIp = str.split(tmp[1], '@')[1]
+ index = tmp[2].index('/')
+ hostPort = tmp[2][0:index]
+ remoteFileName = tmp[2][index:len(tmp[2])]
+
+ localFileName = str.split(remoteFileName, '/')
+ localFileName = localFileName[len(localFileName) - 1]
+
+ if protocol.lower() == 'sftp':
+ self.sftp_get(userName, userPwd, hostIp, hostPort, remoteFileName, localFileName)
+ else:
+ self.ftp_get(userName, userPwd, hostIp, hostPort, remoteFileName, localFileName)
+ return localFileName
+
+ # def sftp_get(self, userName, userPwd, hostIp, hostPort, remoteFileName, localFileName):
+ # # return
+ # t = None
+ # try:
+ # t = paramiko.Transport(hostIp, int(hostPort))
+ # t.connect(username=userName, password=userPwd)
+ # sftp = paramiko.SFTPClient.from_transport(t)
+ # sftp.get(remoteFileName, localFileName)
+ # finally:
+ # if t is not None:
+ # t.close()
+
+ def ftp_get(self, userName, userPwd, hostIp, hostPort, remoteFileName, localFileName):
+ f = None
+ try:
+ ftp = ftplib.FTP()
+ ftp.connect(hostIp, hostPort)
+ ftp.login(userName, userPwd)
+ f = open(localFileName, 'wb')
+ ftp.retrbinary('RETR ' + remoteFileName, f.write, 1024)
+ f.close()
+ finally:
+ if f is not None:
+ f.close()
+
+ def buildMetadata(self, tosca):
+ return tosca.tpl.get(METADATA, {}) if tosca else {}
+
+ def buildNode(self, nodeTemplate, tosca):
+ inputs = tosca.inputs
+ parsed_params = tosca.parsed_params
+ ret = {}
+ ret[NODE_NAME] = nodeTemplate.name
+ ret[NODE_TYPE] = nodeTemplate.type
+ if DESCRIPTION in nodeTemplate.entity_tpl:
+ ret[DESCRIPTION] = nodeTemplate.entity_tpl[DESCRIPTION]
+ else:
+ ret[DESCRIPTION] = ''
+ if METADATA in nodeTemplate.entity_tpl:
+ ret[METADATA] = nodeTemplate.entity_tpl[METADATA]
+ else:
+ ret[METADATA] = ''
+ props = self.buildProperties_ex(nodeTemplate, tosca.topology_template)
+ ret[PROPERTIES] = self.verify_properties(props, inputs, parsed_params)
+ ret[REQUIREMENTS] = self.build_requirements(nodeTemplate)
+ self.buildCapabilities(nodeTemplate, inputs, ret)
+ self.buildArtifacts(nodeTemplate, inputs, ret)
+ interfaces = self.build_interfaces(nodeTemplate)
+ if interfaces:
+ ret[INTERFACES] = interfaces
+ return ret
+
    def buildProperties(self, nodeTemplate, parsed_params):
        """Build a plain dict of the node's property values.

        get_input references are coerced to the declared property type when
        they resolve against user-supplied ``parsed_params``; otherwise they
        are kept as a ``{function_name: input_name}`` marker dict.  Entries
        from the node's attributes section are appended as strings and may
        overwrite same-named properties.
        """
        properties = {}
        isMappingParams = parsed_params and len(parsed_params) > 0
        for k, item in list(nodeTemplate.get_properties().items()):
            properties[k] = item.value
            if isinstance(item.value, GetInput):
                if item.value.result() and isMappingParams:
                    # Resolvable input with caller-provided params: validate
                    # and coerce to the declared property type.
                    properties[k] = DataEntityExt.validate_datatype(item.type, item.value.result())
                else:
                    # Unresolved input: keep an explicit marker so callers
                    # can spot it (e.g. {"get_input": "<input name>"}).
                    tmp = {}
                    tmp[item.value.name] = item.value.input_name
                    properties[k] = tmp
        if ATTRIBUTES in nodeTemplate.entity_tpl:
            for k, item in list(nodeTemplate.entity_tpl[ATTRIBUTES].items()):
                # Attributes are stringified wholesale.
                properties[k] = str(item)
        return properties
+
    def buildProperties_ex(self, nodeTemplate, topology_template, properties=None):
        """Recursively evaluate a node's properties.

        Handles toscaparser Property objects (evaluating intrinsic
        functions, with one-layer support for Concat arguments), plain
        dicts/lists, and raw intrinsic-function mappings keyed by a name in
        ``function_mappings``.  Non-string scalars are JSON-encoded.
        """
        if properties is None:
            # Default to the node's own properties on the first call.
            properties = nodeTemplate.get_properties()
        _properties = {}
        if isinstance(properties, dict):
            for name, prop in list(properties.items()):
                if isinstance(prop, Property):
                    if isinstance(prop.value, Function):
                        if isinstance(prop.value, Concat):  # support one layer inner function.
                            # Evaluate each Concat argument: literals are
                            # appended as-is, dict args are rebuilt as raw
                            # function calls and evaluated.
                            value_str = ''
                            for arg in prop.value.args:
                                if isinstance(arg, str):
                                    value_str += arg
                                elif isinstance(arg, dict):
                                    raw_func = {}
                                    for k, v in list(arg.items()):
                                        func_args = []
                                        func_args.append(v)
                                        raw_func[k] = func_args
                                    func = get_function(topology_template, nodeTemplate, raw_func)
                                    value_str += str(func.result())
                            _properties[name] = value_str
                        else:
                            _properties[name] = prop.value.result()
                    elif isinstance(prop.value, dict) or isinstance(prop.value, list):
                        # Containers are resolved recursively.
                        _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop.value)
                    elif prop.type == 'string':
                        _properties[name] = prop.value
                    else:
                        # Non-string scalars are JSON-encoded strings.
                        _properties[name] = json.dumps(prop.value)
                elif isinstance(prop, dict):
                    _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop)
                elif isinstance(prop, list):
                    _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop)
                elif name in function_mappings:
                    # The key itself is an intrinsic-function name.
                    raw_func = {}
                    func_args = []
                    func_args.append(prop)
                    raw_func[name] = func_args
                    if name == 'CONCAT':
                        value_str = ''
                        for arg in prop:
                            if isinstance(arg, str):
                                value_str += arg
                            elif isinstance(arg, dict):
                                raw_func = {}
                                for k, v in list(arg.items()):
                                    func_args = []
                                    func_args.append(v)
                                    raw_func[k] = func_args
                                value_str += str(
                                    get_function(topology_template, nodeTemplate, raw_func).result())
                        # NOTE(review): ``value`` is computed here but never
                        # stored in _properties, so a CONCAT key's result is
                        # dropped — presumably a latent bug; confirm against
                        # real templates before changing.
                        value = value_str
                    else:
                        # NOTE(review): this returns from the whole method,
                        # discarding any properties gathered so far.
                        return get_function(topology_template, nodeTemplate, raw_func).result()
                else:
                    _properties[name] = prop
        elif isinstance(properties, list):
            # Lists are rebuilt element-by-element, recursing into containers.
            value = []
            for para in properties:
                if isinstance(para, dict) or isinstance(para, list):
                    value.append(self.buildProperties_ex(nodeTemplate, topology_template, para))
                else:
                    value.append(para)
            return value
        return _properties
+
+ def verify_properties(self, props, inputs, parsed_params):
+ ret_props = {}
+ if (props and len(props) > 0):
+ for key, value in list(props.items()):
+ ret_props[key] = self._verify_value(value, inputs, parsed_params)
+ # if isinstance(value, str):
+ # ret_props[key] = self._verify_string(inputs, parsed_params, value);
+ # continue
+ # if isinstance(value, list):
+ # ret_props[key] = map(lambda x: self._verify_dict(inputs, parsed_params, x), value)
+ # continue
+ # if isinstance(value, dict):
+ # ret_props[key] = self._verify_map(inputs, parsed_params, value)
+ # continue
+ # ret_props[key] = value
+ return ret_props
+
+ def build_requirements(self, node_template):
+ rets = []
+ for req in node_template.requirements:
+ for req_name, req_value in list(req.items()):
+ if (isinstance(req_value, dict)):
+ if ('node' in req_value and req_value['node'] not in node_template.templates):
+ continue # No target requirement for aria parser, not add to result.
+ rets.append({req_name: req_value})
+ return rets
+
    def buildCapabilities(self, nodeTemplate, inputs, ret):
        """Resolve get_input references inside the node's capabilities and
        store the parsed result under ret[CAPABILITIES].

        The section is JSON-serialized so references can be patched
        textually, then parsed back.  Nothing is stored when the node has
        no capabilities section.
        """
        capabilities = json.dumps(nodeTemplate.entity_tpl.get(CAPABILITIES, None))
        match = re.findall(r'\{"get_input":\s*"([\w|\-]+)"\}', capabilities)
        for m in match:
            # NOTE(review): raises IndexError if a referenced input is not
            # declared — presumably inputs are validated upstream; confirm.
            aa = [input_def for input_def in inputs if m == input_def.name][0]
            # Replace only the first remaining occurrence: findall and sub
            # both scan left-to-right, so each match is paired with the
            # default of its own input.
            capabilities = re.sub(r'\{"get_input":\s*"([\w|\-]+)"\}', json.dumps(aa.default), capabilities, 1)
        if capabilities != 'null':
            ret[CAPABILITIES] = json.loads(capabilities)
+
    def buildArtifacts(self, nodeTemplate, inputs, ret):
        """Resolve get_input references inside the node's artifacts section
        and store the parsed result under ret[ARTIFACTS].

        Mirrors buildCapabilities: serialize to JSON, substitute each
        get_input occurrence (left-to-right) with the matching input's
        default, then parse back.  Nothing is stored when the node has no
        artifacts section.
        """
        artifacts = json.dumps(nodeTemplate.entity_tpl.get('artifacts', None))
        match = re.findall(r'\{"get_input":\s*"([\w|\-]+)"\}', artifacts)
        for m in match:
            # NOTE(review): IndexError if the input is undeclared — see
            # buildCapabilities.
            aa = [input_def for input_def in inputs if m == input_def.name][0]
            artifacts = re.sub(r'\{"get_input":\s*"([\w|\-]+)"\}', json.dumps(aa.default), artifacts, 1)
        if artifacts != 'null':
            ret[ARTIFACTS] = json.loads(artifacts)
+
+ def build_interfaces(self, node_template):
+ if INTERFACES in node_template.entity_tpl:
+ return node_template.entity_tpl[INTERFACES]
+ return None
+
+ def isNodeTypeX(self, node, nodeTypes, x):
+ node_type = node[NODE_TYPE]
+ while node_type != x:
+ node_type_derived = node_type
+ node_type = nodeTypes[node_type][DERIVED_FROM]
+ if node_type == NODE_ROOT or node_type == node_type_derived:
+ return False
+ return True
+
    def get_requirement_node_name(self, req_value):
        """Return the target node name of a requirement value (str or dict with 'node')."""
        return self.get_prop_from_obj(req_value, 'node')
+
+ def getRequirementByNodeName(self, nodeTemplates, storage_name, prop):
+ for node in nodeTemplates:
+ if node[NODE_NAME] == storage_name:
+ if prop in node:
+ return node[prop]
+
+ def get_prop_from_obj(self, obj, prop):
+ if isinstance(obj, str):
+ return obj
+ if (isinstance(obj, dict) and prop in obj):
+ return obj[prop]
+ return None
+
    def getNodeDependencys(self, node):
        """Return all 'dependency' requirement values of *node*."""
        return self.getRequirementByName(node, 'dependency')
+
+ def getRequirementByName(self, node, requirementName):
+ requirements = []
+ if REQUIREMENTS in node:
+ for item in node[REQUIREMENTS]:
+ for key, value in list(item.items()):
+ if key == requirementName:
+ requirements.append(value)
+ return requirements
+
+ def _verify_value(self, value, inputs, parsed_params):
+ if value == '{}':
+ return ''
+ if isinstance(value, str):
+ return self._verify_string(inputs, parsed_params, value)
+ if isinstance(value, list) or isinstance(value, dict):
+ return self._verify_object(value, inputs, parsed_params)
+ return value
+
+ def _verify_object(self, value, inputs, parsed_params):
+ s = self._verify_string(inputs, parsed_params, json.dumps(value))
+ return json.loads(s)
+
+ def _get_input_name(self, getInput):
+ input_name = getInput.split(':')[1]
+ input_name = input_name.strip()
+ return input_name.replace('"', '').replace('}', '')
+
+ def _verify_string(self, inputs, parsed_params, value):
+ getInputs = re.findall(r'{"get_input": "[a-zA-Z_0-9]+"}', value)
+ for getInput in getInputs:
+ input_name = self._get_input_name(getInput)
+ if parsed_params and input_name in parsed_params:
+ value = value.replace(getInput, json.dumps(parsed_params[input_name]))
+ else:
+ for input_def in inputs:
+ if input_def.default and input_name == input_def.name:
+ value = value.replace(getInput, json.dumps(input_def.default))
+ return value
+
+ def get_node_by_name(self, node_templates, name):
+ for node in node_templates:
+ if node[NODE_NAME] == name:
+ return node
+ return None
+
+ def getCapabilityByName(self, node, capabilityName):
+ if CAPABILITIES in node and capabilityName in node[CAPABILITIES]:
+ return node[CAPABILITIES][capabilityName]
+ return None
+
+ def get_base_path(self, tosca):
+ fpath, fname = os.path.split(tosca.path)
+ return fpath
+
    def build_artifacts(self, node):
        """Normalize the node's artifacts into a list of dicts with at least
        'artifact_name' and 'file' keys.

        When an artifact value is itself a dict, its keys are merged into
        the result (which may overwrite 'file' with the dict's own entry).
        A node without artifacts gets a hard-coded ubuntu sw_image entry —
        a deliberate workaround, see the SDC-1900 TODO below.
        """
        rets = []
        if ARTIFACTS in node and len(node[ARTIFACTS]) > 0:
            artifacts = node[ARTIFACTS]
            for name, value in list(artifacts.items()):
                ret = {}
                ret['artifact_name'] = name
                ret['file'] = value
                if isinstance(value, dict):
                    # Merge detailed artifact definitions into the entry.
                    ret.update(value)
                rets.append(ret)
        else:
            # TODO It is workaround for SDC-1900.
            logger.error("VCPE specific code")
            ret = {}
            ret['artifact_name'] = "sw_image"
            ret['file'] = "ubuntu_16.04"
            ret['type'] = "tosca.artifacts.nfv.SwImage"
            rets.append(ret)

        return rets
+
    def get_node_by_req(self, node_templates, req):
        """Resolve a requirement value to the node dict it targets (or None)."""
        req_node_name = self.get_requirement_node_name(req)
        return self.get_node_by_name(node_templates, req_node_name)
+
+ def isGroupTypeX(self, group, groupTypes, x):
+ group_type = group[GROUP_TYPE]
+ while group_type != x:
+ group_type_derived = group_type
+ group_type = groupTypes[group_type][DERIVED_FROM]
+ if group_type == GROUPS_ROOT or group_type == group_type_derived:
+ return False
+ return True
+
+ def setTargetValues(self, dict_target, target_keys, dict_source, source_keys):
+ i = 0
+ for item in source_keys:
+ dict_target[target_keys[i]] = dict_source.get(item, "")
+ i += 1
+ return dict_target
+
+ def get_deploy_graph(self, tosca, relations):
+ nodes = tosca.graph.nodetemplates
+ graph = Graph()
+ for node in nodes:
+ self._build_deploy_path(node, [], graph, relations)
+ return graph.to_dict()
+
+ def _build_deploy_path(self, node, node_parent, graph, relations):
+ graph.add_node(node.name, node_parent)
+ type_require_set = {}
+ type_requires = node.type_definition.requirements
+ for type_require in type_requires:
+ type_require_set.update(type_require)
+ for requirement in node.requirements:
+ for k in list(requirement.keys()):
+ if type_require_set[k].get('relationship', None) in relations[0] or type_require_set[k].get('capability', None) in relations[0]:
+ if isinstance(requirement[k], dict):
+ next_node = requirement[k].get('node', None)
+ else:
+ next_node = requirement[k]
+ graph.add_node(next_node, [node.name])
+ if type_require_set[k].get('relationship', None) in relations[1]:
+ if isinstance(requirement[k], dict):
+ next_node = requirement[k].get('node', None)
+ else:
+ next_node = requirement[k]
+ graph.add_node(next_node, [node.name])
+
    def get_substitution_mappings(self, tosca):
        """Build a node-like dict from the template's substitution_mappings.

        Returns a dict with 'type', 'properties', 'requirements',
        'capabilities' and 'metadata'.  When the mapped node type is
        declared under top-level node_types, its derived_from and
        properties override the mapping's own.
        """
        node = {
            'properties': {},
            'requirements': {},
            'capabilities': {},
            'metadata': {}
        }
        metadata = None
        substitution_mappings = tosca.tpl['topology_template'].get('substitution_mappings', None)
        if substitution_mappings:
            nodeType = substitution_mappings['node_type']
            logger.debug("nodeType %s", nodeType)
            # NOTE(review): node never has 'type' at this point, so this
            # condition is always true on the first pass — presumably kept
            # for symmetry with the node_types override below.
            if "type" not in node or node['type'] == "":
                node['type'] = nodeType
            node['properties'] = substitution_mappings.get('properties', {})
            node['requirements'] = substitution_mappings.get('requirements', {})
            node['capabilities'] = substitution_mappings.get('capabilities', {})
            metadata = substitution_mappings.get('metadata', {})

            if "node_types" in tosca.tpl:
                # A top-level node_types declaration wins over the mapping.
                node_types = tosca.tpl['node_types'].get(nodeType, None)
                derivedFrom = node_types.get('derived_from', "")
                node['type'] = derivedFrom
                node['properties'] = node_types.get('properties', {})

        # Fall back to the template-level metadata when the mapping has none.
        node['metadata'] = metadata if metadata and metadata != {} else self.buildMetadata(tosca)
        return node
diff --git a/catalog/pub/utils/toscaparser/const.py b/catalog/pub/utils/toscaparser/const.py
new file mode 100644
index 0000000..9c61c48
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/const.py
@@ -0,0 +1,30 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Each *_SECTIONS tuple doubles as the ordered list of key names and as a
# set of individually named constants (tuple-unpacking assignment).

# Metadata keys of an ETSI NSD descriptor.
NS_METADATA_SECTIONS = (NS_UUID, NS_INVARIANTUUID, NS_NAME, NS_VERSION, NS_DESIGNER, NSD_RELEASE_DATE) =\
    ("nsd_id", "nsd_invariant_id", "nsd_name", "nsd_file_structure_version", "nsd_designer", "nsd_release_date_time")
# ("id", "invariant_id", "name", "version", "designer", "description")

# Metadata keys identifying an SDC (ECOMP) service template.
SDC_SERVICE_METADATA_SECTIONS = (SRV_UUID, SRV_INVARIANTUUID, SRV_NAME) = ('UUID', 'invariantUUID', 'name')

# Metadata keys of a PNF descriptor.
PNF_METADATA_SECTIONS = (PNF_UUID, PNF_INVARIANTUUID, PNF_NAME, PNF_METADATA_DESCRIPTION, PNF_VERSION, PNF_PROVIDER) = \
    ("descriptor_id", "descriptor_invariant_id", "name", "description", "version", "provider")
# Top-level sections of the parsed PNF model.
PNF_SECTIONS = (PNF_ID, PNF_METADATA, PNF_PROPERTIES, PNF_DESCRIPTION) = \
    ("pnf_id", "metadata", "properties", "description")

# Top-level sections of the parsed VNF model.
VNF_SECTIONS = (VNF_ID, VNF_METADATA, VNF_PROPERTIES, VNF_DESCRIPTION) = \
    ("vnf_id", "metadata", "properties", "description")

# Top-level sections of the parsed virtual-link model.
VL_SECTIONS = (VL_ID, VL_METADATA, VL_PROPERTIES, VL_DESCRIPTION) = \
    ("vl_id", "metadata", "properties", "description")
diff --git a/catalog/pub/utils/toscaparser/dataentityext.py b/catalog/pub/utils/toscaparser/dataentityext.py
new file mode 100644
index 0000000..825e93b
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/dataentityext.py
@@ -0,0 +1,33 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from toscaparser.dataentity import DataEntity
+from toscaparser.elements.constraints import Schema
+from toscaparser.common.exception import ExceptionCollector
+
+
class DataEntityExt(object):
    '''A complex data value entity ext.

    Thin wrapper over toscaparser's DataEntity.validate_datatype that
    short-circuits string and float coercion.
    '''
    @staticmethod
    def validate_datatype(type, value, entry_schema=None, custom_def=None):
        """Coerce/validate *value* against the schema *type*.

        Falsy values (None, 0, "", ...) are returned unchanged — the check
        is on truthiness, not on ``is None``, which appears deliberate so
        absent values skip validation.  On a failed float conversion the
        error is collected and validation falls through to DataEntity.
        """
        # NOTE: the parameter name ``type`` shadows the builtin but is kept
        # for interface compatibility with keyword callers.
        if value:
            if (type == Schema.STRING):
                return str(value)
            elif type == Schema.FLOAT:
                try:
                    return float(value)
                except Exception:
                    # BUGFIX: grammar of the original message ("an float").
                    ExceptionCollector.appendException(ValueError(('"%s" is not a float.') % value))
            return DataEntity.validate_datatype(type, value, entry_schema, custom_def)
        return value
diff --git a/catalog/pub/utils/toscaparser/graph.py b/catalog/pub/utils/toscaparser/graph.py
new file mode 100644
index 0000000..0af2a14
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/graph.py
@@ -0,0 +1,74 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import deque
+from collections import OrderedDict
+
+
class Graph(object):
    """Directed dependency graph: node -> set of nodes it depends on.

    Insertion order is preserved (OrderedDict) so traversals and the
    topological sort are deterministic across runs.
    """

    def __init__(self, graph_dict=None):
        """Optionally seed the graph from a {node: [dep, ...]} mapping."""
        self.graph = OrderedDict()
        if graph_dict:
            for node, dep_nodes in graph_dict.items():
                self.add_node(node, dep_nodes)

    def add_node(self, node, dep_nodes):
        """Register *node*; when *dep_nodes* is a list, also register each
        dependency node and add an edge node -> dep."""
        if node not in self.graph:
            self.graph[node] = set()
        if isinstance(dep_nodes, list):
            for dep_node in dep_nodes:
                if dep_node not in self.graph:
                    self.graph[dep_node] = set()
                # set.add is idempotent; the original's membership pre-check
                # was redundant.
                self.graph[node].add(dep_node)

    def get_pre_nodes(self, node):
        """Return the nodes that have an edge pointing at *node*."""
        return [k for k in self.graph if node in self.graph[k]]

    def topo_sort(self):
        """Kahn's algorithm: return a topological order (each node before
        the nodes it depends on), or None when the graph has a cycle."""
        in_degree = {node: 0 for node in self.graph}
        for dependents in self.graph.values():
            for dependent in dependents:
                in_degree[dependent] += 1

        # Start from the nodes nothing points to; FIFO processing keeps the
        # original insertion-based ordering.
        queue = deque(node for node in in_degree if in_degree[node] == 0)

        sort_list = []
        while queue:
            node = queue.popleft()
            sort_list.append(node)
            for dependent in self.graph[node]:
                in_degree[dependent] -= 1
                if in_degree[dependent] == 0:
                    queue.append(dependent)

        # Fewer sorted nodes than graph nodes means a cycle was left over.
        return sort_list if len(sort_list) == len(self.graph) else None

    def to_dict(self):
        """Plain-dict view of the graph: {node: [deps...]}.

        (The original built this with a local named ``dict``, shadowing the
        builtin.)
        """
        return {node: list(dependents) for node, dependents in self.graph.items()}
diff --git a/catalog/pub/utils/toscaparser/nsdmodel.py b/catalog/pub/utils/toscaparser/nsdmodel.py
new file mode 100644
index 0000000..f742640
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/nsdmodel.py
@@ -0,0 +1,220 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+from catalog.pub.utils.toscaparser.basemodel import BaseInfoModel
+from catalog.pub.utils.toscaparser.const import SDC_SERVICE_METADATA_SECTIONS
+from catalog.pub.utils.toscaparser.servicemodel import SdcServiceModel
+
+logger = logging.getLogger(__name__)
+
# NFV node/group types recognised inside an NSD topology.
SECTIONS = (NS_TYPE, NS_VNF_TYPE, NS_VL_TYPE, NS_PNF_TYPE, NS_NFP_TYPE, NS_VNFFG_TYPE) = \
    ('tosca.nodes.nfv.NS',
     'tosca.nodes.nfv.VNF',
     'tosca.nodes.nfv.NsVirtualLink',
     'tosca.nodes.nfv.PNF',
     'tosca.nodes.nfv.NFP',
     'tosca.nodes.nfv.VNFFG')

# Deploy-graph relationship filters: index 0 matches a requirement's
# relationship OR capability, index 1 its relationship only.
NFV_NS_RELATIONSHIPS = [["tosca.relationships.nfv.VirtualLinksTo", "tosca.relationships.DependsOn"], []]
+
+
class NsdInfoModel(BaseInfoModel):
    """Entry-point model that inspects the template metadata and dispatches
    to the ETSI NSD model or the SDC (ECOMP) service model."""

    def __init__(self, path, params):
        super(NsdInfoModel, self).__init__(path, params)

    def parseModel(self, tosca):
        """Pick the concrete model by metadata; stays {} when neither
        metadata convention matches."""
        metadata = self.buildMetadata(tosca)
        self.model = {}
        if self._is_etsi(metadata):
            self.model = EtsiNsdInfoModel(tosca)
        elif self._is_ecomp(metadata):
            self.model = SdcServiceModel(tosca)

    def _is_etsi(self, metadata):
        """True when every mandatory ETSI NSD metadata key is present."""
        NS_METADATA_MUST = ["nsd_invariant_id", "nsd_name", "nsd_file_structure_version", "nsd_designer", "nsd_release_date_time"]
        # Idiom: all() instead of the original
        # "True if len([1 for key ...]) == len(...) else False".
        return all(key in metadata for key in NS_METADATA_MUST)

    def _is_ecomp(self, metadata):
        """True when every SDC service metadata key is present."""
        return all(key in metadata for key in SDC_SERVICE_METADATA_SECTIONS)
+
+
class EtsiNsdInfoModel(BaseInfoModel):
    """Info model for ETSI-style NSDs.

    Extracts VNFs, PNFs, virtual links, forwarding paths, VNFFG groups,
    exposed endpoints, nested NSs and the deployment graph from a parsed
    TOSCA template.
    """

    def __init__(self, tosca):
        super(EtsiNsdInfoModel, self).__init__(tosca=tosca)

    def parseModel(self, tosca):
        """Populate every model section from the parsed ToscaTemplate."""
        self.metadata = self.buildMetadata(tosca)
        self.ns = self._build_ns(tosca)
        self.inputs = self.buildInputs(tosca)
        # Materialize once: the list is scanned by several extractors below.
        nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
        types = tosca.topology_template.custom_defs
        self.basepath = self.get_base_path(tosca)
        self.vnfs = self._get_all_vnf(nodeTemplates, types)
        self.pnfs = self._get_all_pnf(nodeTemplates, types)
        self.vls = self._get_all_vl(nodeTemplates, types)
        self.fps = self._get_all_fp(nodeTemplates, types)
        self.vnffgs = self._get_all_vnffg(tosca.topology_template.groups, types)
        self.ns_exposed = self._get_all_endpoint_exposed(tosca.topology_template)
        self.nested_ns = self._get_all_nested_ns(nodeTemplates, types)
        self.graph = self.get_deploy_graph(tosca, NFV_NS_RELATIONSHIPS)

    def _get_all_vnf(self, nodeTemplates, node_types):
        """Collect nodes derived from tosca.nodes.nfv.VNF."""
        vnfs = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, NS_VNF_TYPE):
                vnf = {}
                vnf['vnf_id'] = node['name']
                vnf['description'] = node['description']
                vnf['properties'] = node['properties']
                if not vnf['properties'].get('id', None):
                    # Fall back to the SOL001 descriptor_id when no plain
                    # 'id' property is set.
                    vnf['properties']['id'] = vnf['properties'].get('descriptor_id', None)
                # NOTE(review): 'dependencies' and 'networks' hold the same
                # requirement list — presumably intentional for backward
                # compatibility; confirm before deduplicating.
                vnf['dependencies'] = self._get_networks(node, node_types)
                vnf['networks'] = self._get_networks(node, node_types)
                vnfs.append(vnf)
        return vnfs

    def _get_all_pnf(self, nodeTemplates, node_types):
        """Collect nodes derived from tosca.nodes.nfv.PNF."""
        pnfs = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, NS_PNF_TYPE):
                pnf = {}
                pnf['pnf_id'] = node['name']
                pnf['description'] = node['description']
                pnf['properties'] = node['properties']
                pnf['networks'] = self._get_networks(node, node_types)
                pnfs.append(pnf)
        return pnfs

    def _get_all_vl(self, nodeTemplates, node_types):
        """Collect nodes derived from tosca.nodes.nfv.NsVirtualLink."""
        vls = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, NS_VL_TYPE):
                vl = dict()
                vl['vl_id'] = node['name']
                vl['description'] = node['description']
                vl['properties'] = node['properties']
                vls.append(vl)
        return vls

    def _get_all_fp(self, nodeTemplates, node_types):
        """Collect forwarding-path nodes (tosca.nodes.nfv.NFP)."""
        fps = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, NS_NFP_TYPE):
                fp = {}
                fp['fp_id'] = node['name']
                fp['description'] = node['description']
                fp['properties'] = node['properties']
                fp['forwarder_list'] = self._getForwarderList(node, nodeTemplates, node_types)
                fps.append(fp)
        return fps

    def _getForwarderList(self, node, node_templates, node_types):
        """Resolve a forwarding path's 'forwarder' requirements into
        {type, node_name, capability} entries (type is 'pnf' or 'vnf')."""
        forwarderList = []
        if 'requirements' in node:
            for item in node['requirements']:
                for key, value in list(item.items()):
                    if key == 'forwarder':
                        tmpnode = self.get_node_by_req(node_templates, value)
                        # NOTE: local name 'type' shadows the builtin.
                        type = 'pnf' if self.isNodeTypeX(tmpnode, node_types, NS_PNF_TYPE) else 'vnf'
                        req_node_name = self.get_requirement_node_name(value)
                        if isinstance(value, dict) and 'capability' in value:
                            forwarderList.append(
                                {"type": type, "node_name": req_node_name, "capability": value['capability']})
                        else:
                            forwarderList.append({"type": type, "node_name": req_node_name, "capability": ""})
        return forwarderList

    def _get_all_vnffg(self, groups, group_types):
        """Collect VNFFG groups (tosca.nodes.nfv.VNFFG)."""
        vnffgs = []
        for group in groups:
            if self.isGroupTypeX(group, group_types, NS_VNFFG_TYPE):
                vnffg = {}
                vnffg['vnffg_id'] = group.name
                vnffg['description'] = group.description
                # Properties are optional on a group.
                if 'properties' in group.tpl:
                    vnffg['properties'] = group.tpl['properties']
                vnffg['members'] = group.members
                vnffgs.append(vnffg)
        return vnffgs

    def _get_all_endpoint_exposed(self, topo_tpl):
        """Return the externally exposed connection points from the
        template's substitution mappings ({} when there are none)."""
        if 'substitution_mappings' in topo_tpl.tpl:
            external_cps = self._get_external_cps(topo_tpl.tpl['substitution_mappings'])
            forward_cps = self._get_forward_cps(topo_tpl.tpl['substitution_mappings'])
            return {"external_cps": external_cps, "forward_cps": forward_cps}
        return {}

    def _get_external_cps(self, subs_mappings):
        """Map substitution-mapping requirements to {key_name, cpd_id};
        list-valued mappings contribute their first element."""
        external_cps = []
        if 'requirements' in subs_mappings:
            for key, value in list(subs_mappings['requirements'].items()):
                if isinstance(value, list) and len(value) > 0:
                    external_cps.append({"key_name": key, "cpd_id": value[0]})
                else:
                    external_cps.append({"key_name": key, "cpd_id": value})
        return external_cps

    def _get_forward_cps(self, subs_mappings):
        """Same as _get_external_cps, but over the mapping's capabilities."""
        forward_cps = []
        if 'capabilities' in subs_mappings:
            for key, value in list(subs_mappings['capabilities'].items()):
                if isinstance(value, list) and len(value) > 0:
                    forward_cps.append({"key_name": key, "cpd_id": value[0]})
                else:
                    forward_cps.append({"key_name": key, "cpd_id": value})
        return forward_cps

    def _get_all_nested_ns(self, nodes, node_types):
        """Collect nested NS nodes (tosca.nodes.nfv.NS)."""
        nss = []
        for node in nodes:
            if self.isNodeTypeX(node, node_types, NS_TYPE):
                ns = {}
                ns['ns_id'] = node['name']
                ns['description'] = node['description']
                ns['properties'] = node['properties']
                ns['networks'] = self._get_networks(node, node_types)
                nss.append(ns)
        return nss

    def _get_networks(self, node, node_types):
        """Return {key_name, vl_id} for every requirement of an NS or VNF
        node; other node types always yield []."""
        rets = []
        if 'requirements' in node and (self.isNodeTypeX(node, node_types, NS_TYPE) or self.isNodeTypeX(node, node_types, NS_VNF_TYPE)):
            for item in node['requirements']:
                for key, value in list(item.items()):
                    rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)})
        return rets

    def _build_ns(self, tosca):
        """Build the NS-level node from substitution mappings, backfilling
        empty descriptor properties from the template metadata."""
        ns = self.get_substitution_mappings(tosca)
        properties = ns.get("properties", {})
        metadata = ns.get("metadata", {})
        if properties.get("descriptor_id", "") == "":
            descriptor_id = metadata.get("nsd_id", "")
            properties["descriptor_id"] = descriptor_id
        # NOTE(review): "verison" is misspelled but appears to be the key
        # that downstream consumers read — do not rename without migrating
        # every reader.
        if properties.get("verison", "") == "":
            version = metadata.get("nsd_file_structure_version", "")
            properties["verison"] = version
        if properties.get("designer", "") == "":
            author = metadata.get("nsd_designer", "")
            properties["designer"] = author
        if properties.get("name", "") == "":
            template_name = metadata.get("nsd_name", "")
            properties["name"] = template_name
        if properties.get("invariant_id", "") == "":
            nsd_invariant_id = metadata.get("nsd_invariant_id", "")
            properties["invariant_id"] = nsd_invariant_id
        return ns
diff --git a/catalog/pub/utils/toscaparser/pnfmodel.py b/catalog/pub/utils/toscaparser/pnfmodel.py
new file mode 100644
index 0000000..9ad8686
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/pnfmodel.py
@@ -0,0 +1,53 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+from catalog.pub.utils.toscaparser.basemodel import BaseInfoModel
+logger = logging.getLogger(__name__)
+
+
class PnfdInfoModel(BaseInfoModel):
    """Parsed-PNFD info model: substitution mappings plus external ports."""

    def __init__(self, path, params):
        super(PnfdInfoModel, self).__init__(path, params)

    def parseModel(self, tosca):
        """Populate metadata, inputs and the pnf dict from a parsed template."""
        self.metadata = self.buildMetadata(tosca)
        self.inputs = self.buildInputs(tosca)
        node_list = [self.buildNode(tpl, tosca) for tpl in tosca.nodetemplates]
        self.basepath = self.get_base_path(tosca)
        self.pnf = {}
        self.get_substitution_mappings(tosca)
        self.get_all_cp(node_list)

    def get_substitution_mappings(self, tosca):
        """Record the PNF node type and properties from substitution_mappings."""
        mappings = tosca.tpl['topology_template'].get('substitution_mappings', None)
        if not mappings:
            return
        self.pnf['type'] = mappings['node_type']
        self.pnf['properties'] = mappings.get('properties', {})

    def get_all_cp(self, nodeTemplates):
        """Collect every PnfExtPort node into self.pnf['ExtPorts']."""
        self.pnf['ExtPorts'] = [
            {
                'id': node['name'],
                'type': node['nodeType'],
                'properties': node['properties'],
            }
            for node in nodeTemplates if self.isPnfExtPort(node)
        ]

    def isPnfExtPort(self, node):
        """True when the node type names tosca.nodes.nfv.PnfExtPort."""
        return 'tosca.nodes.nfv.PnfExtPort' in node['nodeType']
diff --git a/catalog/pub/utils/toscaparser/sdmodel.py b/catalog/pub/utils/toscaparser/sdmodel.py
new file mode 100644
index 0000000..05e0caf
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/sdmodel.py
@@ -0,0 +1,93 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+from catalog.pub.utils.toscaparser.basemodel import BaseInfoModel
+from catalog.pub.utils.toscaparser.servicemodel import SdcServiceModel
+
+logger = logging.getLogger(__name__)
+
+
class SdInfoModel(BaseInfoModel):
    """Service-descriptor info model layered on top of SdcServiceModel."""

    def __init__(self, path, params):
        super(SdInfoModel, self).__init__(path, params)

    def parseModel(self, tosca):
        """Copy the SDC service sections onto this model and expand inputs."""
        self.metadata = self.buildMetadata(tosca)
        self.inputs = self.build_inputs(tosca)

        # Renamed from the original misspelled local 'sdcModle'.
        sdc_model = SdcServiceModel(tosca)
        if sdc_model:
            self.service = sdc_model.ns
            if hasattr(tosca, 'nodetemplates'):
                self.basepath = sdc_model.basepath
                self.vnfs = sdc_model.vnfs
                self.pnfs = sdc_model.pnfs
                self.vls = sdc_model.vls
                self.graph = sdc_model.graph

    def build_inputs(self, tosca):
        """ Get all the inputs for complex type"""
        result_inputs = {}

        if not tosca.inputs:
            return {}

        for input in tosca.inputs:
            # Idiom: membership test instead of chained type.__eq__ calls
            # (which also shadowed the builtin 'type').
            input_type = input.schema.type
            if input_type in ('list', 'map'):
                # Complex input: expand its entry schema recursively.
                complex_input = []
                entry_schema = self.get_entry_schema(input.schema.schema['entry_schema'])
                self.get_child_input_repeat(complex_input, entry_schema, input)
                result_inputs[input.schema.name] = complex_input

            else:
                simple_input = {
                    "type": input.schema.type,
                    "description": input.schema.description,
                    "required": input.schema.required,
                }
                result_inputs[input.schema.name] = simple_input
        return result_inputs

    def get_child_input_repeat(self, complex_input, entry_schema, input):
        """Recursively expand a complex entry schema into field descriptors
        appended to *complex_input*."""
        custom_defs = input.custom_defs
        properties = custom_defs[entry_schema]['properties']
        for key, value in properties.items():
            if value['type'] == 'list':
                # Nested list: recurse with the child entry schema.
                child_complex_input = []
                child_entry_schema = self.get_entry_schema(value['entry_schema'])
                self.get_child_input_repeat(child_complex_input, child_entry_schema, input)
                complex_input.append({key: child_complex_input})
            else:
                simple_input = {
                    key: "",
                    "type": value['type'],
                    "required": value['required'],
                }
                # 'description' is optional in the schema.
                if 'description' in value:
                    simple_input["description"] = value['description']
                complex_input.append(simple_input)

    def get_entry_schema(self, entry_schema):
        """Normalize an entry_schema that may be a dict with a 'type' field."""
        if isinstance(entry_schema, dict) and 'type' in entry_schema:
            return entry_schema['type']
        return entry_schema
diff --git a/catalog/pub/utils/toscaparser/servicemodel.py b/catalog/pub/utils/toscaparser/servicemodel.py
new file mode 100644
index 0000000..47d6630
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/servicemodel.py
@@ -0,0 +1,188 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+from catalog.pub.utils.toscaparser.const import NS_METADATA_SECTIONS, PNF_METADATA_SECTIONS, VNF_SECTIONS, PNF_SECTIONS, VL_SECTIONS
+from catalog.pub.utils.toscaparser.basemodel import BaseInfoModel
+
+logger = logging.getLogger(__name__)
+
+SDC_SERVICE_SECTIONS = (SERVICE_TYPE, SRV_DESCRIPTION) = (
+ 'org.openecomp.resource.abstract.nodes.service', 'description')
+
+SDC_SERVICE_METADATA_SECTIONS = (SRV_UUID, SRV_INVARIANTUUID, SRV_NAME) = (
+ 'UUID', 'invariantUUID', 'name')
+
+SDC_VL = (VL_TYPE) = ('tosca.nodes.nfv.ext.zte.VL')
+SDC_VL_SECTIONS = (VL_ID, VL_METADATA, VL_PROPERTIES, VL_DESCRIPTION) = \
+ ("name", "metadata", "properties", "description")
+
+SDC_VF = (VF_TYPE, VF_UUID) = \
+ ('org.openecomp.resource.abstract.nodes.VF', 'UUID')
+SDC_VF_SECTIONS = (VF_ID, VF_METADATA, VF_PROPERTIES, VF_DESCRIPTION) = \
+ ("name", "metadata", "properties", "description")
+
+SDC_PNF = (PNF_TYPE) = \
+ ('org.openecomp.resource.abstract.nodes.PNF')
+SDC_PNF_METADATA_SECTIONS = (SDC_PNF_UUID, SDC_PNF_INVARIANTUUID, SDC_PNF_NAME, SDC_PNF_METADATA_DESCRIPTION, SDC_PNF_VERSION) = \
+ ("UUID", "invariantUUID", "name", "description", "version")
+SDC_PNF_SECTIONS = (SDC_PNF_ID, SDC_PNF_METADATA, SDC_PNF_PROPERTIES, SDC_PNF_DESCRIPTION) = \
+ ("name", "metadata", "properties", "description")
+
+SERVICE_RELATIONSHIPS = [["tosca.relationships.network.LinksTo", "tosca.relationships.nfv.VirtualLinksTo", "tosca.capabilities.nfv.VirtualLinkable", "tosca.relationships.DependsOn"], []]
+
+
class SdcServiceModel(BaseInfoModel):
    """Model of an SDC-generated TOSCA service template.

    Parses service metadata, the ns section, inputs, and the VNF/PNF/VL
    node templates plus the deployment graph from a service CSAR.
    """

    # (TOSCA VL property name, vl_profile key) pairs copied verbatim when present.
    VL_PROFILE_KEY_MAP = (
        ('segmentation_id', 'segmentationId'),
        ('network_name', 'networkName'),
        ('cidr', 'cidr'),
        ('start_ip', 'startIp'),
        ('end_ip', 'endIp'),
        ('gateway_ip', 'gatewayIp'),
        ('physical_network', 'physicalNetwork'),
        ('network_type', 'networkType'),
        ('dhcp_enabled', 'dhcpEnabled'),
        ('vlan_transparent', 'vlanTransparent'),
        ('mtu', 'mtu'),
        ('ip_version', 'ip_version'),
        ('dns_nameservers', 'dns_nameservers'),
        ('host_routes', 'host_routes'),
        ('network_id', 'network_id'),
    )

    def __init__(self, tosca):
        super(SdcServiceModel, self).__init__(tosca=tosca)

    def parseModel(self, tosca):
        """Populate all model sections from the parsed TOSCA template.

        Node-derived sections (vnfs/pnfs/vls/graph) are only built when the
        template actually declares node templates.
        """
        self.metadata = self._buildServiceMetadata(tosca)
        self.ns = self._build_ns(tosca)
        self.inputs = self.buildInputs(tosca)
        if hasattr(tosca, 'nodetemplates'):
            nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
            types = tosca.topology_template.custom_defs
            self.basepath = self.get_base_path(tosca)
            self.vnfs = self._get_all_vnf(nodeTemplates, types)
            self.pnfs = self._get_all_pnf(nodeTemplates, types)
            self.vls = self._get_all_vl(nodeTemplates, types)
            self.graph = self.get_deploy_graph(tosca, SERVICE_RELATIONSHIPS)

    def _buildServiceMetadata(self, tosca):
        """Map SDC service metadata onto the NS metadata sections.

        SDC service metadata format (example)::

            invariantUUID: e2618ee1-a29a-44c4-a52a-b718fe1269f4
            UUID: 2362d14a-115f-4a2b-b449-e2f93c0b7c89
            name: demoVLB
            description: catalogservicedescription
            type: Service
            category: NetworkL1-3
            serviceType: ''
            serviceRole: ''
            serviceEcompNaming: true
            ecompGeneratedNaming: true
            namingPolicy: ''
        """
        metadata_temp = self.buildMetadata(tosca)
        metadata = {}
        return self.setTargetValues(metadata, NS_METADATA_SECTIONS, metadata_temp, SDC_SERVICE_METADATA_SECTIONS)

    def _get_all_vnf(self, nodeTemplates, node_types):
        """Collect all VF node templates as vnf dicts.

        SDC resource metadata format (example)::

            invariantUUID: 9ed46ddc-8eb7-4cb0-a1b6-04136c921af4
            UUID: b56ba35d-45fb-41e3-b6b8-b4f66917baa1
            customizationUUID: af0a6e64-967b-476b-87bc-959dcf59c305
            version: '1.0'
            name: b7d2fceb-dd11-43cd-a3fa
            description: vendor software product
            type: VF
            category: Generic
            subcategory: Abstract
            resourceVendor: b9d9f9f7-7994-4f0d-8104
            resourceVendorRelease: '1.0'
            resourceVendorModelNumber: ''
        """
        vnfs = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, VF_TYPE):
                vnf = {}
                self.setTargetValues(vnf, VNF_SECTIONS, node, SDC_VF_SECTIONS)
                # SDC does not always populate properties.id; fall back to the
                # resource UUID from the node metadata.
                if not vnf['properties'].get('id', None) and node['metadata']:
                    vnf['properties']['id'] = node['metadata'].get('UUID', None)
                vnf['properties']['vnfm_info'] = vnf['properties'].get('nf_type', None)
                # Compute the linked networks once (was computed twice); both
                # sections carry the same content but as independent lists.
                networks = self._get_networks(node, node_types)
                vnf['dependencies'] = networks
                vnf['networks'] = list(networks)
                vnfs.append(vnf)
        return vnfs

    def _get_all_pnf(self, nodeTemplates, node_types):
        """Collect all PNF node templates, merging their metadata into properties."""
        pnfs = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, PNF_TYPE):
                pnf = {}
                self.setTargetValues(pnf, PNF_SECTIONS, node, SDC_PNF_SECTIONS)
                self.setTargetValues(pnf['properties'], PNF_METADATA_SECTIONS, node['metadata'], SDC_PNF_METADATA_SECTIONS)
                pnf['networks'] = self._get_networks(node, node_types)
                pnfs.append(pnf)
        return pnfs

    def _get_all_vl(self, nodeTemplates, node_types):
        """Collect all VL node templates, flattening network properties into
        a camelCase 'vl_profile' section (see VL_PROFILE_KEY_MAP)."""
        vls = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, VL_TYPE):
                vl = {}
                self.setTargetValues(vl, VL_SECTIONS, node, SDC_VL_SECTIONS)
                vl_profile = {}
                for prop_name, profile_key in self.VL_PROFILE_KEY_MAP:
                    if prop_name in vl['properties']:
                        vl_profile[profile_key] = vl['properties'][prop_name]
                vl['properties']['vl_profile'] = vl_profile
                vls.append(vl)
        return vls

    def _get_networks(self, node, node_types):
        """Return [{'key_name', 'vl_id'}] for each requirement of a VF node;
        empty for non-VF nodes or nodes without requirements."""
        rets = []
        if 'requirements' in node and self.isNodeTypeX(node, node_types, VF_TYPE):
            for item in node['requirements']:
                for key, value in list(item.items()):
                    rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)})
        return rets

    def _build_ns(self, tosca):
        """Build the ns section from substitution_mappings, back-filling
        descriptor_id/name/invariant_id from the SDC service metadata when
        the template properties leave them empty."""
        ns = self.get_substitution_mappings(tosca)
        properties = ns.get("properties", {})
        metadata = ns.get("metadata", {})
        if properties.get("descriptor_id", "") == "":
            properties["descriptor_id"] = metadata.get(SRV_UUID, "")
        # NOTE(review): "verison" is a misspelling kept for backward
        # compatibility -- confirm no consumer reads this exact key before
        # renaming it to "version".
        properties["verison"] = ""
        properties["designer"] = ""
        if properties.get("name", "") == "":
            properties["name"] = metadata.get(SRV_NAME, "")
        if properties.get("invariant_id", "") == "":
            properties["invariant_id"] = metadata.get(SRV_INVARIANTUUID, "")
        return ns
diff --git a/catalog/pub/utils/toscaparser/testdata/ns/ran.csar b/catalog/pub/utils/toscaparser/testdata/ns/ran.csar
new file mode 100644
index 0000000..9ea868c
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/ns/ran.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/ns/service-vIMS.csar b/catalog/pub/utils/toscaparser/testdata/ns/service-vIMS.csar
new file mode 100644
index 0000000..0aeed58
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/ns/service-vIMS.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/pnf/ran-du.csar b/catalog/pub/utils/toscaparser/testdata/pnf/ran-du.csar
new file mode 100644
index 0000000..45168a9
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/pnf/ran-du.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vSBC.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vSBC.csar
new file mode 100644
index 0000000..921eafd
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vSBC.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/infra.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/infra.csar
new file mode 100644
index 0000000..5c9fbcf
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/infra.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbng.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbng.csar
new file mode 100644
index 0000000..b11a6ef
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbng.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbrgemu.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbrgemu.csar
new file mode 100644
index 0000000..730ea8d
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vbrgemu.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgmux.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgmux.csar
new file mode 100644
index 0000000..b0f37a7
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgmux.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgw.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgw.csar
new file mode 100644
index 0000000..ca652bf
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpedpdk/vgw.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/infra.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/infra.csar
new file mode 100644
index 0000000..c91c034
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/infra.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbng.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbng.csar
new file mode 100644
index 0000000..5011563
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbng.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbrgemu.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbrgemu.csar
new file mode 100644
index 0000000..0f99199
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vbrgemu.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgmux.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgmux.csar
new file mode 100644
index 0000000..3d2dbf7
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgmux.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgw.csar b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgw.csar
new file mode 100644
index 0000000..79e0d20
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/testdata/vnf/vcpesriov/vgw.csar
Binary files differ
diff --git a/catalog/pub/utils/toscaparser/tests.py b/catalog/pub/utils/toscaparser/tests.py
new file mode 100644
index 0000000..285d970
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/tests.py
@@ -0,0 +1,101 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+import os
+import logging
+import tempfile
+import shutil
+
+from django.test import TestCase
+
+from catalog.pub.utils.toscaparser import parse_vnfd, parse_pnfd, parse_nsd
+from catalog.pub.utils.toscaparser.graph import Graph
+
+logger = logging.getLogger(__name__)
+
+
class TestToscaparser(TestCase):
    """Round-trip tests for the CSAR parsers (VNFD/PNFD/NSD) and the deploy graph."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_vnfd_parse(self):
        """Parse the same vCPE part from both the SR-IOV and DPDK VNFD CSARs."""
        self.remove_temp_dir()
        input_parameters = [{"value": "222222", "key": "sdncontroller"}]
        # Other available parts: "infra", "vbng", "vbrgemu", "vgmux".
        vcpe_part = 'vgw'
        base_dir = os.path.dirname(os.path.abspath(__file__))
        for flavour in ("vcpesriov", "vcpedpdk"):
            csar_file = "%s/testdata/vnf/%s/%s.csar" % (base_dir, flavour, vcpe_part)
            logger.debug("csar_file:%s", csar_file)
            vnfd_json = parse_vnfd(csar_file, input_parameters)
            vnfd = json.loads(vnfd_json)
            metadata = vnfd.get("metadata")
            logger.debug("%s metadata:%s", flavour, metadata)
            self.assertEqual(("vCPE_%s" % vcpe_part), metadata.get("template_name", ""))
            # Only the sriov "infra" descriptor carries this fixed id.
            if flavour == "vcpesriov" and vcpe_part == "infra":
                self.assertEqual("b1bb0ce7-1111-4fa7-95ed-4840d70a1177", vnfd["vnf"]["properties"]["descriptor_id"])

    def test_pnfd_parse(self):
        """Parse the RAN-DU PNFD CSAR and check its metadata and descriptor id."""
        self.remove_temp_dir()
        csar_path = os.path.dirname(os.path.abspath(__file__)) + "/testdata/pnf/ran-du.csar"
        pnfd_json = parse_pnfd(csar_path)
        pnfd_dict = json.loads(pnfd_json)
        metadata = pnfd_dict.get("metadata")
        self.assertEqual("RAN_DU", metadata.get("template_name", ""))
        descriptor_id = pnfd_dict["pnf"]["properties"]["descriptor_id"]
        self.assertEqual(1, descriptor_id)

    def test_nsd_parse(self):
        """NSD parsing is currently disabled; kept as a placeholder."""
        self.remove_temp_dir()
        # ran_csar = os.path.dirname(os.path.abspath(__file__)) + "/testdata/ns/ran.csar"
        # nsd_json = parse_nsd(ran_csar, [])
        # logger.debug("NS ran json: %s" % nsd_json)
        # metadata = json.loads(nsd_json).get("metadata")
        # self.assertEqual("RAN-NS", metadata.get("nsd_name", ""))

    def test_service_descriptor_parse(self):
        """Parse an SDC service descriptor CSAR through the NSD entry point."""
        self.remove_temp_dir()
        service_test_csar = os.path.dirname(os.path.abspath(__file__)) + "/testdata/ns/service-vIMS.csar"
        test_json = parse_nsd(service_test_csar, [])
        logger.debug("service-vIMS json: %s" % test_json)
        metadata = json.loads(test_json).get("metadata")
        self.assertEqual("vIMS_v2", metadata.get("nsd_name", ""))

    def remove_temp_dir(self):
        """Delete leftover tmp* directories from the system temp directory.

        NOTE(review): this sweeps the shared system temp dir, so it assumes
        the tests run in an isolated environment.
        """
        tempdir = tempfile.gettempdir()
        for entry in os.listdir(tempdir):
            if entry.startswith("tmp"):
                path = os.path.join(tempdir, entry)
                if (not os.path.isfile(path)) and os.path.exists(path):
                    shutil.rmtree(path)

    def test_graph(self):
        data = {
            "cucp": [],
            "du": [],
            "vl_flat_net": ["cucp", "cuup"],
            "vl_ext_net": ["cucp", "cuup"],
            "cuup": []
        }
        graph = Graph(data)
        # list.sort() returns None, so the original comparison of two .sort()
        # results was vacuous; compare sorted copies instead.
        self.assertEqual(sorted(['vl_ext_net', 'vl_flat_net']), sorted(graph.get_pre_nodes("cucp")))
diff --git a/catalog/pub/utils/toscaparser/vnfdmodel.py b/catalog/pub/utils/toscaparser/vnfdmodel.py
new file mode 100644
index 0000000..1ed0659
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/vnfdmodel.py
@@ -0,0 +1,48 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+from catalog.pub.config.config import VNFD_SCHEMA_VERSION_DEFAULT
+from catalog.pub.utils.toscaparser.basemodel import BaseInfoModel
+from catalog.pub.utils.toscaparser.vnfdparser import CreateVnfdSOLParser
+
+
+logger = logging.getLogger(__name__)
+
+NFV_VNF_RELATIONSHIPS = [["tosca.relationships.nfv.VirtualLinksTo", "tosca.relationships.nfv.VduAttachesTo", "tosca.relationships.nfv.AttachesTo", "tosca.relationships.nfv.Vdu.AttachedTo", "tosca.relationships.DependsOn"],
+ ["tosca.nodes.relationships.VirtualBindsTo", "tosca.relationships.nfv.VirtualBindsTo"]]
+
+
class EtsiVnfdInfoModel(BaseInfoModel):
    # ETSI VNF descriptor model: parses a SOL001 VNFD CSAR, delegating the
    # section extraction to a SOL-schema-version-specific parser.

    def __init__(self, path, params):
        # Top-level vnf section; filled in by parseModel(), which the
        # BaseInfoModel constructor invokes.
        self.vnf = {}
        super(EtsiVnfdInfoModel, self).__init__(path, params)

    def parseModel(self, tosca):
        """Populate all VNFD sections from the parsed TOSCA template.

        Chooses the SOL parser from the VNFD_SCHEMA_VERSION metadata entry
        (falling back to VNFD_SCHEMA_VERSION_DEFAULT when metadata is absent
        or not a dict), then extracts the vnf section, volume storages,
        VDUs, VLs, CPs, exposed endpoints and the deployment graph.
        Ordering matters: metadata must be built before the schema version
        is read, and vnf before get_all_endpoint_exposed().
        """
        self.metadata = self.buildMetadata(tosca)
        self.inputs = self.buildInputs(tosca)
        nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
        self.basepath = self.get_base_path(tosca)
        node_types = tosca.topology_template.custom_defs
        sol_version = self.metadata.get("VNFD_SCHEMA_VERSION", VNFD_SCHEMA_VERSION_DEFAULT) if isinstance(self.metadata, dict) else VNFD_SCHEMA_VERSION_DEFAULT
        vnfd_sol_parser = CreateVnfdSOLParser(sol_version, self)
        self.vnf = vnfd_sol_parser.build_vnf(tosca)
        self.volume_storages = vnfd_sol_parser.get_all_volume_storage(nodeTemplates, node_types)
        self.vdus = vnfd_sol_parser.get_all_vdu(nodeTemplates, node_types)
        self.vls = vnfd_sol_parser.get_all_vl(nodeTemplates, node_types)
        self.cps = vnfd_sol_parser.get_all_cp(nodeTemplates, node_types)
        self.vnf_exposed = vnfd_sol_parser.get_all_endpoint_exposed()
        self.graph = self.get_deploy_graph(tosca, NFV_VNF_RELATIONSHIPS)
diff --git a/catalog/pub/utils/toscaparser/vnfdparser/__init__.py b/catalog/pub/utils/toscaparser/vnfdparser/__init__.py
new file mode 100644
index 0000000..911de2c
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/vnfdparser/__init__.py
@@ -0,0 +1,23 @@
+# Copyright 2019 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from catalog.pub.utils.toscaparser.vnfdparser.vnfd_sol_base import VnfdSOLBase
+from catalog.pub.utils.toscaparser.vnfdparser.vnfd_sol_251 import VnfdSOL251
+
+
def CreateVnfdSOLParser(sol_version, etsi_vnfd_model):
    """Factory returning the SOL001-version-specific VNFD parser.

    Construction is deferred to the matching branch, so only the requested
    parser is instantiated (the original eagerly built every parser on each
    call just to pick one from a dict).

    NOTE(review): for an unknown version this returns a zero-argument lambda
    producing "Invalid Version" (kept for backward compatibility); callers
    invoking parser methods on it will fail with AttributeError -- consider
    raising a meaningful exception instead.
    """
    if sol_version == "base":
        return VnfdSOLBase(etsi_vnfd_model)
    if sol_version == "2.5.1+1":
        return VnfdSOL251(etsi_vnfd_model)
    return lambda: "Invalid Version"
diff --git a/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_251.py b/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_251.py
new file mode 100644
index 0000000..d1b0d14
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_251.py
@@ -0,0 +1,300 @@
+# Copyright 2019 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import base64
+
+
+logger = logging.getLogger(__name__)
+
+SECTIONS = (VDU_COMPUTE_TYPE, VNF_VL_TYPE, VDU_CP_TYPE, VDU_STORAGE_TYPE) = \
+ ('tosca.nodes.nfv.Vdu.Compute', 'tosca.nodes.nfv.VnfVirtualLink', 'tosca.nodes.nfv.VduCp', 'tosca.nodes.nfv.Vdu.VirtualStorage')
+
+
class VnfdSOL251():
    """SOL001 v2.5.1 VNFD section extractor.

    *model* is the owning EtsiVnfdInfoModel; node-type checks and
    requirement/capability lookups are delegated to it.
    """

    # VNF properties back-filled from metadata when empty:
    # (property name, metadata keys tried in order).
    PROPERTY_METADATA_KEYS = (
        ("descriptor_id", ("descriptor_id", "id", "UUID")),
        ("descriptor_version", ("template_version", "version")),
        ("provider", ("template_author", "provider")),
        ("template_name", ("template_name",)),
        ("software_version", ("software_version",)),
        ("product_name", ("product_name",)),
        ("flavour_description", ("flavour_description",)),
        ("vnfm_info", ("vnfm_info",)),
        ("flavour_id", ("flavour_id",)),
    )

    def __init__(self, model):
        self.model = model

    def build_vnf(self, tosca):
        """Build the top-level vnf section from substitution_mappings.

        Schema-style property declarations ({'type': ..., 'default': ...})
        are collapsed to their default value, then empty properties are
        back-filled from the metadata section (see PROPERTY_METADATA_KEYS).
        """
        vnf = self.model.get_substitution_mappings(tosca)
        properties = vnf.get("properties", {})
        metadata = vnf.get("metadata", {})

        for key, value in list(properties.items()):
            if isinstance(value, dict):
                if value["type"] == "list":
                    # NOTE(review): {} (not []) as the default for list-typed
                    # properties is kept from the original implementation --
                    # confirm consumers do not expect a list before changing.
                    properties[key] = value.get("default", {})
                else:
                    properties[key] = value.get("default", "")

        for prop_name, meta_keys in self.PROPERTY_METADATA_KEYS:
            self._get_property(properties, metadata, prop_name, meta_keys)

        logger.debug("vnf:%s", vnf)
        return vnf

    def get_all_vl(self, nodeTemplates, node_types):
        """Collect all VnfVirtualLink node templates, flattening the SOL001
        vl_profile/protocol data onto the legacy key layout."""
        vls = []
        for node in nodeTemplates:
            if self.model.isNodeTypeX(node, node_types, VNF_VL_TYPE):
                vl = dict()
                vl['vl_id'] = node['name']
                vl['description'] = node['description']
                vl['properties'] = node['properties']
                vlp = vl['properties']
                nodep = node['properties']
                # SOL001 plural/renamed fields exposed under legacy keys.
                vlp['connectivity_type']['layer_protocol'] = nodep['connectivity_type']['layer_protocols'][0]
                vlp['vl_profile']['max_bit_rate_requirements'] = nodep['vl_profile']['max_bitrate_requirements']
                vlp['vl_profile']['min_bit_rate_requirements'] = nodep['vl_profile']['min_bitrate_requirements']
                if 'virtual_link_protocol_data' in nodep['vl_profile']:
                    # Only the first protocol_data entry is considered.
                    protocol_data = nodep['vl_profile']['virtual_link_protocol_data'][0]
                    vlp['vl_profile']['associated_layer_protocol'] = protocol_data['associated_layer_protocol']
                    if 'l3_protocol_data' in protocol_data:
                        l3 = protocol_data['l3_protocol_data']
                        vlp['vl_profile']['networkName'] = l3.get("name", "")
                        vlp['vl_profile']['cidr'] = l3.get("cidr", "")
                        vlp['vl_profile']['dhcpEnabled'] = l3.get("dhcp_enabled", "")
                        vlp['vl_profile']['ip_version'] = l3.get("ip_version", "")
                    if 'l2_protocol_data' in protocol_data:
                        l2 = protocol_data['l2_protocol_data']
                        vlp['vl_profile']['physicalNetwork'] = l2.get("physical_network", "")
                vls.append(vl)
        return vls

    def get_all_cp(self, nodeTemplates, node_types):
        """Collect all VduCp node templates with their linked VL and VDU ids."""
        cps = []
        for node in nodeTemplates:
            if self.model.isNodeTypeX(node, node_types, VDU_CP_TYPE):
                cp = {}
                cp['cp_id'] = node['name']
                cp['cpd_id'] = node['name']
                cp['description'] = node['description']
                cp['properties'] = {}
                nodep = node['properties']
                cp['properties']['trunk_mode'] = nodep.get("trunk_mode", "")
                cp['properties']['layer_protocol'] = nodep.get("layer_protocols", "")
                if 'vnic_type' in nodep:
                    cp['properties']['vnic_type'] = nodep.get("vnic_type", "normal")
                if 'virtual_network_interface_requirements' in nodep:
                    cp['properties']['virtual_network_interface_requirements'] = nodep.get("virtual_network_interface_requirements", "")
                if "protocol" in nodep:
                    node_protocol = nodep['protocol'][0]
                    cp['properties']['protocol_data'] = nodep['protocol']
                    cp_protocol = cp['properties']['protocol_data'][0]
                    # NOTE(review): 'asscociated_layer_protocol' misspelling
                    # is kept -- downstream consumers read this exact key.
                    cp_protocol['asscociated_layer_protocol'] = node_protocol['associated_layer_protocol']
                    if "address_data" in node_protocol:
                        cp_protocol['address_data'] = node_protocol['address_data'][0]

                cp['vl_id'] = self._get_node_vl_id(node)
                cp['vdu_id'] = self._get_node_vdu_id(node)
                vls = self._buil_cp_vls(node)
                # Only expose the full list when the CP links multiple VLs.
                if len(vls) > 1:
                    cp['vls'] = vls
                cps.append(cp)
        return cps

    def get_all_volume_storage(self, nodeTemplates, node_types):
        """Collect all Vdu.VirtualStorage node templates."""
        rets = []
        for node in nodeTemplates:
            if self.model.isNodeTypeX(node, node_types, VDU_STORAGE_TYPE):
                ret = {}
                ret['volume_storage_id'] = node['name']
                if 'description' in node:
                    ret['description'] = node['description']
                ret['properties'] = node['properties']
                rets.append(ret)
        return rets

    def _encode_inject_file(self, inject_file):
        """Base64-encode the file referenced by inject_file['source_path']
        (relative to the CSAR base path) into 'source_data_base64', in place."""
        source_path = os.path.join(self.model.basepath, inject_file['source_path'])
        with open(source_path, "rb") as f:
            inject_file["source_data_base64"] = base64.b64encode(f.read()).decode()

    def get_all_vdu(self, nodeTemplates, node_types):
        """Collect all Vdu.Compute node templates.

        Renames 'boot_data' to 'user_data', inlines inject_files content as
        base64, and resolves dependencies, virtual compute capabilities,
        linked VLs/CPs and artifacts.
        """
        rets = []
        for node in nodeTemplates:
            logger.debug("nodeTemplates :%s", node)
            if self.model.isNodeTypeX(node, node_types, VDU_COMPUTE_TYPE):
                ret = {}
                ret['vdu_id'] = node['name']
                ret['type'] = node['nodeType']
                if 'description' in node:
                    ret['description'] = node['description']
                ret['properties'] = node['properties']
                # Consumers expect SOL001 'boot_data' under 'user_data'.
                if 'boot_data' in node['properties']:
                    ret['properties']['user_data'] = node['properties']['boot_data']
                    del ret['properties']['boot_data']
                # Scoped per node: the original shared one inject_files
                # variable across iterations and re-processed the previous
                # node's files for nodes without any.
                inject_files = node['properties'].get('inject_files')
                if isinstance(inject_files, list):
                    for inject_file in inject_files:
                        self._encode_inject_file(inject_file)
                elif isinstance(inject_files, dict):
                    self._encode_inject_file(inject_files)
                ret['dependencies'] = [self.model.get_requirement_node_name(x) for x in self.model.getNodeDependencys(node)]
                virtual_compute = self.model.getCapabilityByName(node, 'virtual_compute')
                if virtual_compute is not None and 'properties' in virtual_compute:
                    ret['virtual_compute'] = {
                        'virtual_cpu': virtual_compute['properties']['virtual_cpu'],
                        'virtual_memory': virtual_compute['properties']['virtual_memory'],
                        'virtual_storages': virtual_compute['properties'].get("virtual_local_storage", {}),
                    }
                ret['vls'] = self._get_linked_vl_ids(node, nodeTemplates)
                ret['cps'] = self._get_virtal_binding_cp_ids(node, nodeTemplates)
                ret['artifacts'] = self.model.build_artifacts(node)
                rets.append(ret)
        logger.debug("rets:%s", rets)
        return rets

    def get_all_endpoint_exposed(self):
        """Exposed endpoints (external and forwarding CPs) from the already
        built vnf section; empty dict when no vnf was parsed."""
        if self.model.vnf:
            external_cps = self._get_external_cps(self.model.vnf.get('requirements', None))
            forward_cps = self._get_forward_cps(self.model.vnf.get('capabilities', None))
            return {"external_cps": external_cps, "forward_cps": forward_cps}
        return {}

    def _get_property(self, properties, metadata, ptype, meta_types):
        """Fill properties[ptype] from metadata when missing or empty.

        All keys in meta_types are tried in order and each non-empty value
        overwrites the previous one, so the last non-empty candidate wins.
        """
        if ptype not in properties or properties[ptype] == "":
            for mtype in meta_types:
                data = metadata.get(mtype, "")
                if data != "":
                    properties[ptype] = data

    def _trans_virtual_storage(self, virtual_storage):
        """Normalize a virtual_storage requirement (bare node name or
        requirement dict) to {'virtual_storage_id': <node name>}."""
        if isinstance(virtual_storage, str):
            return {"virtual_storage_id": virtual_storage}
        return {"virtual_storage_id": self.model.get_requirement_node_name(virtual_storage)}

    def _get_linked_vl_ids(self, node, node_templates):
        """VL ids reachable from a VDU through its bound CPs' virtual_link
        requirements."""
        vl_ids = []
        for cp in self._get_virtal_binding_cps(node, node_templates):
            for vl_req in self.model.getRequirementByName(cp, 'virtual_link'):
                vl_ids.append(self.model.get_requirement_node_name(vl_req))
        return vl_ids

    def _get_virtal_binding_cp_ids(self, node, nodeTemplates):
        """Names of the CP nodes bound to the given VDU."""
        return [x['name'] for x in self._get_virtal_binding_cps(node, nodeTemplates)]

    def _get_virtal_binding_cps(self, node, nodeTemplates):
        """CP node templates whose virtual_binding requirement targets *node*.
        (Method name misspelling kept for compatibility.)"""
        cps = []
        for tmpnode in nodeTemplates:
            if 'requirements' in tmpnode:
                for item in tmpnode['requirements']:
                    for key, value in list(item.items()):
                        if key.upper().startswith('VIRTUAL_BINDING'):
                            req_node_name = self.model.get_requirement_node_name(value)
                            if req_node_name is not None and req_node_name == node['name']:
                                cps.append(tmpnode)
        return cps

    def _get_node_vdu_id(self, node):
        """Name of the first virtual_binding target, or '' when unbound."""
        vdu_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_binding')]
        return vdu_ids[0] if vdu_ids else ""

    def _get_node_vl_id(self, node):
        """Name of the first virtual_link target, or '' when unlinked."""
        vl_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
        return vl_ids[0] if vl_ids else ""

    def _buil_cp_vls(self, node):
        """One vl entry per virtual_link requirement of the CP node.
        (Method name misspelling kept for compatibility.)"""
        return [self._build_cp_vl(x) for x in self.model.getRequirementByName(node, 'virtual_link')]

    def _build_cp_vl(self, req):
        """{'vl_id': target node} plus any relationship properties."""
        cp_vl = {'vl_id': self.model.get_prop_from_obj(req, 'node')}
        relationship = self.model.get_prop_from_obj(req, 'relationship')
        if relationship is not None:
            properties = self.model.get_prop_from_obj(relationship, 'properties')
            if properties is not None and isinstance(properties, dict):
                cp_vl.update(properties)
        return cp_vl

    @staticmethod
    def _cp_entry(key, value):
        """Normalize a requirement/capability value to {'key_name', 'cpd_id'};
        a non-empty list contributes its first element."""
        if isinstance(value, list) and len(value) > 0:
            return {"key_name": key, "cpd_id": value[0]}
        return {"key_name": key, "cpd_id": value}

    def _get_external_cps(self, vnf_requirements):
        """External connection points from the vnf requirements, which may be
        a mapping or a list of mappings."""
        external_cps = []
        if vnf_requirements:
            if isinstance(vnf_requirements, dict):
                for key, value in list(vnf_requirements.items()):
                    external_cps.append(self._cp_entry(key, value))
            elif isinstance(vnf_requirements, list):
                for vnf_requirement in vnf_requirements:
                    for key, value in list(vnf_requirement.items()):
                        external_cps.append(self._cp_entry(key, value))
        return external_cps

    def _get_forward_cps(self, vnf_capabilities):
        """Forwarding connection points from the vnf capabilities mapping."""
        forward_cps = []
        if vnf_capabilities:
            for key, value in list(vnf_capabilities.items()):
                forward_cps.append(self._cp_entry(key, value))
        return forward_cps
diff --git a/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_base.py b/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_base.py
new file mode 100644
index 0000000..7b3a1a0
--- /dev/null
+++ b/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_base.py
@@ -0,0 +1,236 @@
+# Copyright 2019 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+import os
+import base64
+
+
# Module-level logger for this parser.
logger = logging.getLogger(__name__)

# SOL001 node type names this parser recognises; the tuple also binds each
# type to a named constant used in the isNodeTypeX checks below.
SECTIONS = (VDU_COMPUTE_TYPE, VNF_VL_TYPE, VDU_CP_TYPE, VDU_STORAGE_TYPE) = \
    ('tosca.nodes.nfv.Vdu.Compute', 'tosca.nodes.nfv.VnfVirtualLink', 'tosca.nodes.nfv.VduCp', 'tosca.nodes.nfv.Vdu.VirtualStorage')
+
+
class VnfdSOLBase():
    """Extracts VNFD information (VNF node, VDUs, VLs, CPs, storages) from a
    TOSCA template already parsed into *model* (ETSI SOL001 base node types).

    *model* supplies the navigation helpers used throughout
    (``isNodeTypeX``, ``getRequirementByName``, ``get_requirement_node_name``,
    ``get_prop_from_obj``, ``build_artifacts``, ...) and ``basepath`` — the
    CSAR root used to resolve injected-file paths.
    """

    def __init__(self, model):
        # Facade over the parsed TOSCA template; every lookup goes through it.
        self.model = model

    def build_vnf(self, tosca):
        """Return the substitution-mapped VNF node of *tosca*, back-filling
        missing SOL001 descriptor properties from template metadata."""
        vnf = self.model.get_substitution_mappings(tosca)
        properties = vnf.get("properties", {})
        metadata = vnf.get("metadata", {})
        if properties.get("descriptor_id", "") == "":
            # Historical metadata keys for the descriptor id, in priority order.
            descriptor_id = metadata.get("descriptor_id", "")
            if descriptor_id == "":
                descriptor_id = metadata.get("id", "")
            if descriptor_id == "":
                descriptor_id = metadata.get("UUID", "")
            properties["descriptor_id"] = descriptor_id

        if properties.get("descriptor_version", "") == "":
            version = metadata.get("template_version", "")
            if version == "":
                version = metadata.get("version", "")
            properties["descriptor_version"] = version

        if properties.get("provider", "") == "":
            provider = metadata.get("template_author", "")
            if provider == "":
                provider = metadata.get("provider", "")
            properties["provider"] = provider

        if properties.get("template_name", "") == "":
            # NOTE(review): the original performed the same
            # metadata.get("template_name") lookup twice in a row; the second
            # lookup was dead code and is removed here. A different fallback
            # key (e.g. "name") may have been intended — confirm.
            properties["template_name"] = metadata.get("template_name", "")
        logger.debug("vnf:%s", vnf)
        return vnf

    def get_all_vl(self, nodeTemplates, node_types):
        """Return every virtual-link node as {vl_id, description, properties}.

        NOTE(review): assumes each VL node carries 'description' and
        'properties' keys — raises KeyError otherwise; confirm parser always
        populates them.
        """
        vls = []
        for node in nodeTemplates:
            if self.model.isNodeTypeX(node, node_types, VNF_VL_TYPE):
                vl = {}
                vl['vl_id'] = node['name']
                vl['description'] = node['description']
                vl['properties'] = node['properties']
                vls.append(vl)
        return vls

    def get_all_cp(self, nodeTemplates, node_types):
        """Return every connection-point node with its linked vdu/vl ids."""
        cps = []
        for node in nodeTemplates:
            if self.model.isNodeTypeX(node, node_types, VDU_CP_TYPE):
                cp = {}
                cp['cp_id'] = node['name']
                cp['cpd_id'] = node['name']
                cp['description'] = node['description']
                cp['properties'] = node['properties']
                cp['vl_id'] = self._get_node_vl_id(node)
                cp['vdu_id'] = self._get_node_vdu_id(node)
                vls = self._buil_cp_vls(node)
                # Only multi-VL CPs expose the full vl list; single-VL CPs
                # are covered by cp['vl_id'] above.
                if len(vls) > 1:
                    cp['vls'] = vls
                cps.append(cp)
        return cps

    def get_all_volume_storage(self, nodeTemplates, node_types):
        """Return every virtual-storage node as {volume_storage_id, description?, properties}."""
        rets = []
        for node in nodeTemplates:
            if self.model.isNodeTypeX(node, node_types, VDU_STORAGE_TYPE):
                ret = {}
                ret['volume_storage_id'] = node['name']
                if 'description' in node:
                    ret['description'] = node['description']
                ret['properties'] = node['properties']
                rets.append(ret)
        return rets

    def get_all_vdu(self, nodeTemplates, node_types):
        """Return every VDU (compute) node, enriched with storages,
        dependencies, virtual compute, linked vls/cps and artifacts.

        Side effect: for each entry in the node's 'inject_files' property the
        referenced file is read from the CSAR (model.basepath) and embedded
        back into the property as base64 under 'source_data_base64'.
        """
        rets = []
        for node in nodeTemplates:
            logger.debug("nodeTemplates :%s", node)
            if self.model.isNodeTypeX(node, node_types, VDU_COMPUTE_TYPE):
                ret = {}
                ret['vdu_id'] = node['name']
                ret['type'] = node['nodeType']
                if 'description' in node:
                    ret['description'] = node['description']
                ret['properties'] = node['properties']
                if 'inject_files' in node['properties']:
                    inject_files = node['properties']['inject_files']
                    if inject_files is not None:
                        # 'inject_files' may be a list of specs or a single spec dict.
                        if isinstance(inject_files, list):
                            for inject_file in inject_files:
                                self._embed_source_data(inject_file)
                        if isinstance(inject_files, dict):
                            self._embed_source_data(inject_files)
                virtual_storages = self.model.getRequirementByName(node, 'virtual_storage')
                # Plain map — the original functools.partial wrapper added nothing.
                ret['virtual_storages'] = [self._trans_virtual_storage(vs) for vs in virtual_storages]
                ret['dependencies'] = [self.model.get_requirement_node_name(x) for x in self.model.getNodeDependencys(node)]
                virtual_compute = self.model.getCapabilityByName(node, 'virtual_compute')
                if virtual_compute is not None and 'properties' in virtual_compute:
                    ret['virtual_compute'] = virtual_compute['properties']
                ret['vls'] = self._get_linked_vl_ids(node, nodeTemplates)
                ret['cps'] = self._get_virtal_binding_cp_ids(node, nodeTemplates)
                ret['artifacts'] = self.model.build_artifacts(node)
                rets.append(ret)
        logger.debug("rets:%s", rets)
        return rets

    def _embed_source_data(self, inject_file):
        """Read inject_file['source_path'] (relative to the CSAR base path)
        and store its content base64-encoded under 'source_data_base64'."""
        source_path = os.path.join(self.model.basepath, inject_file['source_path'])
        with open(source_path, "rb") as f:
            source_data = f.read()
        inject_file["source_data_base64"] = base64.b64encode(source_data).decode()

    def get_all_endpoint_exposed(self):
        """Return the VNF's exposed endpoints, or {} when no VNF node exists."""
        if self.model.vnf:
            external_cps = self._get_external_cps(self.model.vnf.get('requirements', None))
            forward_cps = self._get_forward_cps(self.model.vnf.get('capabilities', None))
            return {"external_cps": external_cps, "forward_cps": forward_cps}
        return {}

    def _trans_virtual_storage(self, virtual_storage):
        """Normalise a virtual_storage requirement (string or dict form) to
        {"virtual_storage_id": ...}."""
        if isinstance(virtual_storage, str):
            return {"virtual_storage_id": virtual_storage}
        return {"virtual_storage_id": self.model.get_requirement_node_name(virtual_storage)}

    def _get_linked_vl_ids(self, node, node_templates):
        """Ids of all VLs reachable from *node* through its bound CPs."""
        vl_ids = []
        cps = self._get_virtal_binding_cps(node, node_templates)
        for cp in cps:
            vl_reqs = self.model.getRequirementByName(cp, 'virtual_link')
            for vl_req in vl_reqs:
                vl_ids.append(self.model.get_requirement_node_name(vl_req))
        return vl_ids

    def _get_virtal_binding_cp_ids(self, node, nodeTemplates):
        """Names of all CP nodes virtually bound to *node*."""
        return [x['name'] for x in self._get_virtal_binding_cps(node, nodeTemplates)]

    def _get_virtal_binding_cps(self, node, nodeTemplates):
        """CP node templates whose 'virtual_binding*' requirement targets *node*."""
        cps = []
        for tmpnode in nodeTemplates:
            if 'requirements' in tmpnode:
                for item in tmpnode['requirements']:
                    for key, value in list(item.items()):
                        # Case-insensitive prefix match covers keys such as
                        # 'virtual_binding' and numbered variants.
                        if key.upper().startswith('VIRTUAL_BINDING'):
                            req_node_name = self.model.get_requirement_node_name(value)
                            if req_node_name is not None and req_node_name == node['name']:
                                cps.append(tmpnode)
        return cps

    def _get_node_vdu_id(self, node):
        """Id of the first VDU this node is virtually bound to, or ''."""
        vdu_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_binding')]
        if len(vdu_ids) > 0:
            return vdu_ids[0]
        return ""

    def _get_node_vl_id(self, node):
        """Id of the first VL this node links to, or ''."""
        vl_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
        if len(vl_ids) > 0:
            return vl_ids[0]
        return ""

    def _buil_cp_vls(self, node):
        """Build one vl descriptor per 'virtual_link' requirement of *node*.
        (Name typo 'buil' kept: it is the identifier callers use.)"""
        return [self._build_cp_vl(x) for x in self.model.getRequirementByName(node, 'virtual_link')]

    def _build_cp_vl(self, req):
        """Turn a 'virtual_link' requirement into a flat dict: target vl id
        plus any relationship properties merged in at top level."""
        cp_vl = {}
        cp_vl['vl_id'] = self.model.get_prop_from_obj(req, 'node')
        relationship = self.model.get_prop_from_obj(req, 'relationship')
        if relationship is not None:
            properties = self.model.get_prop_from_obj(relationship, 'properties')
            if properties is not None and isinstance(properties, dict):
                cp_vl.update(properties)
        return cp_vl

    def _get_external_cps(self, vnf_requirements):
        """External connection points from the VNF node's requirements
        (dict short form or list-of-dicts long form) as
        {"key_name": ..., "cpd_id": ...} entries."""
        def _entry(key, value):
            # Non-empty list: first element is the cpd id; otherwise verbatim.
            cpd_id = value[0] if isinstance(value, list) and value else value
            return {"key_name": key, "cpd_id": cpd_id}

        external_cps = []
        if vnf_requirements:
            if isinstance(vnf_requirements, dict):
                external_cps = [_entry(k, v) for k, v in vnf_requirements.items()]
            elif isinstance(vnf_requirements, list):
                for vnf_requirement in vnf_requirements:
                    external_cps.extend(_entry(k, v) for k, v in vnf_requirement.items())
        return external_cps

    def _get_forward_cps(self, vnf_capabilities):
        """Forwarding connection points from the VNF node's capabilities dict
        as {"key_name": ..., "cpd_id": ...} entries."""
        forward_cps = []
        if vnf_capabilities:
            for key, value in list(vnf_capabilities.items()):
                cpd_id = value[0] if isinstance(value, list) and value else value
                forward_cps.append({"key_name": key, "cpd_id": cpd_id})
        return forward_cps
diff --git a/catalog/pub/utils/values.py b/catalog/pub/utils/values.py
new file mode 100644
index 0000000..d02d544
--- /dev/null
+++ b/catalog/pub/utils/values.py
@@ -0,0 +1,33 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
def ignore_case_get(args, key, def_val=""):
    """Return args[key], falling back to a case-insensitive key match.

    Args:
        args: mapping to search; a None/empty mapping yields def_val
              (the original raised TypeError on None — fixed).
        key: key to look up; falsy keys yield def_val.
        def_val: value returned when nothing matches.
    """
    if not args or not key:
        return def_val
    if key in args:
        return args[key]
    # Hoist the uppercase conversion out of the scan loop.
    key_upper = key.upper()
    for existing_key in args:
        if existing_key.upper() == key_upper:
            return args[existing_key]
    return def_val
+
+
def remove_none_key(data, none_list=None):
    """Recursively drop dict entries / list items whose value is "none-like"
    (member of *none_list*; defaults to [None, '', 'NULL', 'None', [], {}]).

    Filtering tests the ORIGINAL value before recursion, so a container that
    becomes empty only after cleaning is kept (as an empty container).
    A falsy none_list (None or []) selects the default list, as before.
    """
    if not none_list:
        none_list = [None, '', 'NULL', 'None', [], {}]
    if isinstance(data, dict):
        return {k: remove_none_key(v, none_list)
                for k, v in data.items() if v not in none_list}
    if isinstance(data, list):
        return [remove_none_key(item, none_list)
                for item in data if item not in none_list]
    # Scalars (even none-like ones at top level) pass through unchanged.
    return data