Diffstat (limited to 'genericparser/pub')
-rw-r--r--  genericparser/pub/__init__.py  13
-rw-r--r--  genericparser/pub/config/__init__.py  13
-rw-r--r--  genericparser/pub/config/config.py  85
-rw-r--r--  genericparser/pub/database/__init__.py  13
-rw-r--r--  genericparser/pub/database/admin.py  361
-rw-r--r--  genericparser/pub/database/migrations/0001_initial.py  229
-rw-r--r--  genericparser/pub/database/migrations/0002_auto_20190422_1442.py  47
-rw-r--r--  genericparser/pub/database/migrations/__init__.py  13
-rw-r--r--  genericparser/pub/database/models.py  234
-rw-r--r--  genericparser/pub/exceptions.py  57
-rw-r--r--  genericparser/pub/msapi/__init__.py  13
-rw-r--r--  genericparser/pub/msapi/extsys.py  175
-rw-r--r--  genericparser/pub/msapi/sdc.py  130
-rw-r--r--  genericparser/pub/redisco/__init__.py  58
-rw-r--r--  genericparser/pub/redisco/containers.py  116
-rw-r--r--  genericparser/pub/ssl/cert/foobar.crt  20
-rw-r--r--  genericparser/pub/ssl/cert/foobar.csr  18
-rw-r--r--  genericparser/pub/ssl/cert/foobar.key  27
-rw-r--r--  genericparser/pub/utils/__init__.py  13
-rw-r--r--  genericparser/pub/utils/fileutil.py  78
-rw-r--r--  genericparser/pub/utils/idutil.py  20
-rw-r--r--  genericparser/pub/utils/jobutil.py  145
-rw-r--r--  genericparser/pub/utils/restcall.py  114
-rw-r--r--  genericparser/pub/utils/syscomm.py  19
-rw-r--r--  genericparser/pub/utils/tests.py  221
-rw-r--r--  genericparser/pub/utils/timeutil.py  19
-rw-r--r--  genericparser/pub/utils/toscaparsers/__init__.py  54
-rw-r--r--  genericparser/pub/utils/toscaparsers/basemodel.py  537
-rw-r--r--  genericparser/pub/utils/toscaparsers/const.py  30
-rw-r--r--  genericparser/pub/utils/toscaparsers/dataentityext.py  33
-rw-r--r--  genericparser/pub/utils/toscaparsers/graph.py  74
-rw-r--r--  genericparser/pub/utils/toscaparsers/nsdmodel.py  220
-rw-r--r--  genericparser/pub/utils/toscaparsers/pnfmodel.py  53
-rw-r--r--  genericparser/pub/utils/toscaparsers/sdmodel.py  93
-rw-r--r--  genericparser/pub/utils/toscaparsers/servicemodel.py  188
-rw-r--r--  genericparser/pub/utils/toscaparsers/testdata/ns/ran.csar  bin 3007 -> 0 bytes
-rw-r--r--  genericparser/pub/utils/toscaparsers/testdata/ns/service-vIMS.csar  bin 47518 -> 0 bytes
-rw-r--r--  genericparser/pub/utils/toscaparsers/testdata/pnf/ran-du.csar  bin 2688 -> 0 bytes
-rw-r--r--  genericparser/pub/utils/toscaparsers/testdata/vnf/vSBC.csar  bin 11516 -> 0 bytes
-rw-r--r--  genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/infra.csar  bin 15716 -> 0 bytes
-rw-r--r--  genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbng.csar  bin 15357 -> 0 bytes
-rw-r--r--  genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbrgemu.csar  bin 14527 -> 0 bytes
-rw-r--r--  genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgmux.csar  bin 14970 -> 0 bytes
-rw-r--r--  genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgw.csar  bin 15008 -> 0 bytes
-rw-r--r--  genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/infra.csar  bin 15432 -> 0 bytes
-rw-r--r--  genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbng.csar  bin 15410 -> 0 bytes
-rw-r--r--  genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbrgemu.csar  bin 14569 -> 0 bytes
-rw-r--r--  genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgmux.csar  bin 15023 -> 0 bytes
-rw-r--r--  genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgw.csar  bin 23182 -> 0 bytes
-rw-r--r--  genericparser/pub/utils/toscaparsers/tests.py  102
-rw-r--r--  genericparser/pub/utils/toscaparsers/vnfdmodel.py  48
-rw-r--r--  genericparser/pub/utils/toscaparsers/vnfdparser/__init__.py  24
-rw-r--r--  genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_251.py  264
-rw-r--r--  genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_base.py  236
-rw-r--r--  genericparser/pub/utils/values.py  33
55 files changed, 0 insertions, 4240 deletions
diff --git a/genericparser/pub/__init__.py b/genericparser/pub/__init__.py
deleted file mode 100644
index c7b6818..0000000
--- a/genericparser/pub/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/genericparser/pub/config/__init__.py b/genericparser/pub/config/__init__.py
deleted file mode 100644
index c7b6818..0000000
--- a/genericparser/pub/config/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/genericparser/pub/config/config.py b/genericparser/pub/config/config.py
deleted file mode 100644
index 24f4fca..0000000
--- a/genericparser/pub/config/config.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# [MSB]
-MSB_SERVICE_IP = '127.0.0.1'
-MSB_SERVICE_PORT = '80'
-
-# [REDIS]
-REDIS_HOST = '127.0.0.1'
-REDIS_PORT = '6379'
-REDIS_PASSWD = ''
-
-# [mysql]
-DB_IP = "127.0.0.1"
-DB_PORT = 3306
-DB_NAME = "genericparser"
-DB_USER = "genericparser"
-DB_PASSWD = "genericparser"
-
-# [MDC]
-SERVICE_NAME = "genericparser"
-FORWARDED_FOR_FIELDS = ["HTTP_X_FORWARDED_FOR", "HTTP_X_FORWARDED_HOST",
- "HTTP_X_FORWARDED_SERVER"]
-
-# [register]
-REG_TO_MSB_WHEN_START = True
-REG_TO_MSB_REG_URL = "/api/microservices/v1/services"
-REG_TO_MSB_REG_PARAM = [{
- "serviceName": "parser",
- "version": "v1",
- "url": "/api/parser/v1",
- "protocol": "REST",
- "visualRange": "1",
- "nodes": [{
- "ip": "127.0.0.1",
- "port": "8806",
- "ttl": 0
- }]
-}, {
- "serviceName": "nsd",
- "version": "v1",
- "url": "/api/nsd/v1",
- "protocol": "REST",
- "visualRange": "1",
- "nodes": [{
- "ip": "127.0.0.1",
- "port": "8806",
- "ttl": 0
- }]
-}, {
- "serviceName": "vnfpkgm",
- "version": "v1",
- "url": "/api/vnfpkgm/v1",
- "protocol": "REST",
- "visualRange": "1",
- "nodes": [{
- "ip": "127.0.0.1",
- "port": "8806",
- "ttl": 0
- }]
-}]
-
-# genericparser path(values is defined in settings.py)
-# CATALOG_ROOT_PATH = None
-# CATALOG_URL_PATH = None
-GENERICPARSER_ROOT_PATH = None
-GENERICPARSER_URL_PATH = None
-
-# [sdc config]
-SDC_BASE_URL = "http://msb-iag/api"
-SDC_USER = "aai"
-SDC_PASSWD = "Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U"
-
-VNFD_SCHEMA_VERSION_DEFAULT = "base"
diff --git a/genericparser/pub/database/__init__.py b/genericparser/pub/database/__init__.py
deleted file mode 100644
index c7b6818..0000000
--- a/genericparser/pub/database/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/genericparser/pub/database/admin.py b/genericparser/pub/database/admin.py
deleted file mode 100644
index ec7a92a..0000000
--- a/genericparser/pub/database/admin.py
+++ /dev/null
@@ -1,361 +0,0 @@
-# Copyright 2019 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from django.contrib import admin
-
-from genericparser.pub.database.models import NSPackageModel
-from genericparser.pub.database.models import ServicePackageModel
-from genericparser.pub.database.models import VnfPackageModel
-from genericparser.pub.database.models import PnfPackageModel
-from genericparser.pub.database.models import SoftwareImageModel
-from genericparser.pub.database.models import JobModel
-from genericparser.pub.database.models import JobStatusModel
-from genericparser.pub.database.models import NsdmSubscriptionModel
-from genericparser.pub.database.models import VnfPkgSubscriptionModel
-
-
-@admin.register(NSPackageModel)
-class NSPackageModelAdmin(admin.ModelAdmin):
- list_display_links = ('nsPackageId', 'nsdName')
- fields = [
- "nsPackageId",
- "nsPackageUri",
- "checksum",
- "sdcCsarId",
- "onboardingState",
- "operationalState",
- "usageState",
- "deletionPending",
- "nsdId",
- "invariantId",
- "nsdName",
- "nsdDesginer",
- "nsdDescription",
- "nsdVersion",
- "userDefinedData",
- "localFilePath",
- "nsdModel"
- ]
-
- list_display = [
- "nsPackageId",
- "nsPackageUri",
- "checksum",
- "sdcCsarId",
- "onboardingState",
- "operationalState",
- "usageState",
- "deletionPending",
- "nsdId",
- "invariantId",
- "nsdName",
- "nsdDesginer",
- "nsdDescription",
- "nsdVersion",
- "userDefinedData",
- "localFilePath",
- "nsdModel"
- ]
-
- search_fields = (
- "nsPackageId",
- "nsdId",
- "nsdName",
- "sdcCsarId"
- )
-
-
-@admin.register(ServicePackageModel)
-class ServicePackageModelAdmin(admin.ModelAdmin):
- list_display_links = ('servicePackageId', 'servicedName')
- fields = [
- "servicePackageId",
- "servicePackageUri",
- "checksum",
- "sdcCsarId",
- "onboardingState",
- "operationalState",
- "usageState",
- "deletionPending",
- "servicedId",
- "invariantId",
- "servicedName",
- "servicedDesigner",
- "servicedDescription",
- "servicedVersion",
- "userDefinedData",
- "localFilePath",
- "servicedModel"
- ]
-
- list_display = [
- "servicePackageId",
- "servicePackageUri",
- "checksum",
- "sdcCsarId",
- "onboardingState",
- "operationalState",
- "usageState",
- "deletionPending",
- "servicedId",
- "invariantId",
- "servicedName",
- "servicedDesigner",
- "servicedDescription",
- "servicedVersion",
- "userDefinedData",
- "localFilePath",
- "servicedModel"
- ]
-
- search_fields = (
- "servicePackageId",
- "sdcCsarId",
- "servicedName",
- "onboardingState"
- )
-
-
-@admin.register(VnfPackageModel)
-class VnfPackageModelAdmin(admin.ModelAdmin):
- list_display_links = ('vnfPackageId', 'vnfdId')
- fields = [
- "vnfPackageId",
- "vnfPackageUri",
- "SdcCSARUri",
- "checksum",
- "onboardingState",
- "operationalState",
- "usageState",
- "deletionPending",
- "vnfdId",
- "vnfVendor",
- "vnfdProductName",
- "vnfdVersion",
- "vnfSoftwareVersion",
- "userDefinedData",
- "localFilePath",
- "vnfdModel"
- ]
-
- list_display = [
- "vnfPackageId",
- "vnfPackageUri",
- "SdcCSARUri",
- "checksum",
- "onboardingState",
- "operationalState",
- "usageState",
- "deletionPending",
- "vnfdId",
- "vnfVendor",
- "vnfdProductName",
- "vnfdVersion",
- "vnfSoftwareVersion",
- "userDefinedData",
- "localFilePath",
- "vnfdModel"
- ]
-
- search_fields = (
- "vnfPackageId",
- "onboardingState",
- "vnfdId"
- )
-
-
-@admin.register(PnfPackageModel)
-class PnfPackageModelAdmin(admin.ModelAdmin):
- list_display_links = ('pnfPackageId', 'pnfdId')
- fields = [
- "pnfPackageId",
- "pnfPackageUri",
- "sdcCSARUri",
- "checksum",
- "onboardingState",
- "usageState",
- "deletionPending",
- "pnfdId",
- "pnfVendor",
- "pnfdProductName",
- "pnfdVersion",
- "pnfSoftwareVersion",
- "userDefinedData",
- "localFilePath",
- "pnfdModel",
- "pnfdName"
- ]
-
- list_display = [
- "pnfPackageId",
- "pnfPackageUri",
- "sdcCSARUri",
- "checksum",
- "onboardingState",
- "usageState",
- "deletionPending",
- "pnfdId",
- "pnfVendor",
- "pnfdProductName",
- "pnfdVersion",
- "pnfSoftwareVersion",
- "userDefinedData",
- "localFilePath",
- "pnfdModel",
- "pnfdName"
- ]
-
- search_fields = (
- "pnfPackageId",
- "onboardingState",
- "pnfdId"
- )
-
-
-@admin.register(SoftwareImageModel)
-class SoftwareImageModelAdmin(admin.ModelAdmin):
- list_display_links = ('imageid', 'vnfPackageId')
- fields = [
- "imageid",
- "containerFormat",
- "diskFormat",
- "mindisk",
- "minram",
- "usermetadata",
- "vnfPackageId",
- "filePath",
- "status",
- "vimid"
- ]
-
- list_display = [
- "imageid",
- "containerFormat",
- "diskFormat",
- "mindisk",
- "minram",
- "usermetadata",
- "vnfPackageId",
- "filePath",
- "status",
- "vimid"
- ]
-
- search_fields = (
- "imageid",
- "vnfPackageId",
- "vimid"
- )
-
-
-@admin.register(NsdmSubscriptionModel)
-class NsdmSubscriptionModelAdmin(admin.ModelAdmin):
- list_display_links = ('subscriptionid', 'notificationTypes')
- fields = [
- "subscriptionid",
- "notificationTypes",
- "auth_info",
- "callback_uri",
- "nsdInfoId",
- "nsdId",
- "nsdName",
- "nsdVersion",
- "nsdDesigner",
- "nsdInvariantId",
- "vnfPkgIds",
- "pnfdInfoIds",
- "nestedNsdInfoIds",
- "nsdOnboardingState",
- "nsdOperationalState",
- "nsdUsageState",
- "pnfdId",
- "pnfdName",
- "pnfdVersion",
- "pnfdProvider",
- "pnfdInvariantId",
- "pnfdOnboardingState",
- "pnfdUsageState",
- "links"
- ]
-
- list_display = [
- "subscriptionid",
- "notificationTypes",
- "auth_info",
- "callback_uri",
- "nsdInfoId",
- "nsdId",
- "nsdName",
- "nsdVersion",
- "nsdDesigner",
- "nsdInvariantId",
- "vnfPkgIds",
- "pnfdInfoIds",
- "nestedNsdInfoIds",
- "nsdOnboardingState",
- "nsdOperationalState",
- "nsdUsageState",
- "pnfdId",
- "pnfdName",
- "pnfdVersion",
- "pnfdProvider",
- "pnfdInvariantId",
- "pnfdOnboardingState",
- "pnfdUsageState",
- "links"
- ]
-
- search_fields = (
- "subscriptionid",
- "notificationTypes"
- )
-
-
-@admin.register(VnfPkgSubscriptionModel)
-class VnfPkgSubscriptionModelAdmin(admin.ModelAdmin):
- list_display_links = ('subscription_id', 'notification_types')
- fields = [
- "subscription_id",
- "callback_uri",
- "auth_info",
- "usage_states",
- "notification_types",
- "vnfd_id",
- "vnf_pkg_id",
- "operation_states",
- "vnf_products_from_provider",
- "links"
- ]
-
- list_display = [
- "subscription_id",
- "callback_uri",
- "auth_info",
- "usage_states",
- "notification_types",
- "vnfd_id",
- "vnf_pkg_id",
- "operation_states",
- "vnf_products_from_provider",
- "links"
- ]
-
- search_fields = (
- "subscription_id",
- "notification_types"
- )
-
-
-admin.site.register(JobModel)
-admin.site.register(JobStatusModel)
diff --git a/genericparser/pub/database/migrations/0001_initial.py b/genericparser/pub/database/migrations/0001_initial.py
deleted file mode 100644
index 98ca84c..0000000
--- a/genericparser/pub/database/migrations/0001_initial.py
+++ /dev/null
@@ -1,229 +0,0 @@
-# Copyright 2019 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# Generated by Django 1.11.9 on 2019-04-16 03:53
-from __future__ import unicode_literals
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
- initial = True
-
- dependencies = [
- ]
-
- operations = [
- migrations.CreateModel(
- name='JobModel',
- fields=[
- ('jobid', models.CharField(db_column='JOBID', max_length=255, primary_key=True, serialize=False)),
- ('jobtype', models.CharField(db_column='JOBTYPE', max_length=255)),
- ('jobaction', models.CharField(db_column='JOBACTION', max_length=255)),
- ('resid', models.CharField(db_column='RESID', max_length=255)),
- ('status', models.IntegerField(blank=True, db_column='STATUS', null=True)),
- ('starttime', models.CharField(blank=True, db_column='STARTTIME', max_length=255, null=True)),
- ('endtime', models.CharField(blank=True, db_column='ENDTIME', max_length=255, null=True)),
- ('progress', models.IntegerField(blank=True, db_column='PROGRESS', null=True)),
- ('user', models.CharField(blank=True, db_column='USER', max_length=255, null=True)),
- ('parentjobid', models.CharField(blank=True, db_column='PARENTJOBID', max_length=255, null=True)),
- ('resname', models.CharField(blank=True, db_column='RESNAME', max_length=255, null=True)),
- ],
- options={
- 'db_table': 'CATALOG_JOB',
- },
- ),
- migrations.CreateModel(
- name='JobStatusModel',
- fields=[
- ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('indexid', models.IntegerField(db_column='INDEXID')),
- ('jobid', models.CharField(db_column='JOBID', max_length=255)),
- ('status', models.CharField(db_column='STATUS', max_length=255)),
- ('progress', models.IntegerField(blank=True, db_column='PROGRESS', null=True)),
- ('descp', models.TextField(db_column='DESCP', max_length=65535)),
- ('errcode', models.CharField(blank=True, db_column='ERRCODE', max_length=255, null=True)),
- ('addtime', models.CharField(blank=True, db_column='ADDTIME', max_length=255, null=True)),
- ],
- options={
- 'db_table': 'CATALOG_JOB_STATUS',
- },
- ),
- migrations.CreateModel(
- name='NsdmSubscriptionModel',
- fields=[
- ('subscriptionid', models.CharField(db_column='SUBSCRIPTIONID', max_length=255, primary_key=True, serialize=False)),
- ('notificationTypes', models.TextField(db_column='NOTIFICATIONTYPES', null=True)),
- ('auth_info', models.TextField(db_column='AUTHINFO', null=True)),
- ('callback_uri', models.CharField(db_column='CALLBACKURI', max_length=255)),
- ('nsdInfoId', models.TextField(db_column='NSDINFOID', null=True)),
- ('nsdId', models.TextField(db_column='NSDID', null=True)),
- ('nsdName', models.TextField(db_column='NSDNAME', null=True)),
- ('nsdVersion', models.TextField(db_column='NSDVERSION', null=True)),
- ('nsdDesigner', models.TextField(db_column='NSDDESIGNER', null=True)),
- ('nsdInvariantId', models.TextField(db_column='NSDINVARIANTID', null=True)),
- ('vnfPkgIds', models.TextField(db_column='VNFPKGIDS', null=True)),
- ('pnfdInfoIds', models.TextField(db_column='PNFDINFOIDS', null=True)),
- ('nestedNsdInfoIds', models.TextField(db_column='NESTEDNSDINFOIDS', null=True)),
- ('nsdOnboardingState', models.TextField(db_column='NSDONBOARDINGSTATE', null=True)),
- ('nsdOperationalState', models.TextField(db_column='NSDOPERATIONALSTATE', null=True)),
- ('nsdUsageState', models.TextField(db_column='NSDUSAGESTATE', null=True)),
- ('pnfdId', models.TextField(db_column='PNFDID', null=True)),
- ('pnfdName', models.TextField(db_column='PNFDNAME', null=True)),
- ('pnfdVersion', models.TextField(db_column='PNFDVERSION', null=True)),
- ('pnfdProvider', models.TextField(db_column='PNFDPROVIDER', null=True)),
- ('pnfdInvariantId', models.TextField(db_column='PNFDINVARIANTID', null=True)),
- ('pnfdOnboardingState', models.TextField(db_column='PNFDONBOARDINGSTATE', null=True)),
- ('pnfdUsageState', models.TextField(db_column='PNFDUSAGESTATE', null=True)),
- ('links', models.TextField(db_column='LINKS')),
- ],
- options={
- 'db_table': 'CATALOG_NSDM_SUBSCRIPTION',
- },
- ),
- migrations.CreateModel(
- name='NSPackageModel',
- fields=[
- ('nsPackageId', models.CharField(db_column='NSPACKAGEID', max_length=50, primary_key=True, serialize=False)),
- ('nsPackageUri', models.CharField(blank=True, db_column='NSPACKAGEURI', max_length=300, null=True)),
- ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
- ('sdcCsarId', models.CharField(blank=True, db_column='SDCCSARID', max_length=50, null=True)),
- ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
- ('operationalState', models.CharField(blank=True, db_column='OPERATIONALSTATE', max_length=20, null=True)),
- ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
- ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
- ('nsdId', models.CharField(blank=True, db_column='NSDID', max_length=50, null=True)),
- ('invariantId', models.CharField(blank=True, db_column='INVARIANTID', max_length=50, null=True)),
- ('nsdName', models.CharField(blank=True, db_column='NSDNAME', max_length=50, null=True)),
- ('nsdDesginer', models.CharField(blank=True, db_column='NSDDESIGNER', max_length=50, null=True)),
- ('nsdDescription', models.CharField(blank=True, db_column='NSDDESCRIPTION', max_length=100, null=True)),
- ('nsdVersion', models.CharField(blank=True, db_column='NSDVERSION', max_length=20, null=True)),
- ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
- ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
- ('nsdModel', models.TextField(blank=True, db_column='NSDMODEL', max_length=65535, null=True)),
- ],
- options={
- 'db_table': 'CATALOG_NSPACKAGE',
- },
- ),
- migrations.CreateModel(
- name='PnfPackageModel',
- fields=[
- ('pnfPackageId', models.CharField(db_column='PNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
- ('pnfPackageUri', models.CharField(blank=True, db_column='PNFPACKAGEURI', max_length=300, null=True)),
- ('sdcCSARUri', models.CharField(blank=True, db_column='SDCCSARURI', max_length=300, null=True)),
- ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
- ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
- ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
- ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
- ('pnfdId', models.CharField(blank=True, db_column='PNFDID', max_length=50, null=True)),
- ('pnfVendor', models.CharField(blank=True, db_column='VENDOR', max_length=50, null=True)),
- ('pnfdProductName', models.CharField(blank=True, db_column='PNFDPRODUCTNAME', max_length=50, null=True)),
- ('pnfdVersion', models.CharField(blank=True, db_column='PNFDVERSION', max_length=20, null=True)),
- ('pnfSoftwareVersion', models.CharField(blank=True, db_column='PNFSOFTWAREVERSION', max_length=20, null=True)),
- ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
- ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
- ('pnfdModel', models.TextField(blank=True, db_column='PNFDMODEL', max_length=65535, null=True)),
- ('pnfdName', models.TextField(blank=True, db_column='PNFDNAME', max_length=65535, null=True)),
- ],
- options={
- 'db_table': 'CATALOG_PNFPACKAGE',
- },
- ),
- migrations.CreateModel(
- name='ServicePackageModel',
- fields=[
- ('servicePackageId', models.CharField(db_column='SERVICEPACKAGEID', max_length=50, primary_key=True, serialize=False)),
- ('servicePackageUri', models.CharField(blank=True, db_column='SERVICEPACKAGEURI', max_length=300, null=True)),
- ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
- ('sdcCsarId', models.CharField(blank=True, db_column='SDCCSARID', max_length=50, null=True)),
- ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
- ('operationalState', models.CharField(blank=True, db_column='OPERATIONALSTATE', max_length=20, null=True)),
- ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
- ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
- ('servicedId', models.CharField(blank=True, db_column='SERVICEDID', max_length=50, null=True)),
- ('invariantId', models.CharField(blank=True, db_column='INVARIANTID', max_length=50, null=True)),
- ('servicedName', models.CharField(blank=True, db_column='SERVICEDNAME', max_length=50, null=True)),
- ('servicedDesigner', models.CharField(blank=True, db_column='SERVICEDDESIGNER', max_length=50, null=True)),
- ('servicedDescription', models.CharField(blank=True, db_column='SERVICEDDESCRIPTION', max_length=100, null=True)),
- ('servicedVersion', models.CharField(blank=True, db_column='SERVICEDVERSION', max_length=20, null=True)),
- ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
- ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
- ('servicedModel', models.TextField(blank=True, db_column='SERVICEDMODEL', max_length=65535, null=True)),
- ],
- options={
- 'db_table': 'CATALOG_SERVICEPACKAGE',
- },
- ),
- migrations.CreateModel(
- name='SoftwareImageModel',
- fields=[
- ('imageid', models.CharField(db_column='IMAGEID', max_length=50, primary_key=True, serialize=False)),
- ('containerFormat', models.CharField(db_column='CONTAINERFORMAT', max_length=20)),
- ('diskFormat', models.CharField(db_column='DISKFORMAT', max_length=20)),
- ('mindisk', models.CharField(db_column='MINDISK', max_length=20)),
- ('minram', models.CharField(db_column='MINRAM', max_length=20)),
- ('usermetadata', models.CharField(db_column='USAERMETADATA', max_length=1024)),
- ('vnfPackageId', models.CharField(db_column='VNFPACKAGEID', max_length=50)),
- ('filePath', models.CharField(db_column='FILEPATH', max_length=300)),
- ('status', models.CharField(db_column='STATUS', max_length=10)),
- ('vimid', models.CharField(db_column='VIMID', max_length=50)),
- ],
- options={
- 'db_table': 'CATALOG_SOFTWAREIMAGEMODEL',
- },
- ),
- migrations.CreateModel(
- name='VnfPackageModel',
- fields=[
- ('vnfPackageId', models.CharField(db_column='VNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
- ('vnfPackageUri', models.CharField(blank=True, db_column='VNFPACKAGEURI', max_length=300, null=True)),
- ('SdcCSARUri', models.CharField(blank=True, db_column='SDCCSARURI', max_length=300, null=True)),
- ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
- ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
- ('operationalState', models.CharField(blank=True, db_column='OPERATIONALSTATE', max_length=20, null=True)),
- ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
- ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
- ('vnfdId', models.CharField(blank=True, db_column='VNFDID', max_length=50, null=True)),
- ('vnfVendor', models.CharField(blank=True, db_column='VENDOR', max_length=50, null=True)),
- ('vnfdProductName', models.CharField(blank=True, db_column='VNFDPRODUCTNAME', max_length=50, null=True)),
- ('vnfdVersion', models.CharField(blank=True, db_column='VNFDVERSION', max_length=20, null=True)),
- ('vnfSoftwareVersion', models.CharField(blank=True, db_column='VNFSOFTWAREVERSION', max_length=20, null=True)),
- ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
- ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
- ('vnfdModel', models.TextField(blank=True, db_column='VNFDMODEL', max_length=65535, null=True)),
- ],
- options={
- 'db_table': 'CATALOG_VNFPACKAGE',
- },
- ),
- migrations.CreateModel(
- name='VnfPkgSubscriptionModel',
- fields=[
- ('subscription_id', models.CharField(db_column='SUBSCRIPTION_ID', max_length=255, primary_key=True, serialize=False)),
- ('callback_uri', models.URLField(db_column='CALLBACK_URI', max_length=255)),
- ('auth_info', models.TextField(db_column='AUTH_INFO')),
- ('usage_states', models.TextField(db_column='USAGE_STATES')),
- ('notification_types', models.TextField(db_column='NOTIFICATION_TYPES')),
- ('vnfd_id', models.TextField(db_column='VNFD_ID')),
- ('vnf_pkg_id', models.TextField(db_column='VNF_PKG_ID')),
- ('operation_states', models.TextField(db_column='OPERATION_STATES')),
- ('vnf_products_from_provider', models.TextField(db_column='VNF_PRODUCTS_FROM_PROVIDER')),
- ('links', models.TextField(db_column='LINKS')),
- ],
- options={
- 'db_table': 'VNF_PKG_SUBSCRIPTION',
- },
- ),
- ]
diff --git a/genericparser/pub/database/migrations/0002_auto_20190422_1442.py b/genericparser/pub/database/migrations/0002_auto_20190422_1442.py
deleted file mode 100644
index da23582..0000000
--- a/genericparser/pub/database/migrations/0002_auto_20190422_1442.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by Django 1.11.9 on 2019-04-22 14:42
-from __future__ import unicode_literals
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-
- dependencies = [
- ('database', '0001_initial'),
- ]
-
- operations = [
- migrations.AlterModelTable(
- name='jobmodel',
- table='GENERICPARSER_JOB',
- ),
- migrations.AlterModelTable(
- name='jobstatusmodel',
- table='GENERICPARSER_JOB_STATUS',
- ),
- migrations.AlterModelTable(
- name='nsdmsubscriptionmodel',
- table='GENERICPARSER_NSDM_SUBSCRIPTION',
- ),
- migrations.AlterModelTable(
- name='nspackagemodel',
- table='GENERICPARSER_NSPACKAGE',
- ),
- migrations.AlterModelTable(
- name='pnfpackagemodel',
- table='GENERICPARSER_PNFPACKAGE',
- ),
- migrations.AlterModelTable(
- name='servicepackagemodel',
- table='GENERICPARSER_SERVICEPACKAGE',
- ),
- migrations.AlterModelTable(
- name='softwareimagemodel',
- table='GENERICPARSER_SOFTWAREIMAGEMODEL',
- ),
- migrations.AlterModelTable(
- name='vnfpackagemodel',
- table='GENERICPARSER_VNFPACKAGE',
- ),
- ]
diff --git a/genericparser/pub/database/migrations/__init__.py b/genericparser/pub/database/migrations/__init__.py
deleted file mode 100644
index 0c847b7..0000000
--- a/genericparser/pub/database/migrations/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2019 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/genericparser/pub/database/models.py b/genericparser/pub/database/models.py
deleted file mode 100644
index ffbd6d0..0000000
--- a/genericparser/pub/database/models.py
+++ /dev/null
@@ -1,234 +0,0 @@
-# Copyright 2016-2018 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from django.db import models
-
-
-class NSPackageModel(models.Model):
- nsPackageId = models.CharField(db_column='NSPACKAGEID', primary_key=True, max_length=50)
- nsPackageUri = models.CharField(db_column='NSPACKAGEURI', max_length=300, null=True, blank=True)
- checksum = models.CharField(db_column='CHECKSUM', max_length=50, null=True, blank=True) # checksum
- sdcCsarId = models.CharField(db_column='SDCCSARID', max_length=50, null=True, blank=True) # SdcCSARUri
- onboardingState = models.CharField(db_column='ONBOARDINGSTATE', max_length=20, blank=True, null=True)
- operationalState = models.CharField(db_column='OPERATIONALSTATE', max_length=20, blank=True, null=True) # operationalState
- usageState = models.CharField(db_column='USAGESTATE', max_length=20, blank=True, null=True) # usageState
- deletionPending = models.CharField(db_column='DELETIONPENDING', max_length=20, blank=True, null=True) # deletionPending
- nsdId = models.CharField(db_column='NSDID', max_length=50, blank=True, null=True)
- invariantId = models.CharField(db_column='INVARIANTID', max_length=50, blank=True, null=True) # nsdInvariantId
- nsdName = models.CharField(db_column='NSDNAME', max_length=50, blank=True, null=True)
- nsdDesginer = models.CharField(db_column='NSDDESIGNER', max_length=50, null=True, blank=True)
- nsdDescription = models.CharField(db_column='NSDDESCRIPTION', max_length=100, null=True, blank=True)
- nsdVersion = models.CharField(db_column='NSDVERSION', max_length=20, null=True, blank=True)
- userDefinedData = models.TextField(db_column='USERDEFINEDDATA', max_length=1024, blank=True, null=True) # userDefinedData
- localFilePath = models.CharField(db_column='LOCALFILEPATH', max_length=300, null=True, blank=True)
- nsdModel = models.TextField(db_column='NSDMODEL', max_length=65535, null=True, blank=True)
-
- class Meta:
- db_table = 'GENERICPARSER_NSPACKAGE'
-
-
-class ServicePackageModel(models.Model):
- servicePackageId = models.CharField(db_column='SERVICEPACKAGEID', primary_key=True, max_length=50)
- servicePackageUri = models.CharField(db_column='SERVICEPACKAGEURI', max_length=300, null=True, blank=True)
- checksum = models.CharField(db_column='CHECKSUM', max_length=50, null=True, blank=True) # checksum
- sdcCsarId = models.CharField(db_column='SDCCSARID', max_length=50, null=True, blank=True) # SdcCSARUri
- onboardingState = models.CharField(db_column='ONBOARDINGSTATE', max_length=20, blank=True, null=True)
- operationalState = models.CharField(db_column='OPERATIONALSTATE', max_length=20, blank=True, null=True) # operationalState
- usageState = models.CharField(db_column='USAGESTATE', max_length=20, blank=True, null=True) # usageState
- deletionPending = models.CharField(db_column='DELETIONPENDING', max_length=20, blank=True, null=True) # deletionPending
- servicedId = models.CharField(db_column='SERVICEDID', max_length=50, blank=True, null=True)
- invariantId = models.CharField(db_column='INVARIANTID', max_length=50, blank=True, null=True) # servicedInvariantId
- servicedName = models.CharField(db_column='SERVICEDNAME', max_length=50, blank=True, null=True)
- servicedDesigner = models.CharField(db_column='SERVICEDDESIGNER', max_length=50, null=True, blank=True)
- servicedDescription = models.CharField(db_column='SERVICEDDESCRIPTION', max_length=100, null=True, blank=True)
- servicedVersion = models.CharField(db_column='SERVICEDVERSION', max_length=20, null=True, blank=True)
- userDefinedData = models.TextField(db_column='USERDEFINEDDATA', max_length=1024, blank=True, null=True) # userDefinedData
- localFilePath = models.CharField(db_column='LOCALFILEPATH', max_length=300, null=True, blank=True)
- servicedModel = models.TextField(db_column='SERVICEDMODEL', max_length=65535, null=True, blank=True)
-
- class Meta:
- db_table = 'GENERICPARSER_SERVICEPACKAGE'
-
-
-class VnfPackageModel(models.Model):
- # uuid = models.CharField(db_column='UUID', primary_key=True, max_length=255)
- vnfPackageId = models.CharField(db_column='VNFPACKAGEID', primary_key=True, max_length=50) # onboardedVnfPkgInfoId
- vnfPackageUri = models.CharField(db_column='VNFPACKAGEURI', max_length=300, null=True, blank=True) # downloadUri
- SdcCSARUri = models.CharField(db_column='SDCCSARURI', max_length=300, null=True, blank=True) # SdcCSARUri
- checksum = models.CharField(db_column='CHECKSUM', max_length=50, null=True, blank=True) # checksum
- onboardingState = models.CharField(db_column='ONBOARDINGSTATE', max_length=20, blank=True, null=True)
- operationalState = models.CharField(db_column='OPERATIONALSTATE', max_length=20, blank=True, null=True) # operationalState
- usageState = models.CharField(db_column='USAGESTATE', max_length=20, blank=True, null=True) # usageState
- deletionPending = models.CharField(db_column='DELETIONPENDING', max_length=20, blank=True, null=True) # deletionPending
- vnfdId = models.CharField(db_column='VNFDID', max_length=50, blank=True, null=True) # vnfdId
- vnfVendor = models.CharField(db_column='VENDOR', max_length=50, blank=True, null=True) # vnfProvider
- vnfdProductName = models.CharField(db_column='VNFDPRODUCTNAME', max_length=50, blank=True, null=True) # vnfProductName
- vnfdVersion = models.CharField(db_column='VNFDVERSION', max_length=20, blank=True, null=True) # vnfdVersion
- vnfSoftwareVersion = models.CharField(db_column='VNFSOFTWAREVERSION', max_length=20, blank=True, null=True) # vnfSoftwareVersion
- userDefinedData = models.TextField(db_column='USERDEFINEDDATA', max_length=1024, blank=True, null=True) # userDefinedData
- localFilePath = models.CharField(db_column='LOCALFILEPATH', max_length=300, null=True, blank=True)
- vnfdModel = models.TextField(db_column='VNFDMODEL', max_length=65535, blank=True, null=True) # vnfd
-
- class Meta:
- db_table = 'GENERICPARSER_VNFPACKAGE'
-
-
-class PnfPackageModel(models.Model):
- # uuid = models.CharField(db_column='UUID', primary_key=True, max_length=255)
- pnfPackageId = models.CharField(db_column='PNFPACKAGEID', primary_key=True, max_length=50) # onboardedPnfPkgInfoId
- pnfPackageUri = models.CharField(db_column='PNFPACKAGEURI', max_length=300, null=True, blank=True) # downloadUri
- sdcCSARUri = models.CharField(db_column='SDCCSARURI', max_length=300, null=True, blank=True) # sdcCSARUri
- checksum = models.CharField(db_column='CHECKSUM', max_length=50, null=True, blank=True) # checksum
- onboardingState = models.CharField(db_column='ONBOARDINGSTATE', max_length=20, blank=True, null=True)
- usageState = models.CharField(db_column='USAGESTATE', max_length=20, blank=True, null=True) # usageState
- deletionPending = models.CharField(db_column='DELETIONPENDING', max_length=20, blank=True, null=True) # deletionPending
- pnfdId = models.CharField(db_column='PNFDID', max_length=50, blank=True, null=True) # pnfdId
- pnfVendor = models.CharField(db_column='VENDOR', max_length=50, blank=True, null=True) # pnfProvider
- pnfdProductName = models.CharField(db_column='PNFDPRODUCTNAME', max_length=50, blank=True, null=True) # pnfProductName
- pnfdVersion = models.CharField(db_column='PNFDVERSION', max_length=20, blank=True, null=True) # pnfdVersion
- pnfSoftwareVersion = models.CharField(db_column='PNFSOFTWAREVERSION', max_length=20, blank=True, null=True) # pnfSoftwareVersion
- userDefinedData = models.TextField(db_column='USERDEFINEDDATA', max_length=1024, blank=True, null=True) # userDefinedData
- localFilePath = models.CharField(db_column='LOCALFILEPATH', max_length=300, null=True, blank=True)
- pnfdModel = models.TextField(db_column='PNFDMODEL', max_length=65535, blank=True, null=True) # pnfd
- pnfdName = models.TextField(db_column='PNFDNAME', max_length=65535, blank=True, null=True) # pnfd_name
-
- class Meta:
- db_table = 'GENERICPARSER_PNFPACKAGE'
-
-
-class SoftwareImageModel(models.Model):
- imageid = models.CharField(db_column='IMAGEID', primary_key=True, max_length=50)
- containerFormat = models.CharField(db_column='CONTAINERFORMAT', max_length=20)
- diskFormat = models.CharField(db_column='DISKFORMAT', max_length=20)
- mindisk = models.CharField(db_column='MINDISK', max_length=20)
- minram = models.CharField(db_column='MINRAM', max_length=20)
- usermetadata = models.CharField(db_column='USAERMETADATA', max_length=1024)
- vnfPackageId = models.CharField(db_column='VNFPACKAGEID', max_length=50)
- filePath = models.CharField(db_column='FILEPATH', max_length=300)
- status = models.CharField(db_column='STATUS', max_length=10)
- vimid = models.CharField(db_column='VIMID', max_length=50)
- # filetype = models.CharField(db_column='FILETYPE', max_length=2)
- # vimuser = models.CharField(db_column='VIMUSER', max_length=50)
- # tenant = models.CharField(db_column='TENANT', max_length=50)
- # purpose = models.CharField(db_column='PURPOSE', max_length=1000)
-
- class Meta:
- db_table = 'GENERICPARSER_SOFTWAREIMAGEMODEL'
-
-
-class JobModel(models.Model):
- jobid = models.CharField(db_column='JOBID', primary_key=True, max_length=255)
- jobtype = models.CharField(db_column='JOBTYPE', max_length=255)
- jobaction = models.CharField(db_column='JOBACTION', max_length=255)
- resid = models.CharField(db_column='RESID', max_length=255)
- status = models.IntegerField(db_column='STATUS', null=True, blank=True)
- starttime = models.CharField(db_column='STARTTIME', max_length=255, null=True, blank=True)
- endtime = models.CharField(db_column='ENDTIME', max_length=255, null=True, blank=True)
- progress = models.IntegerField(db_column='PROGRESS', null=True, blank=True)
- user = models.CharField(db_column='USER', max_length=255, null=True, blank=True)
- parentjobid = models.CharField(db_column='PARENTJOBID', max_length=255, null=True, blank=True)
- resname = models.CharField(db_column='RESNAME', max_length=255, null=True, blank=True)
-
- class Meta:
- db_table = 'GENERICPARSER_JOB'
-
- def toJSON(self):
- import json
- return json.dumps(dict([(attr, getattr(self, attr)) for attr in [f.name for f in self._meta.fields]]))
-
-
-class JobStatusModel(models.Model):
- indexid = models.IntegerField(db_column='INDEXID')
- jobid = models.CharField(db_column='JOBID', max_length=255)
- status = models.CharField(db_column='STATUS', max_length=255)
- progress = models.IntegerField(db_column='PROGRESS', null=True, blank=True)
- descp = models.TextField(db_column='DESCP', max_length=65535)
- errcode = models.CharField(db_column='ERRCODE', max_length=255, null=True, blank=True)
- addtime = models.CharField(db_column='ADDTIME', max_length=255, null=True, blank=True)
-
- class Meta:
- db_table = 'GENERICPARSER_JOB_STATUS'
-
- def toJSON(self):
- import json
- return json.dumps(dict([(attr, getattr(self, attr)) for attr in [f.name for f in self._meta.fields]]))
-
-
-class NsdmSubscriptionModel(models.Model):
- subscriptionid = models.CharField(db_column='SUBSCRIPTIONID', max_length=255, primary_key=True)
- notificationTypes = models.TextField(db_column='NOTIFICATIONTYPES', null=True)
- auth_info = models.TextField(db_column='AUTHINFO', null=True)
- callback_uri = models.CharField(db_column='CALLBACKURI', max_length=255)
- nsdInfoId = models.TextField(db_column='NSDINFOID', null=True)
- nsdId = models.TextField(db_column='NSDID', null=True)
- nsdName = models.TextField(db_column='NSDNAME', null=True)
- nsdVersion = models.TextField(db_column='NSDVERSION', null=True)
- nsdDesigner = models.TextField(db_column='NSDDESIGNER', null=True)
- nsdInvariantId = models.TextField(db_column='NSDINVARIANTID', null=True)
- vnfPkgIds = models.TextField(db_column='VNFPKGIDS', null=True)
- pnfdInfoIds = models.TextField(db_column='PNFDINFOIDS', null=True)
- nestedNsdInfoIds = models.TextField(db_column='NESTEDNSDINFOIDS', null=True)
- nsdOnboardingState = models.TextField(db_column='NSDONBOARDINGSTATE', null=True)
- nsdOperationalState = models.TextField(db_column='NSDOPERATIONALSTATE', null=True)
- nsdUsageState = models.TextField(db_column='NSDUSAGESTATE', null=True)
- pnfdId = models.TextField(db_column='PNFDID', null=True)
- pnfdName = models.TextField(db_column='PNFDNAME', null=True)
- pnfdVersion = models.TextField(db_column='PNFDVERSION', null=True)
- pnfdProvider = models.TextField(db_column='PNFDPROVIDER', null=True)
- pnfdInvariantId = models.TextField(db_column='PNFDINVARIANTID', null=True)
- pnfdOnboardingState = models.TextField(db_column='PNFDONBOARDINGSTATE', null=True)
- pnfdUsageState = models.TextField(db_column='PNFDUSAGESTATE', null=True)
- links = models.TextField(db_column='LINKS')
-
- class Meta:
- db_table = 'GENERICPARSER_NSDM_SUBSCRIPTION'
-
- def toJSON(self):
- import json
- return json.dumps(dict([(attr, getattr(self, attr)) for attr in [f.name for f in self._meta.fields]]))
-
-
-class VnfPkgSubscriptionModel(models.Model):
- subscription_id = models.CharField(max_length=255, primary_key=True, db_column='SUBSCRIPTION_ID')
- callback_uri = models.URLField(db_column="CALLBACK_URI", max_length=255)
- auth_info = models.TextField(db_column="AUTH_INFO")
- usage_states = models.TextField(db_column="USAGE_STATES")
- notification_types = models.TextField(db_column="NOTIFICATION_TYPES")
- vnfd_id = models.TextField(db_column="VNFD_ID")
- vnf_pkg_id = models.TextField(db_column="VNF_PKG_ID")
- operation_states = models.TextField(db_column="OPERATION_STATES")
- vnf_products_from_provider = \
- models.TextField(db_column="VNF_PRODUCTS_FROM_PROVIDER")
- links = models.TextField(db_column="LINKS")
-
- class Meta:
- db_table = 'VNF_PKG_SUBSCRIPTION'
-
- def toDict(self):
- import json
- subscription_obj = {
- "id": self.subscription_id,
- "callbackUri": self.callback_uri,
- "_links": json.loads(self.links)
- }
- filter_obj = {
- "notificationTypes": json.loads(self.notification_types),
- "vnfdId": json.loads(self.vnfd_id),
- "vnfPkgId": json.loads(self.vnf_pkg_id),
- "operationalState": json.loads(self.operation_states),
- "usageState": json.loads(self.usage_states),
- "vnfProductsFromProviders": json.loads(self.vnf_products_from_provider)
- }
- subscription_obj["filter"] = filter_obj
- return subscription_obj
diff --git a/genericparser/pub/exceptions.py b/genericparser/pub/exceptions.py
deleted file mode 100644
index b3c797d..0000000
--- a/genericparser/pub/exceptions.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class GenericparserException(Exception):
- pass
-
-
-class ResourceNotFoundException(GenericparserException):
- pass
-
-
-class PackageNotFoundException(GenericparserException):
- pass
-
-
-class PackageHasExistsException(GenericparserException):
- pass
-
-
-class VnfPkgSubscriptionException(GenericparserException):
- pass
-
-
-class VnfPkgDuplicateSubscriptionException(GenericparserException):
- pass
-
-
-class SubscriptionDoesNotExistsException(GenericparserException):
- pass
-
-
-class NsdmBadRequestException(GenericparserException):
- pass
-
-
-class NsdmDuplicateSubscriptionException(GenericparserException):
- pass
-
-
-class BadRequestException(GenericparserException):
- pass
-
-
-class ArtifactNotFoundException(GenericparserException):
- pass
diff --git a/genericparser/pub/msapi/__init__.py b/genericparser/pub/msapi/__init__.py
deleted file mode 100644
index c7b6818..0000000
--- a/genericparser/pub/msapi/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/genericparser/pub/msapi/extsys.py b/genericparser/pub/msapi/extsys.py
deleted file mode 100644
index 639513f..0000000
--- a/genericparser/pub/msapi/extsys.py
+++ /dev/null
@@ -1,175 +0,0 @@
-# Copyright 2016 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-import uuid
-
-from genericparser.pub.config.config import AAI_BASE_URL, AAI_USER, AAI_PASSWD
-from genericparser.pub.exceptions import GenericparserException
-from genericparser.pub.utils import restcall
-from genericparser.pub.utils.values import ignore_case_get
-
-logger = logging.getLogger(__name__)
-
-
-def call_aai(resource, method, content=''):
- additional_headers = {
- 'X-FromAppId': 'MODEL-GENERICPARSER',
- 'X-TransactionId': str(uuid.uuid1())
- }
- return restcall.call_req(AAI_BASE_URL,
- AAI_USER,
- AAI_PASSWD,
- restcall.rest_no_auth,
- resource,
- method,
- content,
- additional_headers)
-
-
-def get_vims():
- ret = call_aai("/cloud-infrastructure/cloud-regions?depth=all", "GET")
- if ret[0] != 0:
- logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
- raise GenericparserException("Failed to query vims from extsys.")
- # convert vim_info_aai to internal vim_info
- vims_aai = json.JSONDecoder().decode(ret[1])
- vims_aai = ignore_case_get(vims_aai, "cloud-region")
- vims_info = []
- for vim in vims_aai:
- vim = convert_vim_info(vim)
- vims_info.append(vim)
- return vims_info
-
-
-def get_vim_by_id(vim_id):
- cloud_owner, cloud_region = split_vim_to_owner_region(vim_id)
- ret = call_aai("/cloud-infrastructure/cloud-regions/cloud-region/%s/%s?depth=all"
- % (cloud_owner, cloud_region), "GET")
- if ret[0] != 0:
- logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
- raise GenericparserException("Failed to query vim(%s) from extsys." % vim_id)
- # convert vim_info_aai to internal vim_info
- vim_info_aai = json.JSONDecoder().decode(ret[1])
- vim_info = convert_vim_info(vim_info_aai)
- return vim_info
-
-
-def split_vim_to_owner_region(vim_id):
- split_vim = vim_id.split('_')
- cloud_owner = split_vim[0]
- cloud_region = "".join(split_vim[1:])
- return cloud_owner, cloud_region
-
-
-def convert_vim_info(vim_info_aai):
- vim_id = vim_info_aai["cloud-owner"] + "_" + vim_info_aai["cloud-region-id"]
- esr_system_info = ignore_case_get(ignore_case_get(vim_info_aai, "esr-system-info-list"), "esr-system-info")
- vim_info = {
- "vimId": vim_id,
- "name": vim_id,
- "url": ignore_case_get(esr_system_info[0], "service-url"),
- "userName": ignore_case_get(esr_system_info[0], "user-name"),
- "password": ignore_case_get(esr_system_info[0], "password"),
- "tenant": ignore_case_get(esr_system_info[0], "default-tenant"),
- "vendor": ignore_case_get(esr_system_info[0], "vendor"),
- "version": ignore_case_get(esr_system_info[0], "version"),
- "description": "vim",
- "domain": "",
- "type": ignore_case_get(esr_system_info[0], "type"),
- "createTime": "2016-07-18 12:22:53"
- }
- return vim_info
-
-
-def get_sdn_controller_by_id(sdn_ontroller_id):
- ret = call_aai("/external-system/esr-thirdparty-sdnc-list/esr-thirdparty-sdnc/%s?depth=all"
- % sdn_ontroller_id, "GET")
- if ret[0] != 0:
- logger.error("Failed to query sdn ontroller(%s) from extsys. detail is %s.", sdn_ontroller_id, ret[1])
- raise GenericparserException("Failed to query sdn ontroller(%s) from extsys." % sdn_ontroller_id)
- # convert vim_info_aai to internal vim_info
- sdnc_info_aai = json.JSONDecoder().decode(ret[1])
- sdnc_info = convert_sdnc_info(sdnc_info_aai)
- return sdnc_info
-
-
-def convert_sdnc_info(sdnc_info_aai):
- esr_system_info = ignore_case_get(ignore_case_get(sdnc_info_aai, "esr-system-info-list"), "esr-system-info")
- sdnc_info = {
- "sdnControllerId": sdnc_info_aai["thirdparty-sdnc-id"],
- "name": sdnc_info_aai["thirdparty-sdnc-id"],
- "url": ignore_case_get(esr_system_info[0], "service-url"),
- "userName": ignore_case_get(esr_system_info[0], "user-name"),
- "password": ignore_case_get(esr_system_info[0], "password"),
- "vendor": ignore_case_get(esr_system_info[0], "vendor"),
- "version": ignore_case_get(esr_system_info[0], "version"),
- "description": "",
- "protocol": ignore_case_get(esr_system_info[0], "protocal"),
- "productName": ignore_case_get(sdnc_info_aai, "product-name"),
- "type": ignore_case_get(esr_system_info[0], "type"),
- "createTime": "2016-07-18 12:22:53"
- }
- return sdnc_info
-
-
-def get_vnfm_by_id(vnfm_inst_id):
- uri = "/external-system/esr-vnfm-list/esr-vnfm/%s?depth=all" % vnfm_inst_id
- ret = call_aai(uri, "GET")
- if ret[0] > 0:
- logger.error('Send get VNFM information request to extsys failed.')
- raise GenericparserException('Send get VNFM information request to extsys failed.')
- # convert vnfm_info_aai to internal vnfm_info
- vnfm_info_aai = json.JSONDecoder().decode(ret[1])
- vnfm_info = convert_vnfm_info(vnfm_info_aai)
- return vnfm_info
-
-
-def convert_vnfm_info(vnfm_info_aai):
- esr_system_info = ignore_case_get(ignore_case_get(vnfm_info_aai, "esr-system-info-list"), "esr-system-info")
- vnfm_info = {
- "vnfmId": vnfm_info_aai["vnfm-id"],
- "name": vnfm_info_aai["vnfm-id"],
- "type": ignore_case_get(esr_system_info[0], "type"),
- "vimId": vnfm_info_aai["vim-id"],
- "vendor": ignore_case_get(esr_system_info[0], "vendor"),
- "version": ignore_case_get(esr_system_info[0], "version"),
- "description": "vnfm",
- "certificateUrl": vnfm_info_aai["certificate-url"],
- "url": ignore_case_get(esr_system_info[0], "service-url"),
- "userName": ignore_case_get(esr_system_info[0], "user-name"),
- "password": ignore_case_get(esr_system_info[0], "password"),
- "createTime": "2016-07-06 15:33:18"
- }
- return vnfm_info
-
-
-def select_vnfm(vnfm_type, vim_id):
- uri = "/external-system/esr-vnfm-list?depth=all"
- ret = call_aai(uri, "GET")
- if ret[0] > 0:
- logger.error("Failed to call %s: %s", uri, ret[1])
- raise GenericparserException('Failed to get vnfms from extsys.')
- vnfms = json.JSONDecoder().decode(ret[1])
- vnfms = ignore_case_get(vnfms, "esr-vnfm")
- for vnfm in vnfms:
- esr_system_info = ignore_case_get(vnfm, "esr-system-info")
- type = ignore_case_get(esr_system_info, "type")
- vimId = vnfm["vnfm-id"]
- if type == vnfm_type and vimId == vim_id:
- # convert vnfm_info_aai to internal vnfm_info
- vnfm = convert_vnfm_info(vnfm)
- return vnfm
- raise GenericparserException('No vnfm found with %s in vim(%s)' % (vnfm_type, vim_id))
diff --git a/genericparser/pub/msapi/sdc.py b/genericparser/pub/msapi/sdc.py
deleted file mode 100644
index 81715de..0000000
--- a/genericparser/pub/msapi/sdc.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-import os
-
-from genericparser.pub.config.config import SDC_BASE_URL, SDC_USER, SDC_PASSWD
-from genericparser.pub.exceptions import GenericparserException
-from genericparser.pub.utils import fileutil
-from genericparser.pub.utils import restcall
-
-logger = logging.getLogger(__name__)
-
-ASSETTYPE_RESOURCES = "resources"
-ASSETTYPE_SERVICES = "services"
-DISTRIBUTED = "DISTRIBUTED"
-
-
-def call_sdc(resource, method, content=''):
- additional_headers = {
- 'X-ECOMP-InstanceID': 'Modeling',
- }
- return restcall.call_req(base_url=SDC_BASE_URL,
- user=SDC_USER,
- passwd=SDC_PASSWD,
- auth_type=restcall.rest_no_auth,
- resource=resource,
- method=method,
- content=content,
- additional_headers=additional_headers)
-
-
-"""
-sample of return value
-[
- {
- "uuid": "c94490a0-f7ef-48be-b3f8-8d8662a37236",
- "invariantUUID": "63eaec39-ffbe-411c-a838-448f2c73f7eb",
- "name": "underlayvpn",
- "version": "2.0",
- "toscaModelURL": "/sdc/v1/catalog/resources/c94490a0-f7ef-48be-b3f8-8d8662a37236/toscaModel",
- "category": "Volte",
- "subCategory": "VolteVF",
- "resourceType": "VF",
- "lifecycleState": "CERTIFIED",
- "lastUpdaterUserId": "jh0003"
- }
-]
-"""
-
-
-def get_artifacts(asset_type):
- resource = "/sdc/v1/catalog/{assetType}"
- resource = resource.format(assetType=asset_type)
- ret = call_sdc(resource, "GET")
- if ret[0] != 0:
- logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
- raise GenericparserException("Failed to query artifacts(%s) from sdc." % asset_type)
- return json.JSONDecoder().decode(ret[1])
-
-
-def get_artifact(asset_type, csar_id):
- artifacts = get_artifacts(asset_type)
- for artifact in artifacts:
- if artifact["uuid"] == csar_id:
- if asset_type == ASSETTYPE_SERVICES and \
- artifact.get("distributionStatus", None) != DISTRIBUTED:
- raise GenericparserException(
- "The artifact (%s,%s) is not distributed from sdc." % (asset_type, csar_id))
- else:
- return artifact
- raise GenericparserException("Failed to query artifact(%s,%s) from sdc." % (asset_type, csar_id))
-
-
-def get_asset(asset_type, uuid):
- resource = "/sdc/v1/catalog/{assetType}/{uuid}/metadata".format(assetType=asset_type, uuid=uuid)
- ret = call_sdc(resource, "GET")
- if ret[0] != 0:
- logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
- raise GenericparserException("Failed to get asset(%s, %s) from sdc." % (asset_type, uuid))
- asset = json.JSONDecoder().decode(ret[1])
- if asset.get("distributionStatus", None) != DISTRIBUTED:
- raise GenericparserException("The asset (%s,%s) is not distributed from sdc." % (asset_type, uuid))
- else:
- return asset
-
-
-def delete_artifact(asset_type, asset_id, artifact_id):
- resource = "/sdc/v1/catalog/{assetType}/{uuid}/artifacts/{artifactUUID}"
- resource = resource.format(assetType=asset_type, uuid=asset_id, artifactUUID=artifact_id)
- ret = call_sdc(resource, "DELETE")
- if ret[0] != 0:
- logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
-        raise GenericparserException("Failed to delete artifact(%s) from sdc." % artifact_id)
- return json.JSONDecoder().decode(ret[1])
-
-
-def download_artifacts(download_url, local_path, file_name):
- additional_headers = {
- 'X-ECOMP-InstanceID': 'Modeling',
- 'accept': 'application/octet-stream'
- }
- ret = restcall.call_req(base_url=SDC_BASE_URL,
- user=SDC_USER,
- passwd=SDC_PASSWD,
- auth_type=restcall.rest_no_auth,
- resource=download_url,
- method="GET",
- additional_headers=additional_headers)
- if ret[0] != 0:
- logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
- raise GenericparserException("Failed to download %s from sdc." % download_url)
- fileutil.make_dirs(local_path)
- local_file_name = os.path.join(local_path, file_name)
- local_file = open(local_file_name, 'wb')
- local_file.write(ret[1])
- local_file.close()
- return local_file_name
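
The sdc.py module removed above is a thin wrapper around the SDC catalog API. A rough usage sketch (editorial illustration only, assuming the module is importable from its original package and that the catalog entry carries the toscaModelURL field shown in the sample response above):

    from genericparser.pub.msapi import sdc

    def fetch_service_csar(csar_id, local_path="/tmp/csars"):
        # get_artifact() raises GenericparserException unless the service is DISTRIBUTED
        artifact = sdc.get_artifact(sdc.ASSETTYPE_SERVICES, csar_id)
        # toscaModelURL comes from the sample catalog entry documented in the module
        return sdc.download_artifacts(artifact["toscaModelURL"],
                                      local_path, "%s.csar" % csar_id)
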
diff --git a/genericparser/pub/redisco/__init__.py b/genericparser/pub/redisco/__init__.py
deleted file mode 100644
index 217a232..0000000
--- a/genericparser/pub/redisco/__init__.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright (c) 2010 Tim Medina
-#
-# Permission is hereby granted, free of charge, to any person
-# obtaining a copy of this software and associated documentation
-# files (the "Software"), to deal in the Software without
-# restriction, including without limitation the rights to use,
-# copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the
-# Software is furnished to do so, subject to the following
-# conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-# OTHER DEALINGS IN THE SOFTWARE.
-#
-# The original code link is https://github.com/iamteem/redisco/tree/master/redisco/__init__.py
-
-
-import redis
-
-
-class Client(object):
- def __init__(self, **kwargs):
- self.connection_settings = kwargs or {'host': 'localhost', 'port': 6379, 'db': 0}
-
- def redis(self):
- return redis.Redis(**self.connection_settings)
-
- def update(self, d):
- self.connection_settings.update(d)
-
-
-def connection_setup(**kwargs):
- global connection, client
- if client:
- client.update(kwargs)
- else:
- client = Client(**kwargs)
- connection = client.redis()
-
-
-def get_client():
- global connection
- return connection
-
-
-client = Client()
-connection = client.redis()
-
-__all__ = ['connection_setup', 'get_client']
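
The deleted redisco/__init__.py keeps one module-level Client and one shared Redis connection. A minimal sketch of how callers re-point and reuse that connection (host, port and key below are placeholders):

    from genericparser.pub import redisco

    # update the module-level client, then rebuild the shared connection
    redisco.connection_setup(host="redis.example.org", port=6379, db=1)
    r = redisco.get_client()
    r.set("genericparser:heartbeat", "ok")
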
diff --git a/genericparser/pub/redisco/containers.py b/genericparser/pub/redisco/containers.py
deleted file mode 100644
index d30c227..0000000
--- a/genericparser/pub/redisco/containers.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Copyright (c) 2010 Tim Medina
-#
-# Permission is hereby granted, free of charge, to any person
-# obtaining a copy of this software and associated documentation
-# files (the "Software"), to deal in the Software without
-# restriction, including without limitation the rights to use,
-# copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the
-# Software is furnished to do so, subject to the following
-# conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-# OTHER DEALINGS IN THE SOFTWARE.
-#
-# The original code link is https://github.com/iamteem/redisco/tree/master/redisco/containers.py
-
-"""
-This module contains the container classes to create objects
-that persist directly in a Redis server.
-"""
-
-import collections
-from functools import partial
-
-
-class Container(object):
- """Create a container object saved in Redis.
-
- Arguments:
- key -- the Redis key this container is stored at
- db -- the Redis client object. Default: None
-
-    When ``db`` is not set, the container gets the default connection from
-    the ``redisco.connection`` module.
- """
-
- def __init__(self, key, db=None, pipeline=None):
- self._db = db
- self.key = key
- self.pipeline = pipeline
-
- def clear(self):
- """Remove container from Redis database."""
- del self.db[self.key]
-
- def __getattribute__(self, att):
- if att in object.__getattribute__(self, 'DELEGATEABLE_METHODS'):
- return partial(getattr(object.__getattribute__(self, 'db'), att), self.key)
- else:
- return object.__getattribute__(self, att)
-
- @property
- def db(self):
- if self.pipeline:
- return self.pipeline
- if self._db:
- return self._db
- if hasattr(self, 'db_cache') and self.db_cache:
- return self.db_cache
- else:
- from . import connection
- self.db_cache = connection
- return self.db_cache
-
- DELEGATEABLE_METHODS = ()
-
-
-class Hash(Container, collections.MutableMapping):
-
- def __getitem__(self, att):
- return self.hget(att)
-
- def __setitem__(self, att, val):
- self.hset(att, val)
-
- def __delitem__(self, att):
- self.hdel(att)
-
- def __len__(self):
- return self.hlen()
-
- def __iter__(self):
- return self.hgetall().__iter__()
-
- def __contains__(self, att):
- return self.hexists(att)
-
- def __repr__(self):
- return "<%s '%s' %s>" % (self.__class__.__name__, self.key, self.hgetall())
-
- def keys(self):
- return self.hkeys()
-
- def values(self):
- return self.hvals()
-
- def _get_dict(self):
- return self.hgetall()
-
- def _set_dict(self, new_dict):
- self.clear()
- self.update(new_dict)
-
- dict = property(_get_dict, _set_dict)
-
- DELEGATEABLE_METHODS = ('hlen', 'hset', 'hdel', 'hkeys', 'hgetall', 'hvals',
- 'hget', 'hexists', 'hincrby', 'hmget', 'hmset')
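
Hash above behaves like a dict backed by a single Redis hash: the dunder methods map onto HSET/HGET/HLEN/HEXISTS, and every name in DELEGATEABLE_METHODS is forwarded to the underlying client with the container key prepended. A short sketch, assuming a reachable Redis connection:

    from genericparser.pub.redisco.containers import Hash

    jobs = Hash("auto_id_hash")        # container bound to this Redis key
    jobs["job-1"] = 42                 # __setitem__ -> HSET auto_id_hash job-1 42
    exists = "job-1" in jobs           # __contains__ -> HEXISTS
    jobs.hincrby("job-1", 1)           # delegated straight to the Redis client
    jobs.clear()                       # drops the whole key from Redis
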
diff --git a/genericparser/pub/ssl/cert/foobar.crt b/genericparser/pub/ssl/cert/foobar.crt
deleted file mode 100644
index 7ab6dd3..0000000
--- a/genericparser/pub/ssl/cert/foobar.crt
+++ /dev/null
@@ -1,20 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDRDCCAiwCCQD8zmUqCHOp2zANBgkqhkiG9w0BAQsFADBjMQswCQYDVQQGEwJD
-TjEQMA4GA1UECAwHQmVpSmluZzEQMA4GA1UEBwwHQmVpSmluZzENMAsGA1UECgwE
-Q21jYzESMBAGA1UECwwJQ21jYy1vbmFwMQ0wCwYDVQQDDARDbWNjMCAXDTE5MDMy
-NjAyNTI0N1oYDzIxMTkwMzAyMDI1MjQ3WjBjMQswCQYDVQQGEwJDTjEQMA4GA1UE
-CAwHQmVpSmluZzEQMA4GA1UEBwwHQmVpSmluZzENMAsGA1UECgwEQ21jYzESMBAG
-A1UECwwJQ21jYy1vbmFwMQ0wCwYDVQQDDARDbWNjMIIBIjANBgkqhkiG9w0BAQEF
-AAOCAQ8AMIIBCgKCAQEA4DurchTgEw/A1y/Q5gpSSJTLC+KFOV4Vmbz2hlvOGLwV
-NIX1+r7DpaiJTGjEKLCtGsD2tGm69KiUX9FBY1CStnwK2R4wA5NKW+ZKQLd3sRTc
-Hl+2bLFk7E5KvmKZZM4xhsN3ey7Ia8H0sSfKiGlxB1hZI2HibRNy8GWyi95j8MkP
-v+H7HbJlX1kIKb7p2y8aG8AnAzBWikJFcQ1y3bJA2r31wOht63pIekwh+nntt5u+
-Yh/STXHiAe2gT7b9x6RAn09tC6TsBKzdZ4ZKrBLfRwPv6+cbDLcqkhbPukqaFaEs
-rDCLhuWX10sGLEsqXULDwZRoYxTUueLek9v+/8f5EwIDAQABMA0GCSqGSIb3DQEB
-CwUAA4IBAQCenowNpFiy9vH18+9PL4rZjZ1NH+frGqsWvDiyHPnLpneCLOuiXvgv
-kcuLJDYatc6vTlXkJElxwF1fCaJEn6dNq3WtQxdJjhXidAKx8Hsf1Nxkwbvmahv2
-TIWV/FMvop+9SdonDBGZojrYKRsY3EilQf+7/rGEM52HE8S3yE8CCe9xTZSYUs1B
-B8CzOPBVU7SWSRSLUKfdRhjyl4Rqsslxzal+8A36yViHBPhJgmDRoVWVR+E289IH
-FCQ0d8qVvdTGkM79dvZrEH9WSzPwlTR0NSkBMWTNLcWyP8caDjg+fbSVOF+s+sd/
-bLuAyHyeXUzClJx6CA5zwLZz5K5SVxw+
------END CERTIFICATE-----
diff --git a/genericparser/pub/ssl/cert/foobar.csr b/genericparser/pub/ssl/cert/foobar.csr
deleted file mode 100644
index 30b381b..0000000
--- a/genericparser/pub/ssl/cert/foobar.csr
+++ /dev/null
@@ -1,18 +0,0 @@
------BEGIN CERTIFICATE REQUEST-----
-MIIC1DCCAbwCAQAwYzELMAkGA1UEBhMCQ04xEDAOBgNVBAgMB0JlaUppbmcxEDAO
-BgNVBAcMB0JlaUppbmcxDTALBgNVBAoMBENtY2MxEjAQBgNVBAsMCUNtY2Mtb25h
-cDENMAsGA1UEAwwEQ21jYzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
-AOA7q3IU4BMPwNcv0OYKUkiUywvihTleFZm89oZbzhi8FTSF9fq+w6WoiUxoxCiw
-rRrA9rRpuvSolF/RQWNQkrZ8CtkeMAOTSlvmSkC3d7EU3B5ftmyxZOxOSr5imWTO
-MYbDd3suyGvB9LEnyohpcQdYWSNh4m0TcvBlsoveY/DJD7/h+x2yZV9ZCCm+6dsv
-GhvAJwMwVopCRXENct2yQNq99cDobet6SHpMIfp57bebvmIf0k1x4gHtoE+2/cek
-QJ9PbQuk7ASs3WeGSqwS30cD7+vnGwy3KpIWz7pKmhWhLKwwi4bll9dLBixLKl1C
-w8GUaGMU1Lni3pPb/v/H+RMCAwEAAaAsMBMGCSqGSIb3DQEJAjEGDARDbWNjMBUG
-CSqGSIb3DQEJBzEIDAYxMjM0NTYwDQYJKoZIhvcNAQELBQADggEBAGr8XkV5G9bK
-lPc3jUvmS+KSg9UB1wrvf6kQUGDvCvXqZCGw1mRZekN4rH5c1fk9iLwLqDkWDnNo
-79jzAWV76U62GarTOng32TLTItxI/EeUhQFCf+AI/YcJEdHf8HGrDuvC0iSz6/9q
-Fe5HhVSO7zsHdP28J05wTyII+2k4ecAj3oXutUnGCBg0nlexDmxAZoe8x4XHpqkt
-tEKquZdq3l17+v5DKlKwczQcXUBC1yGw0ki67U5w9fVKzpAf7Frr7YnbGS35i5Pv
-ny4SlXPW167hRQKXCniY5QtCocP+GoPD+81uWwf+bjHyAZ3HCd532YFgXW01yJhM
-imRDxx2gDds=
------END CERTIFICATE REQUEST-----
diff --git a/genericparser/pub/ssl/cert/foobar.key b/genericparser/pub/ssl/cert/foobar.key
deleted file mode 100644
index 266f502..0000000
--- a/genericparser/pub/ssl/cert/foobar.key
+++ /dev/null
@@ -1,27 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIIEpQIBAAKCAQEA4DurchTgEw/A1y/Q5gpSSJTLC+KFOV4Vmbz2hlvOGLwVNIX1
-+r7DpaiJTGjEKLCtGsD2tGm69KiUX9FBY1CStnwK2R4wA5NKW+ZKQLd3sRTcHl+2
-bLFk7E5KvmKZZM4xhsN3ey7Ia8H0sSfKiGlxB1hZI2HibRNy8GWyi95j8MkPv+H7
-HbJlX1kIKb7p2y8aG8AnAzBWikJFcQ1y3bJA2r31wOht63pIekwh+nntt5u+Yh/S
-TXHiAe2gT7b9x6RAn09tC6TsBKzdZ4ZKrBLfRwPv6+cbDLcqkhbPukqaFaEsrDCL
-huWX10sGLEsqXULDwZRoYxTUueLek9v+/8f5EwIDAQABAoIBAQCL+dPBqHRkkc3w
-xsGiCMlq06+Y4LQHpsrXKNW/8+lJGYgnPITcHV+mtvnWgAQL3paA//pBj0sM1Xui
-AM/PvomHbxGajbStVrHxgmXR1nXaTkpGj7siSO7WcN1J0eUtv2W9WaHpfL/SPMaS
-HGPbGe9cBXPHmaAuNRjoJqP5mj9LHB0SebJImjiaCYsUkBgC0Ooo4UuwOXLYO/ak
-gZrbM8WwY21rRVc3uDyg5Ez8gxbFG3L39t26gpqBYosqNlPe7/JVkTpxUKk1Allf
-fAJNyfpS2CuY+nQWtCleJFtF1Yq9jwfPvtNUTrXeJq97xFqSIRnJbygttsokbPto
-tLqB4rSBAoGBAPPgidT0KyfYVUaWNEXtOOJyh3MCk0ssalRKf+Dap9J9Bgpjldbu
-/tBBrrbxSEAieXe8gKDwgDY2qBcsUUvEY+EWL7tiMBnS4HvK8/4aEIx14xMgiuCS
-bTnMGlIlImjMKdj0iKOd0N2NPQcfr0NTUdZJ/p1o965lq/9i7xcfHinTAoGBAOth
-JqwyGQ6oP005Vry3S/7E7UJjYxMaUfhRmMGoVz+qXAEfq0r4TkNrcEvP7mu72pVe
-q1P4imQjvvPXqoPBdh310a6OCQ7BrFpkOghHBIG0koblncml4hdBSReUA1auW2Qr
-c/MUSeV96DDbI2mZJulVdqINyaAt/JDMnfdcbCvBAoGAYPTI91/ndFzeckSvHYnV
-TrnnvcKtWnqa/03rDzL++4D3ENRMsvmrVpJ2aob8iXrrPb40iUd0QZlzNFtLKss2
-Rjty2JWNuAaNdsnWPRSRtbX8hBMxA11TjWHmqPfYeT+J95YoaJwKeLp5I8bl/+c1
-JvOeBWjA55XGTq8/jLqzXD8CgYEAiQVyJNW5Hn4083iIlK1DkRkEYRxIRYuR4jNl
-8H5V5BsBGipcZfUsYjT+FzQBQDgII+ILbIOH1Im2lG6ctbx+TSyXlrzaavu1oJ0t
-5zmoVvVOQzcR5pwphI4dxZsFYoV3cFWXVw8dgXoNG7vF3qgoLbbxq57JG/UJTSXA
-Y4oq8kECgYEAlgh6v+o6jCUD7l0JWdRtZy52rhC3W/HrhcHE0/l3RjeV+kLIWr9u
-WbNltgZQGvPVQ+ZwPIYj1gaGP17wm5pAsJNSN4LQ1v4Fj/XjT7zdwYwYOrXIJati
-5HTeyHjm+wwOPYrmH4YLGwAh6T1is42E0K2L7LG8HnO4bHbfV2mKji0=
------END RSA PRIVATE KEY-----
diff --git a/genericparser/pub/utils/__init__.py b/genericparser/pub/utils/__init__.py
deleted file mode 100644
index c7b6818..0000000
--- a/genericparser/pub/utils/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/genericparser/pub/utils/fileutil.py b/genericparser/pub/utils/fileutil.py
deleted file mode 100644
index 6ddfc72..0000000
--- a/genericparser/pub/utils/fileutil.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import os
-import shutil
-import logging
-import tempfile
-import traceback
-import urllib
-import zipfile
-
-
-logger = logging.getLogger(__name__)
-
-
-def make_dirs(path):
- if not os.path.exists(path):
- os.makedirs(path, 0o777)
-
-
-def delete_dirs(path):
- try:
- if os.path.exists(path):
- shutil.rmtree(path)
- except Exception as e:
- logger.error(traceback.format_exc())
- logger.error("Failed to delete %s:%s", path, e.args[0])
-
-
-def download_file_from_http(url, local_dir, file_name):
- local_file_name = os.path.join(local_dir, file_name)
- is_download_ok = False
- try:
- make_dirs(local_dir)
- req = urllib.request.urlopen(url)
- save_file = open(local_file_name, 'w')
- save_file.write(req.read())
- save_file.close()
- req.close()
- is_download_ok = True
- except:
- logger.error(traceback.format_exc())
- logger.error("Failed to download %s to %s.", url, local_file_name)
- return is_download_ok, local_file_name
-
-
-def unzip_file(zip_src, dst_dir, csar_path):
- if os.path.exists(zip_src):
- fz = zipfile.ZipFile(zip_src, 'r')
- for file in fz.namelist():
- fz.extract(file, dst_dir)
- return os.path.join(dst_dir, csar_path)
- else:
- return ""
-
-
-def unzip_csar_to_tmp(zip_src):
- dirpath = tempfile.mkdtemp()
- zip_ref = zipfile.ZipFile(zip_src, 'r')
- zip_ref.extractall(dirpath)
- return dirpath
-
-
-def get_artifact_path(vnf_path, artifact_file):
- for root, dirs, files in os.walk(vnf_path):
- if artifact_file in files:
- return os.path.join(root, artifact_file)
- return None
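
A sketch of how the fileutil helpers above are typically chained to fetch and unpack a package; the URL, file names and the MainServiceTemplate.yaml artifact are placeholders:

    from genericparser.pub.utils import fileutil

    ok, local_csar = fileutil.download_file_from_http(
        "http://repo.example.org/pkgs/vgw.csar", "/tmp/downloads", "vgw.csar")
    if ok:
        tmp_dir = fileutil.unzip_csar_to_tmp(local_csar)
        yaml_path = fileutil.get_artifact_path(tmp_dir, "MainServiceTemplate.yaml")
        fileutil.delete_dirs(tmp_dir)
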
diff --git a/genericparser/pub/utils/idutil.py b/genericparser/pub/utils/idutil.py
deleted file mode 100644
index 768416c..0000000
--- a/genericparser/pub/utils/idutil.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2016 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from genericparser.pub.redisco import containers as cont
-
-
-def get_auto_id(id_type, id_group="auto_id_hash"):
- auto_id_hash = cont.Hash(id_group)
- auto_id_hash.hincrby(id_type, 1)
- return auto_id_hash.hget(id_type)
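
get_auto_id keeps one counter per id_type inside a Redis hash, so HINCRBY makes the increment atomic across concurrent callers. Usage sketch (requires the redisco connection to be reachable; the id names are placeholders):

    from genericparser.pub.utils import idutil

    index = idutil.get_auto_id("job-1234")                      # counter in the default auto_id_hash
    other = idutil.get_auto_id("vnf_seq", id_group="seq_hash")  # or in an explicit hash key
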
diff --git a/genericparser/pub/utils/jobutil.py b/genericparser/pub/utils/jobutil.py
deleted file mode 100644
index abe5227..0000000
--- a/genericparser/pub/utils/jobutil.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import datetime
-import logging
-import uuid
-import traceback
-from functools import reduce
-
-from genericparser.pub.database.models import JobStatusModel, JobModel
-from genericparser.pub.utils import idutil
-
-logger = logging.getLogger(__name__)
-
-
-def enum(**enums):
- return type('Enum', (), enums)
-
-
-JOB_STATUS = enum(PROCESSING=0, FINISH=1)
-JOB_MODEL_STATUS = enum(STARTED='started', PROCESSING='processing', FINISHED='finished', ERROR='error',
- TIMEOUT='timeout')
-JOB_TYPE = enum(CREATE_VNF="create vnf", TERMINATE_VNF="terminate vnf", GRANT_VNF="grant vnf", MANUAL_SCALE_VNF="manual scale vnf",
- HEAL_VNF="heal vnf")
-
-
-class JobUtil(object):
- def __init__(self):
- pass
-
- @staticmethod
- def __gen_job_id(job_name):
- return "%s-%s" % (job_name if job_name else "UnknownJob", uuid.uuid1())
-
- @staticmethod
- def query_job_status(job_id, index_id=-1):
- # logger.info("Query job status, jobid =[%s], responseid [%d]" % (job_id, index_id))
- jobs = []
- if index_id < 0:
- row = JobStatusModel.objects.filter(jobid=job_id).order_by("-indexid").first()
- if row:
- jobs.append(row)
- else:
- [jobs.append(job) for job in JobStatusModel.objects.filter(jobid=job_id).order_by("-indexid")
- if job.indexid > index_id]
-
- # logger.info("Query job status, rows=%s" % str(jobs))
- return jobs
-
- @staticmethod
- def is_job_exists(job_id):
- jobs = JobModel.objects.filter(jobid=job_id)
- return len(jobs) > 0
-
- @staticmethod
- def create_job(inst_type, jobaction, inst_id, user='', job_id=None, res_name=''):
- if job_id is None:
- job_id = JobUtil.__gen_job_id(
- '%s-%s-%s' % (str(inst_type).replace(' ', '_'), str(jobaction).replace(' ', '_'), str(inst_id)))
- job = JobModel()
- job.jobid = job_id
- job.jobtype = inst_type
- job.jobaction = jobaction
- job.resid = str(inst_id)
- job.status = JOB_STATUS.PROCESSING
- job.user = user
- job.starttime = datetime.datetime.now().strftime('%Y-%m-%d %X')
- job.progress = 0
- job.resname = res_name
- logger.debug("create a new job, jobid=%s, jobtype=%s, jobaction=%s, resid=%s, status=%d" %
- (job.jobid, job.jobtype, job.jobaction, job.resid, job.status))
- job.save()
- return job_id
-
- @staticmethod
- def clear_job(job_id):
- [job.delete() for job in JobModel.objects.filter(jobid=job_id)]
- logger.debug("Clear job, job_id=%s" % job_id)
-
- @staticmethod
- def add_job_status(job_id, progress, status_decs, error_code=""):
- jobs = JobModel.objects.filter(jobid=job_id)
- if not jobs:
-            logger.error("Job[%s] does not exist, please create the job first." % job_id)
-            raise Exception("Job[%s] does not exist." % job_id)
- try:
- int_progress = int(progress)
- job_status = JobStatusModel()
- job_status.indexid = int(idutil.get_auto_id(job_id))
- job_status.jobid = job_id
- job_status.status = "processing"
- job_status.progress = int_progress
-
- if job_status.progress == 0:
- job_status.status = "started"
- elif job_status.progress == 100:
- job_status.status = "finished"
- elif job_status.progress == 101:
- job_status.status = "partly_finished"
- elif job_status.progress > 101:
- job_status.status = "error"
-
- if error_code == "255":
- job_status.status = "error"
-
- job_status.descp = status_decs
- # job_status.errcode = error_code
- job_status.errcode = error_code if error_code else "0"
- job_status.addtime = datetime.datetime.now().strftime('%Y-%m-%d %X')
- job_status.save()
- logger.debug("Add a new job status, jobid=%s, indexid=%d,"
- " status=%s, description=%s, progress=%d, errcode=%s, addtime=%r" %
- (job_status.jobid, job_status.indexid, job_status.status, job_status.descp,
- job_status.progress, job_status.errcode, job_status.addtime))
-
- job = jobs[0]
- job.progress = int_progress
- if job_status.progress >= 100:
- job.status = JOB_STATUS.FINISH
- job.endtime = datetime.datetime.now().strftime('%Y-%m-%d %X')
- job.save()
- logger.debug("update job, jobid=%s, progress=%d" % (job_status.jobid, int_progress))
- except:
- logger.error(traceback.format_exc())
-
- @staticmethod
- def clear_job_status(job_id):
- [job.delete() for job in JobStatusModel.objects.filter(jobid=job_id)]
- logger.debug("Clear job status, job_id=%s" % job_id)
-
- @staticmethod
- def get_unfinished_jobs(url_prefix, inst_id, inst_type):
- jobs = JobModel.objects.filter(resid=inst_id, jobtype=inst_type, status=JOB_STATUS.PROCESSING)
- progresses = reduce(lambda content, job: content + [url_prefix + "/" + job.jobid], jobs, [])
- return progresses
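
The JobUtil lifecycle above is create, report progress, then query; reaching progress 100 also flips the JobModel row to FINISH. A sketch with placeholder ids (persistence goes through the Django models imported at the top of the module):

    from genericparser.pub.utils.jobutil import JobUtil, JOB_TYPE

    job_id = JobUtil.create_job(JOB_TYPE.CREATE_VNF, "instantiate", "vnf-instance-1")
    JobUtil.add_job_status(job_id, 50, "package parsed")
    JobUtil.add_job_status(job_id, 100, "finished")          # also closes the JobModel row
    latest = JobUtil.query_job_status(job_id)                # newest JobStatusModel row only
    history = JobUtil.query_job_status(job_id, index_id=0)   # all rows with indexid > 0
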
diff --git a/genericparser/pub/utils/restcall.py b/genericparser/pub/utils/restcall.py
deleted file mode 100644
index 5efa0cb..0000000
--- a/genericparser/pub/utils/restcall.py
+++ /dev/null
@@ -1,114 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import traceback
-import logging
-import urllib
-import uuid
-import httplib2
-import base64
-
-from genericparser.pub.config.config import MSB_SERVICE_IP, MSB_SERVICE_PORT
-
-rest_no_auth, rest_oneway_auth, rest_bothway_auth = 0, 1, 2
-HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED = '200', '201', '204', '202'
-status_ok_list = [HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED]
-HTTP_404_NOTFOUND, HTTP_403_FORBIDDEN, HTTP_401_UNAUTHORIZED, HTTP_400_BADREQUEST = '404', '403', '401', '400'
-
-logger = logging.getLogger(__name__)
-
-
-def call_req(base_url, user, passwd, auth_type, resource, method, content='', additional_headers={}):
- callid = str(uuid.uuid1())
- logger.debug("[%s]call_req('%s','%s','%s',%s,'%s','%s','%s')" % (
- callid, base_url, user, passwd, auth_type, resource, method, content))
- ret = None
- resp_status = ''
- try:
- full_url = combine_url(base_url, resource)
- headers = {'content-type': 'application/json', 'accept': 'application/json'}
- if user:
- headers['Authorization'] = 'Basic %s' % base64.b64encode(bytes('%s:%s' % (user, passwd), "utf-8")).decode()
- ca_certs = None
- if additional_headers:
- headers.update(additional_headers)
- for retry_times in range(3):
- http = httplib2.Http(ca_certs=ca_certs, disable_ssl_certificate_validation=(auth_type == rest_no_auth))
- http.follow_all_redirects = True
- try:
- resp, resp_content = http.request(full_url, method=method.upper(), body=content, headers=headers)
- resp_status, resp_body = resp['status'], resp_content
- logger.debug("[%s][%d]status=%s)" % (callid, retry_times, resp_status))
- if headers['accept'] == 'application/json':
- resp_body = resp_content.decode('UTF-8')
- logger.debug("resp_body=%s", resp_body)
- if resp_status in status_ok_list:
- ret = [0, resp_body, resp_status]
- else:
- ret = [1, resp_body, resp_status]
- break
- except Exception as ex:
- if 'httplib.ResponseNotReady' in str(sys.exc_info()):
- logger.debug("retry_times=%d", retry_times)
- logger.error(traceback.format_exc())
- ret = [1, "Unable to connect to %s" % full_url, resp_status]
- continue
- raise ex
- except urllib.error.URLError as err:
- ret = [2, str(err), resp_status]
- except Exception as ex:
- logger.error(traceback.format_exc())
- logger.error("[%s]ret=%s" % (callid, str(sys.exc_info())))
- res_info = str(sys.exc_info())
- if 'httplib.ResponseNotReady' in res_info:
- res_info = "The URL[%s] request failed or is not responding." % full_url
- ret = [3, res_info, resp_status]
- except:
- logger.error(traceback.format_exc())
- ret = [4, str(sys.exc_info()), resp_status]
-
- logger.debug("[%s]ret=%s" % (callid, str(ret)))
- return ret
-
-
-def req_by_msb(resource, method, content=''):
- base_url = "http://%s:%s/" % (MSB_SERVICE_IP, MSB_SERVICE_PORT)
- return call_req(base_url, "", "", rest_no_auth, resource, method, content)
-
-
-def upload_by_msb(resource, method, file_data={}):
- headers = {'Content-Type': 'application/octet-stream'}
- full_url = "http://%s:%s/%s" % (MSB_SERVICE_IP, MSB_SERVICE_PORT, resource)
- http = httplib2.Http()
- resp, resp_content = http.request(full_url, method=method.upper(), body=file_data, headers=headers)
- resp_status, resp_body = resp['status'], resp_content.decode('UTF-8')
- if resp_status not in status_ok_list:
- logger.error("Status code is %s, detail is %s.", resp_status, resp_body)
- return [1, "Failed to upload file.", resp_status]
- logger.debug("resp_body=%s", resp_body)
- return [0, resp_body, resp_status]
-
-
-def combine_url(base_url, resource):
- full_url = None
- if base_url.endswith('/') and resource.startswith('/'):
- full_url = base_url[:-1] + resource
- elif base_url.endswith('/') and not resource.startswith('/'):
- full_url = base_url + resource
- elif not base_url.endswith('/') and resource.startswith('/'):
- full_url = base_url + resource
- else:
- full_url = base_url + '/' + resource
- return full_url
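
Both helpers above return a [code, body, http_status] triple, where code 0 means the HTTP status was in status_ok_list. A usage sketch; the MSB resource path and SDC endpoint are illustrative only:

    from genericparser.pub.utils import restcall

    ret = restcall.req_by_msb("/api/microservices/v1/services", "GET")
    if ret[0] != 0:
        raise RuntimeError("MSB call failed, status %s: %s" % (ret[2], ret[1]))

    # direct call with basic auth against an arbitrary base URL
    ret = restcall.call_req("https://sdc.example.org:8443", "user", "secret",
                            restcall.rest_no_auth, "/sdc/v1/catalog/services", "GET")
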
diff --git a/genericparser/pub/utils/syscomm.py b/genericparser/pub/utils/syscomm.py
deleted file mode 100644
index 89219ec..0000000
--- a/genericparser/pub/utils/syscomm.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import inspect
-
-
-def fun_name():
- return inspect.stack()[1][3]
diff --git a/genericparser/pub/utils/tests.py b/genericparser/pub/utils/tests.py
deleted file mode 100644
index 8390ce7..0000000
--- a/genericparser/pub/utils/tests.py
+++ /dev/null
@@ -1,221 +0,0 @@
-# Copyright 2018 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import platform
-import unittest
-import mock
-from . import fileutil
-import urllib
-from . import syscomm
-from . import timeutil
-from . import values
-
-from genericparser.pub.database.models import JobStatusModel, JobModel
-from genericparser.pub.utils.jobutil import JobUtil
-
-
-class MockReq():
- def read(self):
- return "1"
-
- def close(self):
- pass
-
-
-class UtilsTest(unittest.TestCase):
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- def test_create_and_delete_dir(self):
- dirs = "abc/def/hij"
- fileutil.make_dirs(dirs)
- fileutil.make_dirs(dirs)
- fileutil.delete_dirs(dirs)
-
- @mock.patch.object(urllib.request, 'urlopen')
- def test_download_file_from_http(self, mock_urlopen):
- mock_urlopen.return_value = MockReq()
- fileutil.delete_dirs("abc")
- is_ok, f_name = fileutil.download_file_from_http("1", "abc", "1.txt")
- self.assertTrue(is_ok)
- if 'Windows' in platform.system():
- self.assertTrue(f_name.endswith("abc\\1.txt"))
- else:
- self.assertTrue(f_name.endswith("abc/1.txt"))
- fileutil.delete_dirs("abc")
-
- def test_query_job_status(self):
- job_id = "1"
- JobStatusModel.objects.filter().delete()
- JobStatusModel(
- indexid=1,
- jobid=job_id,
- status="success",
- progress=10
- ).save()
- JobStatusModel(
- indexid=2,
- jobid=job_id,
- status="success",
- progress=50
- ).save()
- JobStatusModel(
- indexid=3,
- jobid=job_id,
- status="success",
- progress=100
- ).save()
- jobs = JobUtil.query_job_status(job_id)
- self.assertEqual(1, len(jobs))
- self.assertEqual(3, jobs[0].indexid)
- jobs = JobUtil.query_job_status(job_id, 1)
- self.assertEqual(2, len(jobs))
- self.assertEqual(3, jobs[0].indexid)
- self.assertEqual(2, jobs[1].indexid)
- JobStatusModel.objects.filter().delete()
-
- def test_is_job_exists(self):
- job_id = "1"
- JobModel.objects.filter().delete()
- JobModel(
- jobid=job_id,
- jobtype="1",
- jobaction="2",
- resid="3",
- status=0
- ).save()
- self.assertTrue(JobUtil.is_job_exists(job_id))
- JobModel.objects.filter().delete()
-
- def test_create_job(self):
- job_id = "5"
- JobModel.objects.filter().delete()
- JobUtil.create_job(
- inst_type="1",
- jobaction="2",
- inst_id="3",
- user="4",
- job_id=5,
- res_name="6")
- self.assertEqual(1, len(JobModel.objects.filter(jobid=job_id)))
- JobModel.objects.filter().delete()
-
- def test_clear_job(self):
- job_id = "1"
- JobModel.objects.filter().delete()
- JobModel(
- jobid=job_id,
- jobtype="1",
- jobaction="2",
- resid="3",
- status=0
- ).save()
- JobUtil.clear_job(job_id)
- self.assertEqual(0, len(JobModel.objects.filter(jobid=job_id)))
-
- def test_add_job_status_when_job_is_not_created(self):
- JobModel.objects.filter().delete()
- self.assertRaises(
- Exception,
- JobUtil.add_job_status,
- job_id="1",
- progress=1,
- status_decs="2",
- error_code="0"
- )
-
- def test_add_job_status_normal(self):
- job_id = "1"
- JobModel.objects.filter().delete()
- JobStatusModel.objects.filter().delete()
- JobModel(
- jobid=job_id,
- jobtype="1",
- jobaction="2",
- resid="3",
- status=0
- ).save()
- JobUtil.add_job_status(
- job_id="1",
- progress=1,
- status_decs="2",
- error_code="0"
- )
- self.assertEqual(1, len(JobStatusModel.objects.filter(jobid=job_id)))
- JobStatusModel.objects.filter().delete()
- JobModel.objects.filter().delete()
-
- def test_clear_job_status(self):
- job_id = "1"
- JobStatusModel.objects.filter().delete()
- JobStatusModel(
- indexid=1,
- jobid=job_id,
- status="success",
- progress=10
- ).save()
- JobUtil.clear_job_status(job_id)
- self.assertEqual(0, len(JobStatusModel.objects.filter(jobid=job_id)))
-
- def test_get_unfinished_jobs(self):
- JobModel.objects.filter().delete()
- JobModel(
- jobid="11",
- jobtype="InstVnf",
- jobaction="2",
- resid="3",
- status=0
- ).save()
- JobModel(
- jobid="22",
- jobtype="InstVnf",
- jobaction="2",
- resid="3",
- status=0
- ).save()
- JobModel(
- jobid="33",
- jobtype="InstVnf",
- jobaction="2",
- resid="3",
- status=0
- ).save()
- progresses = JobUtil.get_unfinished_jobs(
- url_prefix="/vnfinst",
- inst_id="3",
- inst_type="InstVnf"
- )
- expect_progresses = ['/vnfinst/11', '/vnfinst/22', '/vnfinst/33']
- self.assertEqual(expect_progresses, progresses)
- JobModel.objects.filter().delete()
-
- def test_fun_name(self):
- self.assertEqual("test_fun_name", syscomm.fun_name())
-
- def test_now_time(self):
- self.assertIn(":", timeutil.now_time())
- self.assertIn("-", timeutil.now_time())
-
- def test_ignore_case_get(self):
- data = {
- "Abc": "def",
- "HIG": "klm"
- }
- self.assertEqual("def", values.ignore_case_get(data, 'ABC'))
- self.assertEqual("def", values.ignore_case_get(data, 'abc'))
- self.assertEqual("klm", values.ignore_case_get(data, 'hig'))
- self.assertEqual("bbb", values.ignore_case_get(data, 'aaa', 'bbb'))
diff --git a/genericparser/pub/utils/timeutil.py b/genericparser/pub/utils/timeutil.py
deleted file mode 100644
index 1d97e9d..0000000
--- a/genericparser/pub/utils/timeutil.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-
-
-def now_time(fmt="%Y-%m-%d %H:%M:%S"):
- return datetime.datetime.now().strftime(fmt)
diff --git a/genericparser/pub/utils/toscaparsers/__init__.py b/genericparser/pub/utils/toscaparsers/__init__.py
deleted file mode 100644
index 4b73f48..0000000
--- a/genericparser/pub/utils/toscaparsers/__init__.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-
-from genericparser.pub.utils.toscaparsers.nsdmodel import NsdInfoModel
-from genericparser.pub.utils.toscaparsers.pnfmodel import PnfdInfoModel
-from genericparser.pub.utils.toscaparsers.sdmodel import SdInfoModel
-from genericparser.pub.utils.toscaparsers.vnfdmodel import EtsiVnfdInfoModel
-
-
-def parse_nsd(path, input_parameters=[]):
- tosca_obj = NsdInfoModel(path, input_parameters).model
- strResponse = json.dumps(tosca_obj, default=lambda obj: obj.__dict__)
- strResponse = strResponse.replace(': null', ': ""')
- return strResponse
-
-
-def parse_sd(path, input_parameters=[]):
- tosca_obj = SdInfoModel(path, input_parameters)
- strResponse = json.dumps(tosca_obj, default=lambda obj: obj.__dict__)
- strResponse = strResponse.replace(': null', ': ""')
- return strResponse
-
-
-def parse_vnfd(path, input_parameters=[], isETSI=True):
- if isETSI:
- tosca_obj = EtsiVnfdInfoModel(path, input_parameters)
- else:
- tosca_obj = {}
- strResponse = json.dumps(tosca_obj, default=lambda obj: obj.__dict__)
- strResponse = strResponse.replace(': null', ': ""')
- return strResponse
-
-
-def parse_pnfd(path, input_parameters=[], isETSI=True):
- if isETSI:
- tosca_obj = PnfdInfoModel(path, input_parameters)
- else:
- tosca_obj = {}
- strResponse = json.dumps(tosca_obj, default=lambda obj: obj.__dict__)
- strResponse = strResponse.replace(': null', ': ""')
- return strResponse
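
All four entry points above return a JSON string with null values blanked out, and input parameters may be passed as a list of key/value dicts (they are normalised in BaseInfoModel). A sketch with a placeholder CSAR path and input:

    import json
    from genericparser.pub.utils import toscaparsers

    inputs = [{"key": "flavor_name", "value": "m1.small"}]
    vnfd_json = toscaparsers.parse_vnfd("/tmp/csars/vSBC.csar", input_parameters=inputs)
    vnfd = json.loads(vnfd_json)
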
diff --git a/genericparser/pub/utils/toscaparsers/basemodel.py b/genericparser/pub/utils/toscaparsers/basemodel.py
deleted file mode 100644
index 643041d..0000000
--- a/genericparser/pub/utils/toscaparsers/basemodel.py
+++ /dev/null
@@ -1,537 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ftplib
-import json
-import logging
-import os
-import re
-import shutil
-import urllib
-
-# import paramiko
-from toscaparser.tosca_template import ToscaTemplate
-from toscaparser.properties import Property
-from toscaparser.functions import Function, Concat, GetInput, get_function, function_mappings
-from genericparser.pub.utils.toscaparsers.graph import Graph
-
-from genericparser.pub.utils.toscaparsers.dataentityext import DataEntityExt
-
-logger = logging.getLogger(__name__)
-
-METADATA = "metadata"
-PROPERTIES = "properties"
-DESCRIPTION = "description"
-REQUIREMENTS = "requirements"
-INTERFACES = "interfaces"
-TOPOLOGY_TEMPLATE = "topology_template"
-INPUTS = "inputs"
-CAPABILITIES = "capabilities"
-ATTRIBUTES = "attributes"
-ARTIFACTS = "artifacts"
-DERIVED_FROM = "derived_from"
-
-NODE_NAME = "name"
-NODE_TYPE = "nodeType"
-NODE_ROOT = "tosca.nodes.Root"
-GROUP_TYPE = "groupType"
-GROUPS_ROOT = "tosca.groups.Root"
-
-
-class BaseInfoModel(object):
-
- def __init__(self, path=None, params=None, tosca=None):
- if tosca:
- _tosca = tosca
- else:
- _tosca = self.buildToscaTemplate(path, params)
- self.description = getattr(_tosca, "description", "")
- self.parseModel(_tosca)
-
- def parseModel(self, tosca):
- pass
-
- def buildInputs(self, tosca):
- topo = tosca.tpl.get(TOPOLOGY_TEMPLATE, None)
- return topo.get(INPUTS, {}) if topo else {}
-
- def buildToscaTemplate(self, path, params):
- file_name = None
- try:
- file_name = self._check_download_file(path)
- valid_params = self._validate_input_params(file_name, params)
- return self._create_tosca_template(file_name, valid_params)
- finally:
- if file_name is not None and file_name != path and os.path.exists(file_name):
- try:
- os.remove(file_name)
- except Exception as e:
-                    logger.error("Failed to remove temp file %s, error: %s", file_name, e.args[0])
-
- def _validate_input_params(self, path, params):
- valid_params = {}
- inputs = {}
- if isinstance(params, list):
- for param in params:
- key = param.get('key', 'undefined')
- value = param.get('value', 'undefined')
- inputs[key] = value
- params = inputs
-
- if params:
- tmp = self._create_tosca_template(path, None)
- if isinstance(params, dict):
- for key, value in list(params.items()):
- if hasattr(tmp, 'inputs') and len(tmp.inputs) > 0:
- for input_def in tmp.inputs:
- if (input_def.name == key):
- valid_params[key] = DataEntityExt.validate_datatype(input_def.type, value)
- return valid_params
-
- def _create_tosca_template(self, file_name, valid_params):
- tosca_tpl = None
- try:
- tosca_tpl = ToscaTemplate(path=file_name,
- parsed_params=valid_params,
- no_required_paras_check=True,
- debug_mode=True)
- except Exception as e:
- print(e.args[0])
- finally:
- if tosca_tpl is not None and hasattr(tosca_tpl, "temp_dir") and os.path.exists(tosca_tpl.temp_dir):
- try:
- shutil.rmtree(tosca_tpl.temp_dir)
- except Exception as e:
-                    logger.error("Failed to remove the temp dir of the tosca template, error: %s", e.args[0])
- print("-----------------------------")
- print('\n'.join(['%s:%s' % item for item in list(tosca_tpl.__dict__.items())]))
- print("-----------------------------")
- return tosca_tpl
-
- def _check_download_file(self, path):
- if (path.startswith("ftp") or path.startswith("sftp")):
- return self.downloadFileFromFtpServer(path)
- elif (path.startswith("http")):
- return self.download_file_from_httpserver(path)
- return path
-
-    def download_file_from_httpserver(self, path):
-        # path is already a str in Python 3; encoding it would break the split and urlretrieve below
-        tmps = path.split('/')
-        localFileName = tmps[len(tmps) - 1]
-        urllib.request.urlretrieve(path, localFileName)
-        return localFileName
-
-    def downloadFileFromFtpServer(self, path):
-        # keep path as a str in Python 3 so the str.split calls below work
-        tmp = str.split(path, '://')
- protocol = tmp[0]
- tmp = str.split(tmp[1], ':')
- if len(tmp) == 2:
- userName = tmp[0]
- tmp = str.split(tmp[1], '@')
- userPwd = tmp[0]
- index = tmp[1].index('/')
- hostIp = tmp[1][0:index]
- remoteFileName = tmp[1][index:len(tmp[1])]
- if protocol.lower() == 'ftp':
- hostPort = 21
- else:
- hostPort = 22
-
- if len(tmp) == 3:
- userName = tmp[0]
- userPwd = str.split(tmp[1], '@')[0]
- hostIp = str.split(tmp[1], '@')[1]
- index = tmp[2].index('/')
- hostPort = tmp[2][0:index]
- remoteFileName = tmp[2][index:len(tmp[2])]
-
- localFileName = str.split(remoteFileName, '/')
- localFileName = localFileName[len(localFileName) - 1]
-
- if protocol.lower() == 'sftp':
- self.sftp_get(userName, userPwd, hostIp, hostPort, remoteFileName, localFileName)
- else:
- self.ftp_get(userName, userPwd, hostIp, hostPort, remoteFileName, localFileName)
- return localFileName
-
- # def sftp_get(self, userName, userPwd, hostIp, hostPort, remoteFileName, localFileName):
- # # return
- # t = None
- # try:
- # t = paramiko.Transport(hostIp, int(hostPort))
- # t.connect(username=userName, password=userPwd)
- # sftp = paramiko.SFTPClient.from_transport(t)
- # sftp.get(remoteFileName, localFileName)
- # finally:
- # if t is not None:
- # t.close()
-
- def ftp_get(self, userName, userPwd, hostIp, hostPort, remoteFileName, localFileName):
- f = None
- try:
- ftp = ftplib.FTP()
- ftp.connect(hostIp, hostPort)
- ftp.login(userName, userPwd)
- f = open(localFileName, 'wb')
- ftp.retrbinary('RETR ' + remoteFileName, f.write, 1024)
- f.close()
- finally:
- if f is not None:
- f.close()
-
- def buildMetadata(self, tosca):
- return tosca.tpl.get(METADATA, {}) if tosca else {}
-
- def buildNode(self, nodeTemplate, tosca):
- inputs = tosca.inputs
- parsed_params = tosca.parsed_params
- ret = {}
- ret[NODE_NAME] = nodeTemplate.name
- ret[NODE_TYPE] = nodeTemplate.type
- if DESCRIPTION in nodeTemplate.entity_tpl:
- ret[DESCRIPTION] = nodeTemplate.entity_tpl[DESCRIPTION]
- else:
- ret[DESCRIPTION] = ''
- if METADATA in nodeTemplate.entity_tpl:
- ret[METADATA] = nodeTemplate.entity_tpl[METADATA]
- else:
- ret[METADATA] = ''
- props = self.buildProperties_ex(nodeTemplate, tosca.topology_template)
- ret[PROPERTIES] = self.verify_properties(props, inputs, parsed_params)
- ret[REQUIREMENTS] = self.build_requirements(nodeTemplate)
- self.buildCapabilities(nodeTemplate, inputs, ret)
- self.buildArtifacts(nodeTemplate, inputs, ret)
- interfaces = self.build_interfaces(nodeTemplate)
- if interfaces:
- ret[INTERFACES] = interfaces
- return ret
-
- def buildProperties(self, nodeTemplate, parsed_params):
- properties = {}
- isMappingParams = parsed_params and len(parsed_params) > 0
- for k, item in list(nodeTemplate.get_properties().items()):
- properties[k] = item.value
- if isinstance(item.value, GetInput):
- if item.value.result() and isMappingParams:
- properties[k] = DataEntityExt.validate_datatype(item.type, item.value.result())
- else:
- tmp = {}
- tmp[item.value.name] = item.value.input_name
- properties[k] = tmp
- if ATTRIBUTES in nodeTemplate.entity_tpl:
- for k, item in list(nodeTemplate.entity_tpl[ATTRIBUTES].items()):
- properties[k] = str(item)
- return properties
-
- def buildProperties_ex(self, nodeTemplate, topology_template, properties=None):
- if properties is None:
- properties = nodeTemplate.get_properties()
- _properties = {}
- if isinstance(properties, dict):
- for name, prop in list(properties.items()):
- if isinstance(prop, Property):
- if isinstance(prop.value, Function):
- if isinstance(prop.value, Concat): # support one layer inner function.
- value_str = ''
- for arg in prop.value.args:
- if isinstance(arg, str):
- value_str += arg
- elif isinstance(arg, dict):
- raw_func = {}
- for k, v in list(arg.items()):
- func_args = []
- func_args.append(v)
- raw_func[k] = func_args
- func = get_function(topology_template, nodeTemplate, raw_func)
- value_str += str(func.result())
- _properties[name] = value_str
- else:
- _properties[name] = prop.value.result()
- elif isinstance(prop.value, dict) or isinstance(prop.value, list):
- _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop.value)
- elif prop.type == 'string':
- _properties[name] = prop.value
- else:
- _properties[name] = json.dumps(prop.value)
- elif isinstance(prop, dict):
- _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop)
- elif isinstance(prop, list):
- _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop)
- elif name in function_mappings:
- raw_func = {}
- func_args = []
- func_args.append(prop)
- raw_func[name] = func_args
- if name == 'CONCAT':
- value_str = ''
- for arg in prop:
- if isinstance(arg, str):
- value_str += arg
- elif isinstance(arg, dict):
- raw_func = {}
- for k, v in list(arg.items()):
- func_args = []
- func_args.append(v)
- raw_func[k] = func_args
- value_str += str(
- get_function(topology_template, nodeTemplate, raw_func).result())
- value = value_str
- else:
- return get_function(topology_template, nodeTemplate, raw_func).result()
- else:
- _properties[name] = prop
- elif isinstance(properties, list):
- value = []
- for para in properties:
- if isinstance(para, dict) or isinstance(para, list):
- value.append(self.buildProperties_ex(nodeTemplate, topology_template, para))
- else:
- value.append(para)
- return value
- return _properties
-
- def verify_properties(self, props, inputs, parsed_params):
- ret_props = {}
- if (props and len(props) > 0):
- for key, value in list(props.items()):
- ret_props[key] = self._verify_value(value, inputs, parsed_params)
- # if isinstance(value, str):
- # ret_props[key] = self._verify_string(inputs, parsed_params, value);
- # continue
- # if isinstance(value, list):
- # ret_props[key] = map(lambda x: self._verify_dict(inputs, parsed_params, x), value)
- # continue
- # if isinstance(value, dict):
- # ret_props[key] = self._verify_map(inputs, parsed_params, value)
- # continue
- # ret_props[key] = value
- return ret_props
-
- def build_requirements(self, node_template):
- rets = []
- for req in node_template.requirements:
- for req_name, req_value in list(req.items()):
- if (isinstance(req_value, dict)):
- if ('node' in req_value and req_value['node'] not in node_template.templates):
-                        continue  # No target requirement for the aria parser; do not add it to the result.
- rets.append({req_name: req_value})
- return rets
-
- def buildCapabilities(self, nodeTemplate, inputs, ret):
- capabilities = json.dumps(nodeTemplate.entity_tpl.get(CAPABILITIES, None))
- match = re.findall(r'\{"get_input":\s*"([\w|\-]+)"\}', capabilities)
- for m in match:
- aa = [input_def for input_def in inputs if m == input_def.name][0]
- capabilities = re.sub(r'\{"get_input":\s*"([\w|\-]+)"\}', json.dumps(aa.default), capabilities, 1)
- if capabilities != 'null':
- ret[CAPABILITIES] = json.loads(capabilities)
-
- def buildArtifacts(self, nodeTemplate, inputs, ret):
- artifacts = json.dumps(nodeTemplate.entity_tpl.get('artifacts', None))
- match = re.findall(r'\{"get_input":\s*"([\w|\-]+)"\}', artifacts)
- for m in match:
- aa = [input_def for input_def in inputs if m == input_def.name][0]
- artifacts = re.sub(r'\{"get_input":\s*"([\w|\-]+)"\}', json.dumps(aa.default), artifacts, 1)
- if artifacts != 'null':
- ret[ARTIFACTS] = json.loads(artifacts)
-
- def build_interfaces(self, node_template):
- if INTERFACES in node_template.entity_tpl:
- return node_template.entity_tpl[INTERFACES]
- return None
-
- def isNodeTypeX(self, node, nodeTypes, x):
- node_type = node[NODE_TYPE]
- while node_type != x:
- node_type_derived = node_type
- node_type = nodeTypes[node_type][DERIVED_FROM]
- if node_type == NODE_ROOT or node_type == node_type_derived:
- return False
- return True
-
- def get_requirement_node_name(self, req_value):
- return self.get_prop_from_obj(req_value, 'node')
-
- def getRequirementByNodeName(self, nodeTemplates, storage_name, prop):
- for node in nodeTemplates:
- if node[NODE_NAME] == storage_name:
- if prop in node:
- return node[prop]
-
- def get_prop_from_obj(self, obj, prop):
- if isinstance(obj, str):
- return obj
- if (isinstance(obj, dict) and prop in obj):
- return obj[prop]
- return None
-
- def getNodeDependencys(self, node):
- return self.getRequirementByName(node, 'dependency')
-
- def getRequirementByName(self, node, requirementName):
- requirements = []
- if REQUIREMENTS in node:
- for item in node[REQUIREMENTS]:
- for key, value in list(item.items()):
- if key == requirementName:
- requirements.append(value)
- return requirements
-
- def _verify_value(self, value, inputs, parsed_params):
- if value == '{}':
- return ''
- if isinstance(value, str):
- return self._verify_string(inputs, parsed_params, value)
- if isinstance(value, list) or isinstance(value, dict):
- return self._verify_object(value, inputs, parsed_params)
- return value
-
- def _verify_object(self, value, inputs, parsed_params):
- s = self._verify_string(inputs, parsed_params, json.dumps(value))
- return json.loads(s)
-
- def _get_input_name(self, getInput):
- input_name = getInput.split(':')[1]
- input_name = input_name.strip()
- return input_name.replace('"', '').replace('}', '')
-
- def _verify_string(self, inputs, parsed_params, value):
- getInputs = re.findall(r'{"get_input": "[a-zA-Z_0-9]+"}', value)
- for getInput in getInputs:
- input_name = self._get_input_name(getInput)
- if parsed_params and input_name in parsed_params:
- value = value.replace(getInput, json.dumps(parsed_params[input_name]))
- else:
- for input_def in inputs:
- if input_def.default and input_name == input_def.name:
- value = value.replace(getInput, json.dumps(input_def.default))
- return value
-
- def get_node_by_name(self, node_templates, name):
- for node in node_templates:
- if node[NODE_NAME] == name:
- return node
- return None
-
- def getCapabilityByName(self, node, capabilityName):
- if CAPABILITIES in node and capabilityName in node[CAPABILITIES]:
- return node[CAPABILITIES][capabilityName]
- return None
-
- def get_base_path(self, tosca):
- fpath, fname = os.path.split(tosca.path)
- return fpath
-
- def build_artifacts(self, node):
- rets = []
- if ARTIFACTS in node and len(node[ARTIFACTS]) > 0:
- artifacts = node[ARTIFACTS]
- for name, value in list(artifacts.items()):
- ret = {}
- ret['artifact_name'] = name
- ret['file'] = value
- if isinstance(value, dict):
- ret.update(value)
- rets.append(ret)
- else:
-            # TODO: This is a workaround for SDC-1900.
- logger.error("VCPE specific code")
- ret = {}
- ret['artifact_name'] = "sw_image"
- ret['file'] = "ubuntu_16.04"
- ret['type'] = "tosca.artifacts.nfv.SwImage"
- rets.append(ret)
-
- return rets
-
- def get_node_by_req(self, node_templates, req):
- req_node_name = self.get_requirement_node_name(req)
- return self.get_node_by_name(node_templates, req_node_name)
-
- def isGroupTypeX(self, group, groupTypes, x):
- group_type = group[GROUP_TYPE]
- while group_type != x:
- group_type_derived = group_type
- group_type = groupTypes[group_type][DERIVED_FROM]
- if group_type == GROUPS_ROOT or group_type == group_type_derived:
- return False
- return True
-
- def setTargetValues(self, dict_target, target_keys, dict_source, source_keys):
- i = 0
- for item in source_keys:
- dict_target[target_keys[i]] = dict_source.get(item, "")
- i += 1
- return dict_target
-
- def get_deploy_graph(self, tosca, relations):
- nodes = tosca.graph.nodetemplates
- graph = Graph()
- for node in nodes:
- self._build_deploy_path(node, [], graph, relations)
- return graph.to_dict()
-
- def _build_deploy_path(self, node, node_parent, graph, relations):
- graph.add_node(node.name, node_parent)
- type_require_set = {}
- type_requires = node.type_definition.requirements
- for type_require in type_requires:
- type_require_set.update(type_require)
- for requirement in node.requirements:
- for k in list(requirement.keys()):
- if type_require_set[k].get('relationship', None) in relations[0] or type_require_set[k].get('capability', None) in relations[0]:
- if isinstance(requirement[k], dict):
- next_node = requirement[k].get('node', None)
- else:
- next_node = requirement[k]
- graph.add_node(next_node, [node.name])
- if type_require_set[k].get('relationship', None) in relations[1]:
- if isinstance(requirement[k], dict):
- next_node = requirement[k].get('node', None)
- else:
- next_node = requirement[k]
- graph.add_node(next_node, [node.name])
-
- def get_substitution_mappings(self, tosca):
- node = {
- 'properties': {},
- 'requirements': {},
- 'capabilities': {},
- 'metadata': {}
- }
- metadata = None
- substitution_mappings = tosca.tpl['topology_template'].get('substitution_mappings', None)
- if substitution_mappings:
- nodeType = substitution_mappings['node_type']
- logger.debug("nodeType %s", nodeType)
- if "node_types" in tosca.tpl:
- node_types = tosca.tpl['node_types'].get(nodeType, None)
- derivedFrom = node_types.get('derived_from', "")
- node['type'] = derivedFrom
- node['properties'] = node_types.get('properties', {})
- node['requirements'] = node_types.get('requirements', {})
- node['capabilities'] = node_types.get('capabilities', {})
- metadata = node_types.get('metadata', {})
-
- if "type" not in node or node['type'] == "":
- node['type'] = nodeType
- node['properties'] = substitution_mappings.get('properties', {})
- node['requirements'] = substitution_mappings.get('requirements', {})
- node['capabilities'] = substitution_mappings.get('capabilities', {})
- metadata = substitution_mappings.get('metadata', {})
-
- node['metadata'] = metadata if metadata and metadata != {} else self.buildMetadata(tosca)
- return node
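
BaseInfoModel above handles download, input validation and ToscaTemplate construction, then calls parseModel(), which the concrete NSD/VNFD/PNFD models override. A minimal illustrative subclass (the attribute names are placeholders, not the real models'):

    from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel

    class MinimalInfoModel(BaseInfoModel):
        def parseModel(self, tosca):
            self.metadata = self.buildMetadata(tosca)
            self.inputs = self.buildInputs(tosca)
            self.basepath = self.get_base_path(tosca)
            self.nodes = [self.buildNode(t, tosca) for t in tosca.nodetemplates]
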
diff --git a/genericparser/pub/utils/toscaparsers/const.py b/genericparser/pub/utils/toscaparsers/const.py
deleted file mode 100644
index 9c61c48..0000000
--- a/genericparser/pub/utils/toscaparsers/const.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2018 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-NS_METADATA_SECTIONS = (NS_UUID, NS_INVARIANTUUID, NS_NAME, NS_VERSION, NS_DESIGNER, NSD_RELEASE_DATE) =\
- ("nsd_id", "nsd_invariant_id", "nsd_name", "nsd_file_structure_version", "nsd_designer", "nsd_release_date_time")
-# ("id", "invariant_id", "name", "version", "designer", "description")
-
-SDC_SERVICE_METADATA_SECTIONS = (SRV_UUID, SRV_INVARIANTUUID, SRV_NAME) = ('UUID', 'invariantUUID', 'name')
-
-PNF_METADATA_SECTIONS = (PNF_UUID, PNF_INVARIANTUUID, PNF_NAME, PNF_METADATA_DESCRIPTION, PNF_VERSION, PNF_PROVIDER) = \
- ("descriptor_id", "descriptor_invariant_id", "name", "description", "version", "provider")
-PNF_SECTIONS = (PNF_ID, PNF_METADATA, PNF_PROPERTIES, PNF_DESCRIPTION) = \
- ("pnf_id", "metadata", "properties", "description")
-
-VNF_SECTIONS = (VNF_ID, VNF_METADATA, VNF_PROPERTIES, VNF_DESCRIPTION) = \
- ("vnf_id", "metadata", "properties", "description")
-
-VL_SECTIONS = (VL_ID, VL_METADATA, VL_PROPERTIES, VL_DESCRIPTION) = \
- ("vl_id", "metadata", "properties", "description")
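The constants above use chained tuple assignment: a single statement binds both the section tuple and each individual name. A minimal, self-contained illustration of the idiom:

    VL_SECTIONS = (VL_ID, VL_METADATA) = ("vl_id", "metadata")
    assert VL_SECTIONS == ("vl_id", "metadata")
    assert VL_ID == "vl_id" and VL_METADATA == "metadata"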
diff --git a/genericparser/pub/utils/toscaparsers/dataentityext.py b/genericparser/pub/utils/toscaparsers/dataentityext.py
deleted file mode 100644
index 825e93b..0000000
--- a/genericparser/pub/utils/toscaparsers/dataentityext.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from toscaparser.dataentity import DataEntity
-from toscaparser.elements.constraints import Schema
-from toscaparser.common.exception import ExceptionCollector
-
-
-class DataEntityExt(object):
- '''A complex data value entity ext.'''
- @staticmethod
- def validate_datatype(type, value, entry_schema=None, custom_def=None):
- if value:
- if (type == Schema.STRING):
- return str(value)
- elif type == Schema.FLOAT:
- try:
- return float(value)
- except Exception:
-                    ExceptionCollector.appendException(ValueError(('"%s" is not a float.') % value))
- return DataEntity.validate_datatype(type, value, entry_schema, custom_def)
- return value
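A short usage sketch, assuming toscaparser is installed and DataEntityExt is imported from the module above: strings are passed through str(), float-typed values are coerced (the error is collected on failure), falsy values are returned unchanged, and anything else is delegated to toscaparser's DataEntity.

    from toscaparser.elements.constraints import Schema

    DataEntityExt.validate_datatype(Schema.STRING, 42)    # -> "42"
    DataEntityExt.validate_datatype(Schema.FLOAT, "1.5")  # -> 1.5
    DataEntityExt.validate_datatype(Schema.FLOAT, None)   # -> None (falsy values pass through)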
diff --git a/genericparser/pub/utils/toscaparsers/graph.py b/genericparser/pub/utils/toscaparsers/graph.py
deleted file mode 100644
index 0af2a14..0000000
--- a/genericparser/pub/utils/toscaparsers/graph.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright 2018 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from collections import deque
-from collections import OrderedDict
-
-
-class Graph(object):
-
- def __init__(self, graph_dict=None):
- self.graph = OrderedDict()
- if graph_dict:
- for node, dep_nodes in list(graph_dict.items()):
- self.add_node(node, dep_nodes)
-
- def add_node(self, node, dep_nodes):
- if node not in self.graph:
- self.graph[node] = set()
- if isinstance(dep_nodes, list):
- for dep_node in dep_nodes:
- if dep_node not in self.graph:
- self.graph[dep_node] = set()
- if dep_node not in self.graph[node]:
- self.graph[node].add(dep_node)
-
- def get_pre_nodes(self, node):
- return [k for k in self.graph if node in self.graph[k]]
-
- def topo_sort(self):
- degree = {}
- for node in self.graph:
- degree[node] = 0
-
- for node in self.graph:
- for dependent in self.graph[node]:
- degree[dependent] += 1
-
- queue = deque()
- for node in degree:
- if degree[node] == 0:
- queue.appendleft(node)
-
- sort_list = []
- while queue:
- node = queue.pop()
- sort_list.append(node)
- for dependent in self.graph[node]:
- degree[dependent] -= 1
- if degree[dependent] == 0:
- queue.appendleft(dependent)
-
- if len(sort_list) == len(self.graph):
- return sort_list
- else:
- return None
-
-    def to_dict(self):
-        result = {}
-        for node, dependents in self.graph.items():
-            result[node] = []
-            for dep in dependents:
-                result[node].append(dep)
-        return result
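A small usage sketch of the Graph helper above, assuming the class is importable: each key maps to the nodes passed as dep_nodes, topo_sort orders every node before the nodes it points at (returning None when a cycle makes that impossible), and get_pre_nodes lists the keys that point at a given node.

    g = Graph({"a": ["b"], "b": ["c"], "c": []})
    print(g.topo_sort())         # ['a', 'b', 'c']
    print(g.get_pre_nodes("b"))  # ['a']
    print(g.to_dict())           # {'a': ['b'], 'b': ['c'], 'c': []}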
diff --git a/genericparser/pub/utils/toscaparsers/nsdmodel.py b/genericparser/pub/utils/toscaparsers/nsdmodel.py
deleted file mode 100644
index 9cc706f..0000000
--- a/genericparser/pub/utils/toscaparsers/nsdmodel.py
+++ /dev/null
@@ -1,220 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import functools
-import logging
-from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel
-from genericparser.pub.utils.toscaparsers.const import SDC_SERVICE_METADATA_SECTIONS
-from genericparser.pub.utils.toscaparsers.servicemodel import SdcServiceModel
-
-logger = logging.getLogger(__name__)
-
-SECTIONS = (NS_TYPE, NS_VNF_TYPE, NS_VL_TYPE, NS_PNF_TYPE, NS_NFP_TYPE, NS_VNFFG_TYPE) = \
- ('tosca.nodes.nfv.NS',
- 'tosca.nodes.nfv.VNF',
- 'tosca.nodes.nfv.NsVirtualLink',
- 'tosca.nodes.nfv.PNF',
- 'tosca.nodes.nfv.NFP',
- 'tosca.nodes.nfv.VNFFG')
-
-NFV_NS_RELATIONSHIPS = [["tosca.relationships.nfv.VirtualLinksTo", "tosca.relationships.DependsOn"], []]
-
-
-class NsdInfoModel(BaseInfoModel):
- def __init__(self, path, params):
- super(NsdInfoModel, self).__init__(path, params)
-
- def parseModel(self, tosca):
- metadata = self.buildMetadata(tosca)
- self.model = {}
- if self._is_etsi(metadata):
- self.model = EtsiNsdInfoModel(tosca)
- elif self._is_ecomp(metadata):
- self.model = SdcServiceModel(tosca)
-
-    def _is_etsi(self, metadata):
-        NS_METADATA_MUST = ["nsd_invariant_id", "nsd_name", "nsd_file_structure_version", "nsd_designer", "nsd_release_date_time"]
-        return all(key in metadata for key in NS_METADATA_MUST)
-
-    def _is_ecomp(self, metadata):
-        return all(key in metadata for key in SDC_SERVICE_METADATA_SECTIONS)
-
-
-class EtsiNsdInfoModel(BaseInfoModel):
-
- def __init__(self, tosca):
- super(EtsiNsdInfoModel, self).__init__(tosca=tosca)
-
- def parseModel(self, tosca):
- self.metadata = self.buildMetadata(tosca)
- self.ns = self._build_ns(tosca)
- self.inputs = self.buildInputs(tosca)
- nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
- types = tosca.topology_template.custom_defs
- self.basepath = self.get_base_path(tosca)
- self.vnfs = self._get_all_vnf(nodeTemplates, types)
- self.pnfs = self._get_all_pnf(nodeTemplates, types)
- self.vls = self._get_all_vl(nodeTemplates, types)
- self.fps = self._get_all_fp(nodeTemplates, types)
- self.vnffgs = self._get_all_vnffg(tosca.topology_template.groups, types)
- self.ns_exposed = self._get_all_endpoint_exposed(tosca.topology_template)
- self.nested_ns = self._get_all_nested_ns(nodeTemplates, types)
- self.graph = self.get_deploy_graph(tosca, NFV_NS_RELATIONSHIPS)
-
- def _get_all_vnf(self, nodeTemplates, node_types):
- vnfs = []
- for node in nodeTemplates:
- if self.isNodeTypeX(node, node_types, NS_VNF_TYPE):
- vnf = {}
- vnf['vnf_id'] = node['name']
- vnf['description'] = node['description']
- vnf['properties'] = node['properties']
- if not vnf['properties'].get('id', None):
- vnf['properties']['id'] = vnf['properties'].get('descriptor_id', None)
- vnf['dependencies'] = self._get_networks(node, node_types)
- vnf['networks'] = self._get_networks(node, node_types)
- vnfs.append(vnf)
- return vnfs
-
- def _get_all_pnf(self, nodeTemplates, node_types):
- pnfs = []
- for node in nodeTemplates:
- if self.isNodeTypeX(node, node_types, NS_PNF_TYPE):
- pnf = {}
- pnf['pnf_id'] = node['name']
- pnf['description'] = node['description']
- pnf['properties'] = node['properties']
- pnf['networks'] = self._get_networks(node, node_types)
- pnfs.append(pnf)
- return pnfs
-
- def _get_all_vl(self, nodeTemplates, node_types):
- vls = []
- for node in nodeTemplates:
- if self.isNodeTypeX(node, node_types, NS_VL_TYPE):
- vl = dict()
- vl['vl_id'] = node['name']
- vl['description'] = node['description']
- vl['properties'] = node['properties']
- vls.append(vl)
- return vls
-
- def _get_all_fp(self, nodeTemplates, node_types):
- fps = []
- for node in nodeTemplates:
- if self.isNodeTypeX(node, node_types, NS_NFP_TYPE):
- fp = {}
- fp['fp_id'] = node['name']
- fp['description'] = node['description']
- fp['properties'] = node['properties']
- fp['forwarder_list'] = self._getForwarderList(node, nodeTemplates, node_types)
- fps.append(fp)
- return fps
-
- def _getForwarderList(self, node, node_templates, node_types):
- forwarderList = []
- if 'requirements' in node:
- for item in node['requirements']:
- for key, value in list(item.items()):
- if key == 'forwarder':
- tmpnode = self.get_node_by_req(node_templates, value)
- type = 'pnf' if self.isNodeTypeX(tmpnode, node_types, NS_PNF_TYPE) else 'vnf'
- req_node_name = self.get_requirement_node_name(value)
- if isinstance(value, dict) and 'capability' in value:
- forwarderList.append(
- {"type": type, "node_name": req_node_name, "capability": value['capability']})
- else:
- forwarderList.append({"type": type, "node_name": req_node_name, "capability": ""})
- return forwarderList
-
- def _get_all_vnffg(self, groups, group_types):
- vnffgs = []
- for group in groups:
- if self.isGroupTypeX(group, group_types, NS_VNFFG_TYPE):
- vnffg = {}
- vnffg['vnffg_id'] = group.name
- vnffg['description'] = group.description
- if 'properties' in group.tpl:
- vnffg['properties'] = group.tpl['properties']
- vnffg['members'] = group.members
- vnffgs.append(vnffg)
- return vnffgs
-
- def _get_all_endpoint_exposed(self, topo_tpl):
- if 'substitution_mappings' in topo_tpl.tpl:
- external_cps = self._get_external_cps(topo_tpl.tpl['substitution_mappings'])
- forward_cps = self._get_forward_cps(topo_tpl.tpl['substitution_mappings'])
- return {"external_cps": external_cps, "forward_cps": forward_cps}
- return {}
-
- def _get_external_cps(self, subs_mappings):
- external_cps = []
- if 'requirements' in subs_mappings:
- for key, value in list(subs_mappings['requirements'].items()):
- if isinstance(value, list) and len(value) > 0:
- external_cps.append({"key_name": key, "cpd_id": value[0]})
- else:
- external_cps.append({"key_name": key, "cpd_id": value})
- return external_cps
-
- def _get_forward_cps(self, subs_mappings):
- forward_cps = []
- if 'capabilities' in subs_mappings:
- for key, value in list(subs_mappings['capabilities'].items()):
- if isinstance(value, list) and len(value) > 0:
- forward_cps.append({"key_name": key, "cpd_id": value[0]})
- else:
- forward_cps.append({"key_name": key, "cpd_id": value})
- return forward_cps
-
- def _get_all_nested_ns(self, nodes, node_types):
- nss = []
- for node in nodes:
- if self.isNodeTypeX(node, node_types, NS_TYPE):
- ns = {}
- ns['ns_id'] = node['name']
- ns['description'] = node['description']
- ns['properties'] = node['properties']
- ns['networks'] = self._get_networks(node, node_types)
- nss.append(ns)
- return nss
-
- def _get_networks(self, node, node_types):
- rets = []
- if 'requirements' in node and (self.isNodeTypeX(node, node_types, NS_TYPE) or self.isNodeTypeX(node, node_types, NS_VNF_TYPE)):
- for item in node['requirements']:
- for key, value in list(item.items()):
- rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)})
- return rets
-
- def _build_ns(self, tosca):
- ns = self.get_substitution_mappings(tosca)
- properties = ns.get("properties", {})
- metadata = ns.get("metadata", {})
- if properties.get("descriptor_id", "") == "":
- descriptor_id = metadata.get("nsd_id", "")
- properties["descriptor_id"] = descriptor_id
- if properties.get("verison", "") == "":
- version = metadata.get("nsd_file_structure_version", "")
- properties["verison"] = version
- if properties.get("designer", "") == "":
- author = metadata.get("nsd_designer", "")
- properties["designer"] = author
- if properties.get("name", "") == "":
- template_name = metadata.get("nsd_name", "")
- properties["name"] = template_name
- if properties.get("invariant_id", "") == "":
- nsd_invariant_id = metadata.get("nsd_invariant_id", "")
- properties["invariant_id"] = nsd_invariant_id
- return ns
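NsdInfoModel dispatches purely on descriptor metadata: the full set of nsd_* keys selects EtsiNsdInfoModel, the SDC keys (UUID, invariantUUID, name) select SdcServiceModel, and anything else leaves self.model as an empty dict. A hedged sketch with hypothetical metadata:

    etsi_md = {"nsd_invariant_id": "x", "nsd_name": "x", "nsd_file_structure_version": "x",
               "nsd_designer": "x", "nsd_release_date_time": "x"}
    sdc_md = {"UUID": "x", "invariantUUID": "x", "name": "x"}
    # _is_etsi(etsi_md) -> True and _is_ecomp(sdc_md) -> True; both checks require every listed key.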
diff --git a/genericparser/pub/utils/toscaparsers/pnfmodel.py b/genericparser/pub/utils/toscaparsers/pnfmodel.py
deleted file mode 100644
index 546861b..0000000
--- a/genericparser/pub/utils/toscaparsers/pnfmodel.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright 2018 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import functools
-import logging
-from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel
-logger = logging.getLogger(__name__)
-
-
-class PnfdInfoModel(BaseInfoModel):
-
- def __init__(self, path, params):
- super(PnfdInfoModel, self).__init__(path, params)
-
- def parseModel(self, tosca):
- self.metadata = self.buildMetadata(tosca)
- self.inputs = self.buildInputs(tosca)
- nodeTemplates = map(functools.partial(self.buildNode, tosca=tosca),
- tosca.nodetemplates)
- self.basepath = self.get_base_path(tosca)
- self.pnf = {}
- self.get_substitution_mappings(tosca)
- self.get_all_cp(nodeTemplates)
-
- def get_substitution_mappings(self, tosca):
- pnf_substitution_mappings = tosca.tpl['topology_template'].get('substitution_mappings', None)
- if pnf_substitution_mappings:
- self.pnf['type'] = pnf_substitution_mappings['node_type']
- self.pnf['properties'] = pnf_substitution_mappings.get('properties', {})
-
- def get_all_cp(self, nodeTemplates):
- self.pnf['ExtPorts'] = []
- for node in nodeTemplates:
- if self.isPnfExtPort(node):
- cp = {}
- cp['id'] = node['name']
- cp['type'] = node['nodeType']
- cp['properties'] = node['properties']
- self.pnf['ExtPorts'].append(cp)
-
- def isPnfExtPort(self, node):
- return node['nodeType'].find('tosca.nodes.nfv.PnfExtPort') >= 0
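A hedged sketch of the structure PnfdInfoModel collects into self.pnf; the values are placeholders, only the keys follow the code above (node_type and properties from substitution_mappings, one ExtPorts entry per tosca.nodes.nfv.PnfExtPort node template).

    pnf = {
        'type': 'tosca.nodes.nfv.PNF',            # node_type of the substitution mapping
        'properties': {'descriptor_id': '...'},
        'ExtPorts': [
            {'id': 'ext_cp_0', 'type': 'tosca.nodes.nfv.PnfExtPort', 'properties': {}}
        ]
    }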
diff --git a/genericparser/pub/utils/toscaparsers/sdmodel.py b/genericparser/pub/utils/toscaparsers/sdmodel.py
deleted file mode 100644
index 7635ab3..0000000
--- a/genericparser/pub/utils/toscaparsers/sdmodel.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-
-from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel
-from genericparser.pub.utils.toscaparsers.servicemodel import SdcServiceModel
-
-logger = logging.getLogger(__name__)
-
-
-class SdInfoModel(BaseInfoModel):
- def __init__(self, path, params):
- super(SdInfoModel, self).__init__(path, params)
-
- def parseModel(self, tosca):
- self.metadata = self.buildMetadata(tosca)
- self.inputs = self.build_inputs(tosca)
-
-        sdcModel = SdcServiceModel(tosca)
-        if sdcModel:
-            self.service = sdcModel.ns
-            if hasattr(tosca, 'nodetemplates'):
-                self.basepath = sdcModel.basepath
-                self.vnfs = sdcModel.vnfs
-                self.pnfs = sdcModel.pnfs
-                self.vls = sdcModel.vls
-                self.graph = sdcModel.graph
-
- def build_inputs(self, tosca):
-        """Get all the inputs for complex types."""
- result_inputs = {}
-
- if not tosca.inputs:
- return {}
-
- for input in tosca.inputs:
- type = input.schema.type
-            if type in ('list', 'map'):
- complex_input = []
- entry_schema = self.get_entry_schema(input.schema.schema['entry_schema'])
- self.get_child_input_repeat(complex_input, entry_schema, input)
- result_inputs[input.schema.name] = complex_input
-
- else:
- simple_input = {
- "type": input.schema.type,
- "description": input.schema.description,
- "required": input.schema.required,
- }
- result_inputs[input.schema.name] = simple_input
- return result_inputs
-
- def get_child_input_repeat(self, complex_input, entry_schema, input):
- custom_defs = input.custom_defs
- properties = custom_defs[entry_schema]['properties']
- for key, value in properties.items():
-            if value['type'] == 'list':
- child_complex_input = []
- child_entry_schema = self.get_entry_schema(value['entry_schema'])
- self.get_child_input_repeat(child_complex_input, child_entry_schema, input)
- complex_input.append({key: child_complex_input})
- else:
- if 'description' in list(value.keys()):
- simple_input = {
- key: "",
- "type": value['type'],
- "required": value['required'],
- "description": value['description'],
- }
- else:
- simple_input = {
- key: "",
- "type": value['type'],
- "required": value['required'],
- }
- complex_input.append(simple_input)
-
- def get_entry_schema(self, entry_schema):
- if isinstance(entry_schema, dict):
- if 'type' in list(entry_schema.keys()):
- entry_schema = entry_schema['type']
- return entry_schema
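A hedged sketch of what build_inputs produces; the input and property names are hypothetical, only the nesting follows the code above: simple inputs keep their schema fields, while list/map inputs are replaced by a list of their entry schema's properties, recursing into nested lists.

    result_inputs = {
        "vnf_name": {"type": "string", "description": "...", "required": True},
        "sites": [                                  # list/map input, expanded via its entry_schema
            {"role": "", "type": "string", "required": True, "description": "..."},
            {"addresses": []}                       # nested list, expanded recursively into the same shape
        ]
    }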
diff --git a/genericparser/pub/utils/toscaparsers/servicemodel.py b/genericparser/pub/utils/toscaparsers/servicemodel.py
deleted file mode 100644
index 6321e04..0000000
--- a/genericparser/pub/utils/toscaparsers/servicemodel.py
+++ /dev/null
@@ -1,188 +0,0 @@
-# Copyright 2018 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import functools
-import logging
-from genericparser.pub.utils.toscaparsers.const import NS_METADATA_SECTIONS, PNF_METADATA_SECTIONS, VNF_SECTIONS, PNF_SECTIONS, VL_SECTIONS
-from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel
-
-logger = logging.getLogger(__name__)
-
-SDC_SERVICE_SECTIONS = (SERVICE_TYPE, SRV_DESCRIPTION) = (
- 'org.openecomp.resource.abstract.nodes.service', 'description')
-
-SDC_SERVICE_METADATA_SECTIONS = (SRV_UUID, SRV_INVARIANTUUID, SRV_NAME) = (
- 'UUID', 'invariantUUID', 'name')
-
-SDC_VL = (VL_TYPE) = ('tosca.nodes.nfv.ext.zte.VL')
-SDC_VL_SECTIONS = (VL_ID, VL_METADATA, VL_PROPERTIES, VL_DESCRIPTION) = \
- ("name", "metadata", "properties", "description")
-
-SDC_VF = (VF_TYPE, VF_UUID) = \
- ('org.openecomp.resource.abstract.nodes.VF', 'UUID')
-SDC_VF_SECTIONS = (VF_ID, VF_METADATA, VF_PROPERTIES, VF_DESCRIPTION) = \
- ("name", "metadata", "properties", "description")
-
-SDC_PNF = (PNF_TYPE) = \
- ('org.openecomp.resource.abstract.nodes.PNF')
-SDC_PNF_METADATA_SECTIONS = (SDC_PNF_UUID, SDC_PNF_INVARIANTUUID, SDC_PNF_NAME, SDC_PNF_METADATA_DESCRIPTION, SDC_PNF_VERSION) = \
- ("UUID", "invariantUUID", "name", "description", "version")
-SDC_PNF_SECTIONS = (SDC_PNF_ID, SDC_PNF_METADATA, SDC_PNF_PROPERTIES, SDC_PNF_DESCRIPTION) = \
- ("name", "metadata", "properties", "description")
-
-SERVICE_RELATIONSHIPS = [["tosca.relationships.network.LinksTo", "tosca.relationships.nfv.VirtualLinksTo", "tosca.capabilities.nfv.VirtualLinkable", "tosca.relationships.DependsOn"], []]
-
-
-class SdcServiceModel(BaseInfoModel):
-
- def __init__(self, tosca):
- super(SdcServiceModel, self).__init__(tosca=tosca)
-
- def parseModel(self, tosca):
- self.metadata = self._buildServiceMetadata(tosca)
- self.ns = self._build_ns(tosca)
- self.inputs = self.buildInputs(tosca)
- if hasattr(tosca, 'nodetemplates'):
- nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
- types = tosca.topology_template.custom_defs
- self.basepath = self.get_base_path(tosca)
- self.vnfs = self._get_all_vnf(nodeTemplates, types)
- self.pnfs = self._get_all_pnf(nodeTemplates, types)
- self.vls = self._get_all_vl(nodeTemplates, types)
- self.graph = self.get_deploy_graph(tosca, SERVICE_RELATIONSHIPS)
-
- def _buildServiceMetadata(self, tosca):
- """ SDC service Meta Format
- invariantUUID: e2618ee1 - a29a - 44c4 - a52a - b718fe1269f4
- UUID: 2362d14a - 115f - 4a2b - b449 - e2f93c0b7c89
- name: demoVLB
- description: catalogservicedescription
- type: Service
- category: NetworkL1 - 3
- serviceType: ''
- serviceRole: ''
- serviceEcompNaming: true
- ecompGeneratedNaming: true
- namingPolicy: ''
- """
- metadata_temp = self.buildMetadata(tosca)
- metadata = {}
- return self.setTargetValues(metadata, NS_METADATA_SECTIONS, metadata_temp, SDC_SERVICE_METADATA_SECTIONS)
-
- def _get_all_vnf(self, nodeTemplates, node_types):
- """ SDC Resource Metadata
- invariantUUID: 9ed46ddc-8eb7-4cb0-a1b6-04136c921af4
- UUID: b56ba35d-45fb-41e3-b6b8-b4f66917baa1
- customizationUUID: af0a6e64-967b-476b-87bc-959dcf59c305
- version: '1.0'
- name: b7d2fceb-dd11-43cd-a3fa
- description: vendor software product
- type: VF
- category: Generic
- subcategory: Abstract
- resourceVendor: b9d9f9f7-7994-4f0d-8104
- resourceVendorRelease: '1.0'
- resourceVendorModelNumber: ''
- """
- vnfs = []
- for node in nodeTemplates:
- if self.isNodeTypeX(node, node_types, VF_TYPE):
- vnf = {}
- self.setTargetValues(vnf, VNF_SECTIONS, node, SDC_VF_SECTIONS)
- if not vnf['properties'].get('id', None) and node['metadata']:
- vnf['properties']['id'] = node['metadata'].get('UUID', None)
- vnf['properties']['vnfm_info'] = vnf['properties'].get('nf_type', None)
- vnf['dependencies'] = self._get_networks(node, node_types)
- vnf['networks'] = self._get_networks(node, node_types)
- vnfs.append(vnf)
- return vnfs
-
- def _get_all_pnf(self, nodeTemplates, node_types):
- pnfs = []
- for node in nodeTemplates:
- if self.isNodeTypeX(node, node_types, PNF_TYPE):
- pnf = {}
- self.setTargetValues(pnf, PNF_SECTIONS, node, SDC_PNF_SECTIONS)
- self.setTargetValues(pnf['properties'], PNF_METADATA_SECTIONS, node['metadata'], SDC_PNF_METADATA_SECTIONS)
- pnf['networks'] = self._get_networks(node, node_types)
- pnfs.append(pnf)
- return pnfs
-
- def _get_all_vl(self, nodeTemplates, node_types):
- vls = []
- for node in nodeTemplates:
- if self.isNodeTypeX(node, node_types, VL_TYPE):
- vl = {}
- self.setTargetValues(vl, VL_SECTIONS, node, SDC_VL_SECTIONS)
- vl_profile = {}
- if 'segmentation_id' in vl['properties']:
- vl_profile['segmentationId'] = vl['properties'].get('segmentation_id')
- if 'network_name' in vl['properties']:
- vl_profile['networkName'] = vl['properties'].get('network_name')
- if 'cidr' in vl['properties']:
- vl_profile['cidr'] = vl['properties'].get('cidr')
- if 'start_ip' in vl['properties']:
- vl_profile['startIp'] = vl['properties'].get('start_ip', '')
- if 'end_ip' in vl['properties']:
- vl_profile['endIp'] = vl['properties'].get('end_ip', '')
- if 'gateway_ip' in vl['properties']:
- vl_profile['gatewayIp'] = vl['properties'].get('gateway_ip', '')
- if 'physical_network' in vl['properties']:
- vl_profile['physicalNetwork'] = vl['properties'].get('physical_network', '')
- if 'network_type' in vl['properties']:
- vl_profile['networkType'] = vl['properties'].get('network_type', '')
- if 'dhcp_enabled' in vl['properties']:
- vl_profile['dhcpEnabled'] = vl['properties'].get('dhcp_enabled', '')
- if 'vlan_transparent' in vl['properties']:
- vl_profile['vlanTransparent'] = vl['properties'].get('vlan_transparent', '')
- if 'mtu' in vl['properties']:
- vl_profile['mtu'] = vl['properties'].get('mtu', '')
- if 'ip_version' in vl['properties']:
- vl_profile['ip_version'] = vl['properties'].get('ip_version', '')
- if 'dns_nameservers' in vl['properties']:
- vl_profile['dns_nameservers'] = vl['properties'].get('dns_nameservers', [])
- if 'host_routes' in vl['properties']:
- vl_profile['host_routes'] = vl['properties'].get('host_routes', [])
- if 'network_id' in vl['properties']:
- vl_profile['network_id'] = vl['properties'].get('network_id', '')
- vl['properties']['vl_profile'] = vl_profile
- vls.append(vl)
- return vls
-
- def _get_networks(self, node, node_types):
- rets = []
- if 'requirements' in node and self.isNodeTypeX(node, node_types, VF_TYPE):
- for item in node['requirements']:
- for key, value in list(item.items()):
- rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)})
- return rets
-
- def _build_ns(self, tosca):
- ns = self.get_substitution_mappings(tosca)
- properties = ns.get("properties", {})
- metadata = ns.get("metadata", {})
- if properties.get("descriptor_id", "") == "":
- descriptor_id = metadata.get(SRV_UUID, "")
- properties["descriptor_id"] = descriptor_id
- properties["verison"] = ""
- properties["designer"] = ""
- if properties.get("name", "") == "":
- template_name = metadata.get(SRV_NAME, "")
- properties["name"] = template_name
- if properties.get("invariant_id", "") == "":
- nsd_invariant_id = metadata.get(SRV_INVARIANTUUID, "")
- properties["invariant_id"] = nsd_invariant_id
- return ns
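The VL handling above mostly renames OpenStack-style properties into the camelCase vl_profile consumed downstream. A hedged before/after sketch with hypothetical values:

    properties = {"network_name": "oam_net", "cidr": "10.0.0.0/24", "dhcp_enabled": True}
    # After _get_all_vl, the VL entry carries:
    # properties["vl_profile"] == {"networkName": "oam_net", "cidr": "10.0.0.0/24", "dhcpEnabled": True}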
diff --git a/genericparser/pub/utils/toscaparsers/testdata/ns/ran.csar b/genericparser/pub/utils/toscaparsers/testdata/ns/ran.csar
deleted file mode 100644
index 9ea868c..0000000
--- a/genericparser/pub/utils/toscaparsers/testdata/ns/ran.csar
+++ /dev/null
Binary files differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/ns/service-vIMS.csar b/genericparser/pub/utils/toscaparsers/testdata/ns/service-vIMS.csar
deleted file mode 100644
index 0aeed58..0000000
--- a/genericparser/pub/utils/toscaparsers/testdata/ns/service-vIMS.csar
+++ /dev/null
Binary files differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/pnf/ran-du.csar b/genericparser/pub/utils/toscaparsers/testdata/pnf/ran-du.csar
deleted file mode 100644
index 45168a9..0000000
--- a/genericparser/pub/utils/toscaparsers/testdata/pnf/ran-du.csar
+++ /dev/null
Binary files differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vSBC.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vSBC.csar
deleted file mode 100644
index 921eafd..0000000
--- a/genericparser/pub/utils/toscaparsers/testdata/vnf/vSBC.csar
+++ /dev/null
Binary files differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/infra.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/infra.csar
deleted file mode 100644
index 5c9fbcf..0000000
--- a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/infra.csar
+++ /dev/null
Binary files differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbng.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbng.csar
deleted file mode 100644
index b11a6ef..0000000
--- a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbng.csar
+++ /dev/null
Binary files differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbrgemu.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbrgemu.csar
deleted file mode 100644
index 730ea8d..0000000
--- a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbrgemu.csar
+++ /dev/null
Binary files differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgmux.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgmux.csar
deleted file mode 100644
index b0f37a7..0000000
--- a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgmux.csar
+++ /dev/null
Binary files differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgw.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgw.csar
deleted file mode 100644
index ca652bf..0000000
--- a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgw.csar
+++ /dev/null
Binary files differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/infra.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/infra.csar
deleted file mode 100644
index c91c034..0000000
--- a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/infra.csar
+++ /dev/null
Binary files differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbng.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbng.csar
deleted file mode 100644
index 5011563..0000000
--- a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbng.csar
+++ /dev/null
Binary files differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbrgemu.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbrgemu.csar
deleted file mode 100644
index 0f99199..0000000
--- a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbrgemu.csar
+++ /dev/null
Binary files differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgmux.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgmux.csar
deleted file mode 100644
index 3d2dbf7..0000000
--- a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgmux.csar
+++ /dev/null
Binary files differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgw.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgw.csar
deleted file mode 100644
index 79e0d20..0000000
--- a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgw.csar
+++ /dev/null
Binary files differ
diff --git a/genericparser/pub/utils/toscaparsers/tests.py b/genericparser/pub/utils/toscaparsers/tests.py
deleted file mode 100644
index e28f712..0000000
--- a/genericparser/pub/utils/toscaparsers/tests.py
+++ /dev/null
@@ -1,102 +0,0 @@
-# Copyright 2018 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import json
-import os
-import logging
-import tempfile
-import shutil
-
-from django.test import TestCase
-
-from genericparser.pub.utils.toscaparsers import parse_vnfd, parse_pnfd, parse_nsd
-from genericparser.pub.utils.toscaparsers.graph import Graph
-
-logger = logging.getLogger(__name__)
-
-
-class TestToscaparser(TestCase):
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- def test_vnfd_parse(self):
- self.remove_temp_dir()
- input_parameters = [{"value": "222222", "key": "sdncontroller"}]
- # vcpe = ["vgw", "infra", "vbng", "vbrgemu", "vgmux"]
- vcpe_part = 'vgw'
- sriov_path = os.path.dirname(os.path.abspath(__file__)) + "/testdata/vnf/vcpesriov"
- csar_file = ("%s/%s.csar" % (sriov_path, vcpe_part))
- logger.debug("csar_file:%s", csar_file)
- vnfd_json = parse_vnfd(csar_file, input_parameters)
- metadata = json.loads(vnfd_json).get("metadata")
- logger.debug("sriov metadata:%s", metadata)
- self.assertEqual(("vCPE_%s" % vcpe_part), metadata.get("template_name", ""))
- if vcpe_part == "infra":
- self.assertEqual("b1bb0ce7-1111-4fa7-95ed-4840d70a1177",
- json.loads(vnfd_json)["vnf"]["properties"]["descriptor_id"])
-
- dpdk_path = os.path.dirname(os.path.abspath(__file__)) + "/testdata/vnf/vcpedpdk"
- csar_file = ("%s/%s.csar" % (dpdk_path, vcpe_part))
- logger.debug("csar_file:%s", csar_file)
- vnfd_json = parse_vnfd(csar_file, input_parameters)
- metadata = json.loads(vnfd_json).get("metadata")
- logger.debug("dpdk metadata:%s", metadata)
- self.assertEqual(("vCPE_%s" % vcpe_part), metadata.get("template_name", ""))
-
- def test_pnfd_parse(self):
- self.remove_temp_dir()
- csar_path = os.path.dirname(os.path.abspath(__file__)) + "/testdata/pnf/ran-du.csar"
- pnfd_json = parse_pnfd(csar_path)
- pnfd_dict = json.loads(pnfd_json)
- metadata = pnfd_dict.get("metadata")
- self.assertEqual("RAN_DU", metadata.get("template_name", ""))
- descriptor_id = pnfd_dict["pnf"]["properties"]["descriptor_id"]
- self.assertEqual(1, descriptor_id)
-
- def test_nsd_parse(self):
- self.remove_temp_dir()
- # ran_csar = os.path.dirname(os.path.abspath(__file__)) + "/testdata/ns/ran.csar"
- # nsd_json = parse_nsd(ran_csar, [])
- # logger.debug("NS ran json: %s" % nsd_json)
- # metadata = json.loads(nsd_json).get("metadata")
- # self.assertEqual("RAN-NS", metadata.get("nsd_name", ""))
-
- def test_service_descriptor_parse(self):
- self.remove_temp_dir()
- service_test_csar = os.path.dirname(os.path.abspath(__file__)) + "/testdata/ns/service-vIMS.csar"
- test_json = parse_nsd(service_test_csar, [])
- logger.debug("service-vIMS json: %s" % test_json)
- metadata = json.loads(test_json).get("metadata")
- self.assertEqual("vIMS_v2", metadata.get("nsd_name", ""))
-
- def remove_temp_dir(self):
- tempdir = tempfile.gettempdir()
- for dir in os.listdir(tempdir):
- if dir.startswith("tmp"):
- path = tempfile.tempdir + "/" + dir
- if (not os.path.isfile(path)) and os.path.exists(path):
-                    shutil.rmtree(path)
-
- def test_graph(self):
- data = {
- "cucp": [],
- "du": [],
- "vl_flat_net": ["cucp", "cuup"],
- "vl_ext_net": ["cucp", "cuup"],
- "cuup": []
- }
- graph = Graph(data)
-        self.assertEqual(sorted(['vl_ext_net', 'vl_flat_net']), sorted(graph.get_pre_nodes("cucp")))
diff --git a/genericparser/pub/utils/toscaparsers/vnfdmodel.py b/genericparser/pub/utils/toscaparsers/vnfdmodel.py
deleted file mode 100644
index 2e48b4d..0000000
--- a/genericparser/pub/utils/toscaparsers/vnfdmodel.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import functools
-import logging
-from genericparser.pub.config.config import VNFD_SCHEMA_VERSION_DEFAULT
-from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel
-from genericparser.pub.utils.toscaparsers.vnfdparser import CreateVnfdSOLParser
-
-
-logger = logging.getLogger(__name__)
-
-NFV_VNF_RELATIONSHIPS = [["tosca.relationships.nfv.VirtualLinksTo", "tosca.relationships.nfv.VduAttachesTo", "tosca.relationships.nfv.AttachesTo", "tosca.relationships.nfv.Vdu.AttachedTo", "tosca.relationships.DependsOn"],
- ["tosca.nodes.relationships.VirtualBindsTo", "tosca.relationships.nfv.VirtualBindsTo"]]
-
-
-class EtsiVnfdInfoModel(BaseInfoModel):
-
- def __init__(self, path, params):
- self.vnf = {}
- super(EtsiVnfdInfoModel, self).__init__(path, params)
-
- def parseModel(self, tosca):
- self.metadata = self.buildMetadata(tosca)
- self.inputs = self.buildInputs(tosca)
- nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
- self.basepath = self.get_base_path(tosca)
- node_types = tosca.topology_template.custom_defs
- sol_version = self.metadata.get("VNFD_SCHEMA_VERSION", VNFD_SCHEMA_VERSION_DEFAULT) if isinstance(self.metadata, dict) else VNFD_SCHEMA_VERSION_DEFAULT
- vnfd_sol_parser = CreateVnfdSOLParser(sol_version, self)
- self.vnf = vnfd_sol_parser.build_vnf(tosca)
- self.volume_storages = vnfd_sol_parser.get_all_volume_storage(nodeTemplates, node_types)
- self.vdus = vnfd_sol_parser.get_all_vdu(nodeTemplates, node_types)
- self.vls = vnfd_sol_parser.get_all_vl(nodeTemplates, node_types)
- self.cps = vnfd_sol_parser.get_all_cp(nodeTemplates, node_types)
- self.vnf_exposed = vnfd_sol_parser.get_all_endpoint_exposed()
- self.graph = self.get_deploy_graph(tosca, NFV_VNF_RELATIONSHIPS)
diff --git a/genericparser/pub/utils/toscaparsers/vnfdparser/__init__.py b/genericparser/pub/utils/toscaparsers/vnfdparser/__init__.py
deleted file mode 100644
index 179fb4c..0000000
--- a/genericparser/pub/utils/toscaparsers/vnfdparser/__init__.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2019 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from genericparser.pub.utils.toscaparsers.vnfdparser.vnfd_sol_base import VnfdSOLBase
-from genericparser.pub.utils.toscaparsers.vnfdparser.vnfd_sol_251 import VnfdSOL251
-
-
-def CreateVnfdSOLParser(sol_version, etsi_vnfd_model):
- switcher = {
- "base": VnfdSOLBase(etsi_vnfd_model),
- "2.5.1+1": VnfdSOL251(etsi_vnfd_model)
- }
- return switcher.get(sol_version, lambda: "Invalid Version")
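A short usage sketch of the factory above, where etsi_vnfd_model stands for the EtsiVnfdInfoModel instance that vnfdmodel.py passes in as self. Note that both parser objects are built eagerly on every call, and an unrecognised version falls back to a bare lambda rather than a parser instance, so callers are expected to pass a known version.

    parser = CreateVnfdSOLParser("2.5.1+1", etsi_vnfd_model)  # -> VnfdSOL251 instance
    parser = CreateVnfdSOLParser("base", etsi_vnfd_model)     # -> VnfdSOLBase instance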
diff --git a/genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_251.py b/genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_251.py
deleted file mode 100644
index e71623a..0000000
--- a/genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_251.py
+++ /dev/null
@@ -1,264 +0,0 @@
-# Copyright 2019 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import functools
-import logging
-import os
-import base64
-
-
-logger = logging.getLogger(__name__)
-
-SECTIONS = (VDU_COMPUTE_TYPE, VNF_VL_TYPE, VDU_CP_TYPE, VDU_STORAGE_TYPE) = \
- ('tosca.nodes.nfv.Vdu.Compute', 'tosca.nodes.nfv.VnfVirtualLink', 'tosca.nodes.nfv.VduCp', 'tosca.nodes.nfv.Vdu.VirtualStorage')
-
-
-class VnfdSOL251():
-
- def __init__(self, model):
- self.model = model
-
- def build_vnf(self, tosca):
- vnf = self.model.get_substitution_mappings(tosca)
- properties = vnf.get("properties", {})
- metadata = vnf.get("metadata", {})
-
- for key, value in list(properties.items()):
- if isinstance(value, dict):
- if value["type"] == "string":
- properties[key] = value.get("default", "")
- elif value["type"] == "list":
- properties[key] = value.get("default", {})
- else:
- properties[key] = value.get("default", "")
-        # Fill standard VNF properties from metadata when they are missing or empty.
-        property_meta_map = [
-            ("descriptor_id", ["descriptor_id", "id", "UUID"]),
-            ("descriptor_version", ["template_version", "version"]),
-            ("provider", ["template_author", "provider"]),
-            ("template_name", ["template_name"]),
-            ("software_version", ["software_version"]),
-            ("product_name", ["product_name"]),
-            ("flavour_description", ["flavour_description"]),
-            ("vnfm_info", ["vnfm_info"]),
-            ("flavour_id", ["flavour_id"]),
-        ]
-        for ptype, meta_types in property_meta_map:
-            self._get_property(properties, metadata, ptype, meta_types)
-
- logger.debug("vnf:%s", vnf)
-
- return vnf
-
- def get_all_vl(self, nodeTemplates, node_types):
- vls = []
- for node in nodeTemplates:
- if self.model.isNodeTypeX(node, node_types, VNF_VL_TYPE):
- vl = dict()
- vl['vl_id'] = node['name']
- vl['description'] = node['description']
- vl['properties'] = node['properties']
- vls.append(vl)
- return vls
-
- def get_all_cp(self, nodeTemplates, node_types):
- cps = []
- for node in nodeTemplates:
- if self.model.isNodeTypeX(node, node_types, VDU_CP_TYPE):
- cp = {}
- cp['cp_id'] = node['name']
- cp['cpd_id'] = node['name']
- cp['description'] = node['description']
- cp['properties'] = node['properties']
- cp['vl_id'] = self._get_node_vl_id(node)
- cp['vdu_id'] = self._get_node_vdu_id(node)
- vls = self._buil_cp_vls(node)
- if len(vls) > 1:
- cp['vls'] = vls
- cps.append(cp)
- return cps
-
- def get_all_volume_storage(self, nodeTemplates, node_types):
- rets = []
- for node in nodeTemplates:
- if self.model.isNodeTypeX(node, node_types, VDU_STORAGE_TYPE):
- ret = {}
- ret['volume_storage_id'] = node['name']
- if 'description' in node:
- ret['description'] = node['description']
- ret['properties'] = node['properties']
- rets.append(ret)
- return rets
-
- def get_all_vdu(self, nodeTemplates, node_types):
- rets = []
- inject_files = []
- for node in nodeTemplates:
- logger.debug("nodeTemplates :%s", node)
- if self.model.isNodeTypeX(node, node_types, VDU_COMPUTE_TYPE):
- ret = {}
- ret['vdu_id'] = node['name']
- ret['type'] = node['nodeType']
- if 'description' in node:
- ret['description'] = node['description']
- ret['properties'] = node['properties']
- if 'inject_files' in node['properties']:
- inject_files = node['properties']['inject_files']
- if inject_files is not None:
- if isinstance(inject_files, list):
- for inject_file in inject_files:
- source_path = os.path.join(self.model.basepath, inject_file['source_path'])
- with open(source_path, "rb") as f:
- source_data = f.read()
- source_data_base64 = base64.b64encode(source_data)
- inject_file["source_data_base64"] = source_data_base64.decode()
- if isinstance(inject_files, dict):
- source_path = os.path.join(self.model.basepath, inject_files['source_path'])
- with open(source_path, "rb") as f:
- source_data = f.read()
- source_data_base64 = base64.b64encode(source_data)
- inject_files["source_data_base64"] = source_data_base64.decode()
- virtual_storages = self.model.getRequirementByName(node, 'virtual_storage')
- ret['virtual_storages'] = list(map(functools.partial(self._trans_virtual_storage), virtual_storages))
- ret['dependencies'] = [self.model.get_requirement_node_name(x) for x in self.model.getNodeDependencys(node)]
- virtual_compute = self.model.getCapabilityByName(node, 'virtual_compute')
- if virtual_compute is not None and 'properties' in virtual_compute:
- ret['virtual_compute'] = virtual_compute['properties']
- ret['vls'] = self._get_linked_vl_ids(node, nodeTemplates)
- ret['cps'] = self._get_virtal_binding_cp_ids(node, nodeTemplates)
- ret['artifacts'] = self.model.build_artifacts(node)
- rets.append(ret)
- logger.debug("rets:%s", rets)
- return rets
-
- def get_all_endpoint_exposed(self):
- if self.model.vnf:
- external_cps = self._get_external_cps(self.model.vnf.get('requirements', None))
- forward_cps = self._get_forward_cps(self.model.vnf.get('capabilities', None))
- return {"external_cps": external_cps, "forward_cps": forward_cps}
- return {}
-
-    def _get_property(self, properties, metadata, ptype, meta_types):
-        if ptype not in properties or properties[ptype] == "":
-            for mtype in meta_types:
-                data = metadata.get(mtype, "")
-                if data != "":
-                    properties[ptype] = data
-                    return
-
- def _trans_virtual_storage(self, virtual_storage):
- if isinstance(virtual_storage, str):
- return {"virtual_storage_id": virtual_storage}
- else:
- ret = {}
- ret['virtual_storage_id'] = self.model.get_requirement_node_name(virtual_storage)
- return ret
-
- def _get_linked_vl_ids(self, node, node_templates):
- vl_ids = []
- cps = self._get_virtal_binding_cps(node, node_templates)
- for cp in cps:
- vl_reqs = self.model.getRequirementByName(cp, 'virtual_link')
- for vl_req in vl_reqs:
- vl_ids.append(self.model.get_requirement_node_name(vl_req))
- return vl_ids
-
- def _get_virtal_binding_cp_ids(self, node, nodeTemplates):
- return [x['name'] for x in self._get_virtal_binding_cps(node, nodeTemplates)]
-
- def _get_virtal_binding_cps(self, node, nodeTemplates):
- cps = []
- for tmpnode in nodeTemplates:
- if 'requirements' in tmpnode:
- for item in tmpnode['requirements']:
- for key, value in list(item.items()):
- if key.upper().startswith('VIRTUAL_BINDING'):
- req_node_name = self.model.get_requirement_node_name(value)
- if req_node_name is not None and req_node_name == node['name']:
- cps.append(tmpnode)
- return cps
-
- def _get_node_vdu_id(self, node):
- vdu_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_binding')]
- if len(vdu_ids) > 0:
- return vdu_ids[0]
- return ""
-
- def _get_node_vl_id(self, node):
- vl_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
- if len(vl_ids) > 0:
- return vl_ids[0]
- return ""
-
- def _buil_cp_vls(self, node):
- return [self._build_cp_vl(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
-
- def _build_cp_vl(self, req):
- cp_vl = {}
- cp_vl['vl_id'] = self.model.get_prop_from_obj(req, 'node')
- relationship = self.model.get_prop_from_obj(req, 'relationship')
- if relationship is not None:
- properties = self.model.get_prop_from_obj(relationship, 'properties')
- if properties is not None and isinstance(properties, dict):
- for key, value in list(properties.items()):
- cp_vl[key] = value
- return cp_vl
-
- def _get_external_cps(self, vnf_requirements):
- external_cps = []
- if vnf_requirements:
- if isinstance(vnf_requirements, dict):
- for key, value in list(vnf_requirements.items()):
- if isinstance(value, list) and len(value) > 0:
- external_cps.append({"key_name": key, "cpd_id": value[0]})
- else:
- external_cps.append({"key_name": key, "cpd_id": value})
- elif isinstance(vnf_requirements, list):
- for vnf_requirement in vnf_requirements:
- for key, value in list(vnf_requirement.items()):
- if isinstance(value, list) and len(value) > 0:
- external_cps.append({"key_name": key, "cpd_id": value[0]})
- else:
- external_cps.append({"key_name": key, "cpd_id": value})
- return external_cps
-
- def _get_forward_cps(self, vnf_capabilities):
- forward_cps = []
- if vnf_capabilities:
- for key, value in list(vnf_capabilities.items()):
- if isinstance(value, list) and len(value) > 0:
- forward_cps.append({"key_name": key, "cpd_id": value[0]})
- else:
- forward_cps.append({"key_name": key, "cpd_id": value})
- return forward_cps
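A small self-contained sketch of the _get_property fallback above (instantiating the parser with a placeholder model, which _get_property never touches): the property is only filled when missing or empty, taking the first non-empty candidate metadata key.

    properties = {"descriptor_id": ""}
    metadata = {"id": "vnf-123"}
    VnfdSOL251(None)._get_property(properties, metadata, "descriptor_id", ["descriptor_id", "id", "UUID"])
    assert properties["descriptor_id"] == "vnf-123"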
diff --git a/genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_base.py b/genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_base.py
deleted file mode 100644
index 7b3a1a0..0000000
--- a/genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_base.py
+++ /dev/null
@@ -1,236 +0,0 @@
-# Copyright 2019 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import functools
-import logging
-import os
-import base64
-
-
-logger = logging.getLogger(__name__)
-
-SECTIONS = (VDU_COMPUTE_TYPE, VNF_VL_TYPE, VDU_CP_TYPE, VDU_STORAGE_TYPE) = \
- ('tosca.nodes.nfv.Vdu.Compute', 'tosca.nodes.nfv.VnfVirtualLink', 'tosca.nodes.nfv.VduCp', 'tosca.nodes.nfv.Vdu.VirtualStorage')
-
-
-class VnfdSOLBase():
-
- def __init__(self, model):
- self.model = model
-
- def build_vnf(self, tosca):
- vnf = self.model.get_substitution_mappings(tosca)
- properties = vnf.get("properties", {})
- metadata = vnf.get("metadata", {})
- if properties.get("descriptor_id", "") == "":
- descriptor_id = metadata.get("descriptor_id", "")
- if descriptor_id == "":
- descriptor_id = metadata.get("id", "")
- if descriptor_id == "":
- descriptor_id = metadata.get("UUID", "")
- properties["descriptor_id"] = descriptor_id
-
- if properties.get("descriptor_version", "") == "":
- version = metadata.get("template_version", "")
- if version == "":
- version = metadata.get("version", "")
- properties["descriptor_version"] = version
-
- if properties.get("provider", "") == "":
- provider = metadata.get("template_author", "")
- if provider == "":
- provider = metadata.get("provider", "")
- properties["provider"] = provider
-
-        if properties.get("template_name", "") == "":
-            properties["template_name"] = metadata.get("template_name", "")
- logger.debug("vnf:%s", vnf)
- return vnf
-
- def get_all_vl(self, nodeTemplates, node_types):
- vls = []
- for node in nodeTemplates:
- if self.model.isNodeTypeX(node, node_types, VNF_VL_TYPE):
- vl = dict()
- vl['vl_id'] = node['name']
- vl['description'] = node['description']
- vl['properties'] = node['properties']
- vls.append(vl)
- return vls
-
- def get_all_cp(self, nodeTemplates, node_types):
- cps = []
- for node in nodeTemplates:
- if self.model.isNodeTypeX(node, node_types, VDU_CP_TYPE):
- cp = {}
- cp['cp_id'] = node['name']
- cp['cpd_id'] = node['name']
- cp['description'] = node['description']
- cp['properties'] = node['properties']
- cp['vl_id'] = self._get_node_vl_id(node)
- cp['vdu_id'] = self._get_node_vdu_id(node)
- vls = self._buil_cp_vls(node)
- if len(vls) > 1:
- cp['vls'] = vls
- cps.append(cp)
- return cps
-
- def get_all_volume_storage(self, nodeTemplates, node_types):
- rets = []
- for node in nodeTemplates:
- if self.model.isNodeTypeX(node, node_types, VDU_STORAGE_TYPE):
- ret = {}
- ret['volume_storage_id'] = node['name']
- if 'description' in node:
- ret['description'] = node['description']
- ret['properties'] = node['properties']
- rets.append(ret)
- return rets
-
- def get_all_vdu(self, nodeTemplates, node_types):
- rets = []
- inject_files = []
- for node in nodeTemplates:
- logger.debug("nodeTemplates :%s", node)
- if self.model.isNodeTypeX(node, node_types, VDU_COMPUTE_TYPE):
- ret = {}
- ret['vdu_id'] = node['name']
- ret['type'] = node['nodeType']
- if 'description' in node:
- ret['description'] = node['description']
- ret['properties'] = node['properties']
- if 'inject_files' in node['properties']:
- inject_files = node['properties']['inject_files']
- if inject_files is not None:
- if isinstance(inject_files, list):
- for inject_file in inject_files:
- source_path = os.path.join(self.model.basepath, inject_file['source_path'])
- with open(source_path, "rb") as f:
- source_data = f.read()
- source_data_base64 = base64.b64encode(source_data)
- inject_file["source_data_base64"] = source_data_base64.decode()
- if isinstance(inject_files, dict):
- source_path = os.path.join(self.model.basepath, inject_files['source_path'])
- with open(source_path, "rb") as f:
- source_data = f.read()
- source_data_base64 = base64.b64encode(source_data)
- inject_files["source_data_base64"] = source_data_base64.decode()
- virtual_storages = self.model.getRequirementByName(node, 'virtual_storage')
- ret['virtual_storages'] = list(map(functools.partial(self._trans_virtual_storage), virtual_storages))
- ret['dependencies'] = [self.model.get_requirement_node_name(x) for x in self.model.getNodeDependencys(node)]
- virtual_compute = self.model.getCapabilityByName(node, 'virtual_compute')
- if virtual_compute is not None and 'properties' in virtual_compute:
- ret['virtual_compute'] = virtual_compute['properties']
- ret['vls'] = self._get_linked_vl_ids(node, nodeTemplates)
- ret['cps'] = self._get_virtal_binding_cp_ids(node, nodeTemplates)
- ret['artifacts'] = self.model.build_artifacts(node)
- rets.append(ret)
- logger.debug("rets:%s", rets)
- return rets
-
- def get_all_endpoint_exposed(self):
- if self.model.vnf:
- external_cps = self._get_external_cps(self.model.vnf.get('requirements', None))
- forward_cps = self._get_forward_cps(self.model.vnf.get('capabilities', None))
- return {"external_cps": external_cps, "forward_cps": forward_cps}
- return {}
-
- def _trans_virtual_storage(self, virtual_storage):
- if isinstance(virtual_storage, str):
- return {"virtual_storage_id": virtual_storage}
- else:
- ret = {}
- ret['virtual_storage_id'] = self.model.get_requirement_node_name(virtual_storage)
- return ret
-
- def _get_linked_vl_ids(self, node, node_templates):
- vl_ids = []
- cps = self._get_virtal_binding_cps(node, node_templates)
- for cp in cps:
- vl_reqs = self.model.getRequirementByName(cp, 'virtual_link')
- for vl_req in vl_reqs:
- vl_ids.append(self.model.get_requirement_node_name(vl_req))
- return vl_ids
-
- def _get_virtal_binding_cp_ids(self, node, nodeTemplates):
- return [x['name'] for x in self._get_virtal_binding_cps(node, nodeTemplates)]
-
- def _get_virtal_binding_cps(self, node, nodeTemplates):
- cps = []
- for tmpnode in nodeTemplates:
- if 'requirements' in tmpnode:
- for item in tmpnode['requirements']:
- for key, value in list(item.items()):
- if key.upper().startswith('VIRTUAL_BINDING'):
- req_node_name = self.model.get_requirement_node_name(value)
- if req_node_name is not None and req_node_name == node['name']:
- cps.append(tmpnode)
- return cps
-
- def _get_node_vdu_id(self, node):
- vdu_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_binding')]
- if len(vdu_ids) > 0:
- return vdu_ids[0]
- return ""
-
- def _get_node_vl_id(self, node):
- vl_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
- if len(vl_ids) > 0:
- return vl_ids[0]
- return ""
-
- def _buil_cp_vls(self, node):
- return [self._build_cp_vl(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
-
- def _build_cp_vl(self, req):
- cp_vl = {}
- cp_vl['vl_id'] = self.model.get_prop_from_obj(req, 'node')
- relationship = self.model.get_prop_from_obj(req, 'relationship')
- if relationship is not None:
- properties = self.model.get_prop_from_obj(relationship, 'properties')
- if properties is not None and isinstance(properties, dict):
- for key, value in list(properties.items()):
- cp_vl[key] = value
- return cp_vl
-
- def _get_external_cps(self, vnf_requirements):
- external_cps = []
- if vnf_requirements:
- if isinstance(vnf_requirements, dict):
- for key, value in list(vnf_requirements.items()):
- if isinstance(value, list) and len(value) > 0:
- external_cps.append({"key_name": key, "cpd_id": value[0]})
- else:
- external_cps.append({"key_name": key, "cpd_id": value})
- elif isinstance(vnf_requirements, list):
- for vnf_requirement in vnf_requirements:
- for key, value in list(vnf_requirement.items()):
- if isinstance(value, list) and len(value) > 0:
- external_cps.append({"key_name": key, "cpd_id": value[0]})
- else:
- external_cps.append({"key_name": key, "cpd_id": value})
- return external_cps
-
- def _get_forward_cps(self, vnf_capabilities):
- forward_cps = []
- if vnf_capabilities:
- for key, value in list(vnf_capabilities.items()):
- if isinstance(value, list) and len(value) > 0:
- forward_cps.append({"key_name": key, "cpd_id": value[0]})
- else:
- forward_cps.append({"key_name": key, "cpd_id": value})
- return forward_cps
diff --git a/genericparser/pub/utils/values.py b/genericparser/pub/utils/values.py
deleted file mode 100644
index d02d544..0000000
--- a/genericparser/pub/utils/values.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2017 ZTE Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-def ignore_case_get(args, key, def_val=""):
- if not key:
- return def_val
- if key in args:
- return args[key]
- for old_key in args:
- if old_key.upper() == key.upper():
- return args[old_key]
- return def_val
-
-
-def remove_none_key(data, none_list=None):
- none_list = none_list if none_list else [None, '', 'NULL', 'None', [], {}]
- if isinstance(data, dict):
- data = dict([(k, remove_none_key(v, none_list)) for k, v in list(data.items()) if v not in none_list])
- if isinstance(data, list):
- data = [remove_none_key(s, none_list) for s in data if s not in none_list]
- return data
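A short usage sketch of the two helpers above, assuming values.py is importable: ignore_case_get matches dictionary keys case-insensitively, and remove_none_key recursively strips entries whose values are empty-ish (None, '', 'NULL', 'None', [], {}).

    ignore_case_get({"Content-Type": "application/json"}, "content-type")  # -> "application/json"
    ignore_case_get({}, "missing", def_val="fallback")                     # -> "fallback"
    remove_none_key({"a": "", "b": {"c": None, "d": 1}, "e": []})          # -> {"b": {"d": 1}}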