author     dyh <dengyuanhong@chinamobile.com>    2019-04-19 13:57:46 +0800
committer  dyh <dengyuanhong@chinamobile.com>    2019-04-19 14:01:10 +0800
commit     affcd0828f0dd0941ba546aca5bce05e8ee1a598 (patch)
tree       a20b80dbc987dde4ee3e65f3b29e81b49c93cb4f /genericparser/pub
parent     4735409872776f9675df733087dcb1b61b038ab0 (diff)
genericparser seed code
Change-Id: Id15ac689c1d560619bf6c699fb0786e7381d3def
Issue-ID: MODELING-153
Signed-off-by: dyh <dengyuanhong@chinamobile.com>
Diffstat (limited to 'genericparser/pub')
48 files changed, 3308 insertions, 0 deletions
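
Among the files below, config/config.py carries the MSB registration payload: when REG_TO_MSB_WHEN_START is true, each entry of REG_TO_MSB_REG_PARAM is meant to be posted to the MSB endpoint named by REG_TO_MSB_REG_URL. A minimal sketch of that registration loop; the use of the third-party requests library is an assumption for illustration (the seed code ships its own restcall helper, and the actual registration code is not part of this change):

    import json

    import requests  # assumption: illustration only, not used by the seed code

    from genericparser.pub.config import config

    def register_to_msb():
        # Post each service definition from config.py to MSB.
        base = "http://%s:%s" % (config.MSB_SERVICE_IP, config.MSB_SERVICE_PORT)
        for service in config.REG_TO_MSB_REG_PARAM:
            resp = requests.post(base + config.REG_TO_MSB_REG_URL,
                                 data=json.dumps(service),
                                 headers={"Content-Type": "application/json"})
            resp.raise_for_status()

    if config.REG_TO_MSB_WHEN_START:
        register_to_msb()
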
diff --git a/genericparser/pub/__init__.py b/genericparser/pub/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/genericparser/pub/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/pub/config/__init__.py b/genericparser/pub/config/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/genericparser/pub/config/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/pub/config/config.py b/genericparser/pub/config/config.py
new file mode 100644
index 0000000..abecd79
--- /dev/null
+++ b/genericparser/pub/config/config.py
@@ -0,0 +1,83 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [MSB]
+MSB_SERVICE_IP = '127.0.0.1'
+MSB_SERVICE_PORT = '80'
+
+# [REDIS]
+REDIS_HOST = '127.0.0.1'
+REDIS_PORT = '6379'
+REDIS_PASSWD = ''
+
+# [mysql]
+DB_IP = "127.0.0.1"
+DB_PORT = 3306
+DB_NAME = "genericpaser"
+DB_USER = "genericpaser"
+DB_PASSWD = "genericpaser"
+
+# [MDC]
+SERVICE_NAME = "genericparser"
+FORWARDED_FOR_FIELDS = ["HTTP_X_FORWARDED_FOR", "HTTP_X_FORWARDED_HOST",
+                        "HTTP_X_FORWARDED_SERVER"]
+
+# [register]
+REG_TO_MSB_WHEN_START = True
+REG_TO_MSB_REG_URL = "/api/microservices/v1/services"
+REG_TO_MSB_REG_PARAM = [{
+    "serviceName": "genericparser",
+    "version": "v1",
+    "url": "/api/genericparser/v1",
+    "protocol": "REST",
+    "visualRange": "1",
+    "nodes": [{
+        "ip": "127.0.0.1",
+        "port": "8806",
+        "ttl": 0
+    }]
+}, {
+    "serviceName": "nsd",
+    "version": "v1",
+    "url": "/api/nsd/v1",
+    "protocol": "REST",
+    "visualRange": "1",
+    "nodes": [{
+        "ip": "127.0.0.1",
+        "port": "8806",
+        "ttl": 0
+    }]
+}, {
+    "serviceName": "vnfpkgm",
+    "version": "v1",
+    "url": "/api/vnfpkgm/v1",
+    "protocol": "REST",
+    "visualRange": "1",
+    "nodes": [{
+        "ip": "127.0.0.1",
+        "port": "8806",
+        "ttl": 0
+    }]
+}]
+
+# genericparser path(values is defined in settings.py)
+# CATALOG_ROOT_PATH = None
+# CATALOG_URL_PATH = None
+GENERICPARSER_ROOT_PATH = None
+GENERICPARSER_URL_PATH = None
+
+# [sdc config]
+SDC_BASE_URL = "http://msb-iag/api"
+SDC_USER = "aai"
+SDC_PASSWD = "Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U"
diff --git a/genericparser/pub/database/__init__.py b/genericparser/pub/database/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/genericparser/pub/database/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/pub/database/migrations/0001_initial.py b/genericparser/pub/database/migrations/0001_initial.py
new file mode 100644
index 0000000..8446b6e
--- /dev/null
+++ b/genericparser/pub/database/migrations/0001_initial.py
@@ -0,0 +1,229 @@
+# Copyright 2019 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# Generated by Django 1.11.9 on 2019-04-16 03:53
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    initial = True
+
+    dependencies = [
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='JobModel',
+            fields=[
+                ('jobid', models.CharField(db_column=b'JOBID', max_length=255, primary_key=True, serialize=False)),
+                ('jobtype', models.CharField(db_column=b'JOBTYPE', max_length=255)),
+                ('jobaction', models.CharField(db_column=b'JOBACTION', max_length=255)),
+                ('resid', models.CharField(db_column=b'RESID', max_length=255)),
+                ('status', models.IntegerField(blank=True, db_column=b'STATUS', null=True)),
+                ('starttime', models.CharField(blank=True, db_column=b'STARTTIME', max_length=255, null=True)),
+                ('endtime', models.CharField(blank=True, db_column=b'ENDTIME', max_length=255, null=True)),
+                ('progress', models.IntegerField(blank=True, db_column=b'PROGRESS', null=True)),
+                ('user', models.CharField(blank=True, db_column=b'USER', max_length=255, null=True)),
+                ('parentjobid', models.CharField(blank=True, db_column=b'PARENTJOBID', max_length=255, null=True)),
+                ('resname', models.CharField(blank=True, db_column=b'RESNAME', max_length=255, null=True)),
+            ],
+            options={
+                'db_table': 'CATALOG_JOB',
+            },
+        ),
+        migrations.CreateModel(
+            name='JobStatusModel',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('indexid', models.IntegerField(db_column=b'INDEXID')),
+                ('jobid', models.CharField(db_column=b'JOBID', max_length=255)),
+                ('status', models.CharField(db_column=b'STATUS', max_length=255)),
+                ('progress', models.IntegerField(blank=True, db_column=b'PROGRESS', null=True)),
+                ('descp', models.TextField(db_column=b'DESCP', max_length=65535)),
+                ('errcode', models.CharField(blank=True, db_column=b'ERRCODE', max_length=255, null=True)),
+                ('addtime', models.CharField(blank=True, db_column=b'ADDTIME', max_length=255, null=True)),
+            ],
+            options={
+                'db_table': 'CATALOG_JOB_STATUS',
+            },
+        ),
+        migrations.CreateModel(
+            name='NsdmSubscriptionModel',
+            fields=[
+                ('subscriptionid', models.CharField(db_column=b'SUBSCRIPTIONID', max_length=255, primary_key=True, serialize=False)),
+                ('notificationTypes', models.TextField(db_column=b'NOTIFICATIONTYPES', null=True)),
+                ('auth_info', models.TextField(db_column=b'AUTHINFO', null=True)),
+                ('callback_uri', models.CharField(db_column=b'CALLBACKURI', max_length=255)),
+                ('nsdInfoId', models.TextField(db_column=b'NSDINFOID', null=True)),
+                ('nsdId', models.TextField(db_column=b'NSDID', null=True)),
+                ('nsdName', models.TextField(db_column=b'NSDNAME', null=True)),
+                ('nsdVersion', models.TextField(db_column=b'NSDVERSION', null=True)),
+                ('nsdDesigner', models.TextField(db_column=b'NSDDESIGNER', null=True)),
+                ('nsdInvariantId', models.TextField(db_column=b'NSDINVARIANTID', null=True)),
+                ('vnfPkgIds', models.TextField(db_column=b'VNFPKGIDS', null=True)),
+                ('pnfdInfoIds', models.TextField(db_column=b'PNFDINFOIDS', null=True)),
+                ('nestedNsdInfoIds', models.TextField(db_column=b'NESTEDNSDINFOIDS', null=True)),
+                ('nsdOnboardingState', models.TextField(db_column=b'NSDONBOARDINGSTATE', null=True)),
+                ('nsdOperationalState', models.TextField(db_column=b'NSDOPERATIONALSTATE', null=True)),
+                ('nsdUsageState', models.TextField(db_column=b'NSDUSAGESTATE', null=True)),
+                ('pnfdId', models.TextField(db_column=b'PNFDID', null=True)),
+                ('pnfdName', models.TextField(db_column=b'PNFDNAME', null=True)),
+                ('pnfdVersion', models.TextField(db_column=b'PNFDVERSION', null=True)),
+                ('pnfdProvider', models.TextField(db_column=b'PNFDPROVIDER', null=True)),
+                ('pnfdInvariantId', models.TextField(db_column=b'PNFDINVARIANTID', null=True)),
+                ('pnfdOnboardingState', models.TextField(db_column=b'PNFDONBOARDINGSTATE', null=True)),
+                ('pnfdUsageState', models.TextField(db_column=b'PNFDUSAGESTATE', null=True)),
+                ('links', models.TextField(db_column=b'LINKS')),
+            ],
+            options={
+                'db_table': 'CATALOG_NSDM_SUBSCRIPTION',
+            },
+        ),
+        migrations.CreateModel(
+            name='NSPackageModel',
+            fields=[
+                ('nsPackageId', models.CharField(db_column=b'NSPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+                ('nsPackageUri', models.CharField(blank=True, db_column=b'NSPACKAGEURI', max_length=300, null=True)),
+                ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
+                ('sdcCsarId', models.CharField(blank=True, db_column=b'SDCCSARID', max_length=50, null=True)),
+                ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
+                ('operationalState', models.CharField(blank=True, db_column=b'OPERATIONALSTATE', max_length=20, null=True)),
+                ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
+                ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
+                ('nsdId', models.CharField(blank=True, db_column=b'NSDID', max_length=50, null=True)),
+                ('invariantId', models.CharField(blank=True, db_column=b'INVARIANTID', max_length=50, null=True)),
+                ('nsdName', models.CharField(blank=True, db_column=b'NSDNAME', max_length=50, null=True)),
+                ('nsdDesginer', models.CharField(blank=True, db_column=b'NSDDESIGNER', max_length=50, null=True)),
+                ('nsdDescription', models.CharField(blank=True, db_column=b'NSDDESCRIPTION', max_length=100, null=True)),
+                ('nsdVersion', models.CharField(blank=True, db_column=b'NSDVERSION', max_length=20, null=True)),
+                ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
+                ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
+                ('nsdModel', models.TextField(blank=True, db_column=b'NSDMODEL', max_length=65535, null=True)),
+            ],
+            options={
+                'db_table': 'CATALOG_NSPACKAGE',
+            },
+        ),
+        migrations.CreateModel(
+            name='PnfPackageModel',
+            fields=[
+                ('pnfPackageId', models.CharField(db_column=b'PNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+                ('pnfPackageUri', models.CharField(blank=True, db_column=b'PNFPACKAGEURI', max_length=300, null=True)),
+                ('sdcCSARUri', models.CharField(blank=True, db_column=b'SDCCSARURI', max_length=300, null=True)),
+                ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
+                ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
+                ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
+                ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
+                ('pnfdId', models.CharField(blank=True, db_column=b'PNFDID', max_length=50, null=True)),
+                ('pnfVendor', models.CharField(blank=True, db_column=b'VENDOR', max_length=50, null=True)),
+                ('pnfdProductName', models.CharField(blank=True, db_column=b'PNFDPRODUCTNAME', max_length=50, null=True)),
+                ('pnfdVersion', models.CharField(blank=True, db_column=b'PNFDVERSION', max_length=20, null=True)),
+                ('pnfSoftwareVersion', models.CharField(blank=True, db_column=b'PNFSOFTWAREVERSION', max_length=20, null=True)),
+                ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
+                ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
+                ('pnfdModel', models.TextField(blank=True, db_column=b'PNFDMODEL', max_length=65535, null=True)),
+                ('pnfdName', models.TextField(blank=True, db_column=b'PNFDNAME', max_length=65535, null=True)),
+            ],
+            options={
+                'db_table': 'CATALOG_PNFPACKAGE',
+            },
+        ),
+        migrations.CreateModel(
+            name='ServicePackageModel',
+            fields=[
+                ('servicePackageId', models.CharField(db_column=b'SERVICEPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+                ('servicePackageUri', models.CharField(blank=True, db_column=b'SERVICEPACKAGEURI', max_length=300, null=True)),
+                ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
+                ('sdcCsarId', models.CharField(blank=True, db_column=b'SDCCSARID', max_length=50, null=True)),
+                ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
+                ('operationalState', models.CharField(blank=True, db_column=b'OPERATIONALSTATE', max_length=20, null=True)),
+                ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
+                ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
+                ('servicedId', models.CharField(blank=True, db_column=b'SERVICEDID', max_length=50, null=True)),
+                ('invariantId', models.CharField(blank=True, db_column=b'INVARIANTID', max_length=50, null=True)),
+                ('servicedName', models.CharField(blank=True, db_column=b'SERVICEDNAME', max_length=50, null=True)),
+                ('servicedDesigner', models.CharField(blank=True, db_column=b'SERVICEDDESIGNER', max_length=50, null=True)),
+                ('servicedDescription', models.CharField(blank=True, db_column=b'SERVICEDDESCRIPTION', max_length=100, null=True)),
+                ('servicedVersion', models.CharField(blank=True, db_column=b'SERVICEDVERSION', max_length=20, null=True)),
+                ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
+                ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
+                ('servicedModel', models.TextField(blank=True, db_column=b'SERVICEDMODEL', max_length=65535, null=True)),
+            ],
+            options={
+                'db_table': 'CATALOG_SERVICEPACKAGE',
+            },
+        ),
+        migrations.CreateModel(
+            name='SoftwareImageModel',
+            fields=[
+                ('imageid', models.CharField(db_column=b'IMAGEID', max_length=50, primary_key=True, serialize=False)),
+                ('containerFormat', models.CharField(db_column=b'CONTAINERFORMAT', max_length=20)),
+                ('diskFormat', models.CharField(db_column=b'DISKFORMAT', max_length=20)),
+                ('mindisk', models.CharField(db_column=b'MINDISK', max_length=20)),
+                ('minram', models.CharField(db_column=b'MINRAM', max_length=20)),
+                ('usermetadata', models.CharField(db_column=b'USAERMETADATA', max_length=1024)),
+                ('vnfPackageId', models.CharField(db_column=b'VNFPACKAGEID', max_length=50)),
+                ('filePath', models.CharField(db_column=b'FILEPATH', max_length=300)),
+                ('status', models.CharField(db_column=b'STATUS', max_length=10)),
+                ('vimid', models.CharField(db_column=b'VIMID', max_length=50)),
+            ],
+            options={
+                'db_table': 'CATALOG_SOFTWAREIMAGEMODEL',
+            },
+        ),
+        migrations.CreateModel(
+            name='VnfPackageModel',
+            fields=[
+                ('vnfPackageId', models.CharField(db_column=b'VNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+                ('vnfPackageUri', models.CharField(blank=True, db_column=b'VNFPACKAGEURI', max_length=300, null=True)),
+                ('SdcCSARUri', models.CharField(blank=True, db_column=b'SDCCSARURI', max_length=300, null=True)),
+                ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
+                ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
+                ('operationalState', models.CharField(blank=True, db_column=b'OPERATIONALSTATE', max_length=20, null=True)),
+                ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
+                ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
+                ('vnfdId', models.CharField(blank=True, db_column=b'VNFDID', max_length=50, null=True)),
+                ('vnfVendor', models.CharField(blank=True, db_column=b'VENDOR', max_length=50, null=True)),
+                ('vnfdProductName', models.CharField(blank=True, db_column=b'VNFDPRODUCTNAME', max_length=50, null=True)),
+                ('vnfdVersion', models.CharField(blank=True, db_column=b'VNFDVERSION', max_length=20, null=True)),
+                ('vnfSoftwareVersion', models.CharField(blank=True, db_column=b'VNFSOFTWAREVERSION', max_length=20, null=True)),
+                ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
+                ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
+                ('vnfdModel', models.TextField(blank=True, db_column=b'VNFDMODEL', max_length=65535, null=True)),
+            ],
+            options={
+                'db_table': 'CATALOG_VNFPACKAGE',
+            },
+        ),
+        migrations.CreateModel(
+            name='VnfPkgSubscriptionModel',
+            fields=[
+                ('subscription_id', models.CharField(db_column=b'SUBSCRIPTION_ID', max_length=255, primary_key=True, serialize=False)),
+                ('callback_uri', models.URLField(db_column=b'CALLBACK_URI', max_length=255)),
+                ('auth_info', models.TextField(db_column=b'AUTH_INFO')),
+                ('usage_states', models.TextField(db_column=b'USAGE_STATES')),
+                ('notification_types', models.TextField(db_column=b'NOTIFICATION_TYPES')),
+                ('vnfd_id', models.TextField(db_column=b'VNFD_ID')),
+                ('vnf_pkg_id', models.TextField(db_column=b'VNF_PKG_ID')),
+                ('operation_states', models.TextField(db_column=b'OPERATION_STATES')),
+                ('vnf_products_from_provider', models.TextField(db_column=b'VNF_PRODUCTS_FROM_PROVIDER')),
+                ('links', models.TextField(db_column=b'LINKS')),
+            ],
+            options={
+                'db_table': 'VNF_PKG_SUBSCRIPTION',
+            },
+        ),
+    ]
diff --git a/genericparser/pub/database/migrations/__init__.py b/genericparser/pub/database/migrations/__init__.py
new file mode 100644
index 0000000..0c847b7
--- /dev/null
+++ b/genericparser/pub/database/migrations/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2019 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
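
The migration above creates all of the CATALOG_* and subscription tables for the model classes that follow. A minimal sketch of applying it; the settings module name and the 'database' app label are assumptions for illustration, since the project's settings file is not part of this change:

    import os

    import django
    from django.core.management import call_command

    # 'genericparser.settings' is an assumed module name; the real settings
    # module lives elsewhere in the repository.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "genericparser.settings")
    django.setup()

    # 'database' is assumed to be the app label of genericparser/pub/database,
    # whose migrations directory holds 0001_initial.py above.
    call_command("migrate", "database")
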
diff --git a/genericparser/pub/database/models.py b/genericparser/pub/database/models.py
new file mode 100644
index 0000000..9f0b498
--- /dev/null
+++ b/genericparser/pub/database/models.py
@@ -0,0 +1,234 @@
+# Copyright 2016-2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.db import models
+
+
+class NSPackageModel(models.Model):
+    nsPackageId = models.CharField(db_column='NSPACKAGEID', primary_key=True, max_length=50)
+    nsPackageUri = models.CharField(db_column='NSPACKAGEURI', max_length=300, null=True, blank=True)
+    checksum = models.CharField(db_column='CHECKSUM', max_length=50, null=True, blank=True)  # checksum
+    sdcCsarId = models.CharField(db_column='SDCCSARID', max_length=50, null=True, blank=True)  # SdcCSARUri
+    onboardingState = models.CharField(db_column='ONBOARDINGSTATE', max_length=20, blank=True, null=True)
+    operationalState = models.CharField(db_column='OPERATIONALSTATE', max_length=20, blank=True, null=True)  # operationalState
+    usageState = models.CharField(db_column='USAGESTATE', max_length=20, blank=True, null=True)  # usageState
+    deletionPending = models.CharField(db_column='DELETIONPENDING', max_length=20, blank=True, null=True)  # deletionPending
+    nsdId = models.CharField(db_column='NSDID', max_length=50, blank=True, null=True)
+    invariantId = models.CharField(db_column='INVARIANTID', max_length=50, blank=True, null=True)  # nsdInvariantId
+    nsdName = models.CharField(db_column='NSDNAME', max_length=50, blank=True, null=True)
+    nsdDesginer = models.CharField(db_column='NSDDESIGNER', max_length=50, null=True, blank=True)
+    nsdDescription = models.CharField(db_column='NSDDESCRIPTION', max_length=100, null=True, blank=True)
+    nsdVersion = models.CharField(db_column='NSDVERSION', max_length=20, null=True, blank=True)
+    userDefinedData = models.TextField(db_column='USERDEFINEDDATA', max_length=1024, blank=True, null=True)  # userDefinedData
+    localFilePath = models.CharField(db_column='LOCALFILEPATH', max_length=300, null=True, blank=True)
+    nsdModel = models.TextField(db_column='NSDMODEL', max_length=65535, null=True, blank=True)
+
+    class Meta:
+        db_table = 'CATALOG_NSPACKAGE'
+
+
+class ServicePackageModel(models.Model):
+    servicePackageId = models.CharField(db_column='SERVICEPACKAGEID', primary_key=True, max_length=50)
+    servicePackageUri = models.CharField(db_column='SERVICEPACKAGEURI', max_length=300, null=True, blank=True)
+    checksum = models.CharField(db_column='CHECKSUM', max_length=50, null=True, blank=True)  # checksum
+    sdcCsarId = models.CharField(db_column='SDCCSARID', max_length=50, null=True, blank=True)  # SdcCSARUri
+    onboardingState = models.CharField(db_column='ONBOARDINGSTATE', max_length=20, blank=True, null=True)
+    operationalState = models.CharField(db_column='OPERATIONALSTATE', max_length=20, blank=True, null=True)  # operationalState
+    usageState = models.CharField(db_column='USAGESTATE', max_length=20, blank=True, null=True)  # usageState
+    deletionPending = models.CharField(db_column='DELETIONPENDING', max_length=20, blank=True, null=True)  # deletionPending
+    servicedId = models.CharField(db_column='SERVICEDID', max_length=50, blank=True, null=True)
+    invariantId = models.CharField(db_column='INVARIANTID', max_length=50, blank=True, null=True)  # servicedInvariantId
+    servicedName = models.CharField(db_column='SERVICEDNAME', max_length=50, blank=True, null=True)
+    servicedDesigner = models.CharField(db_column='SERVICEDDESIGNER', max_length=50, null=True, blank=True)
+    servicedDescription = models.CharField(db_column='SERVICEDDESCRIPTION', max_length=100, null=True, blank=True)
+    servicedVersion = models.CharField(db_column='SERVICEDVERSION', max_length=20, null=True, blank=True)
+    userDefinedData = models.TextField(db_column='USERDEFINEDDATA', max_length=1024, blank=True, null=True)  # userDefinedData
+    localFilePath = models.CharField(db_column='LOCALFILEPATH', max_length=300, null=True, blank=True)
+    servicedModel = models.TextField(db_column='SERVICEDMODEL', max_length=65535, null=True, blank=True)
+
+    class Meta:
+        db_table = 'CATALOG_SERVICEPACKAGE'
+
+
+class VnfPackageModel(models.Model):
+    # uuid = models.CharField(db_column='UUID', primary_key=True, max_length=255)
+    vnfPackageId = models.CharField(db_column='VNFPACKAGEID', primary_key=True, max_length=50)  # onboardedVnfPkgInfoId
+    vnfPackageUri = models.CharField(db_column='VNFPACKAGEURI', max_length=300, null=True, blank=True)  # downloadUri
+    SdcCSARUri = models.CharField(db_column='SDCCSARURI', max_length=300, null=True, blank=True)  # SdcCSARUri
+    checksum = models.CharField(db_column='CHECKSUM', max_length=50, null=True, blank=True)  # checksum
+    onboardingState = models.CharField(db_column='ONBOARDINGSTATE', max_length=20, blank=True, null=True)
+    operationalState = models.CharField(db_column='OPERATIONALSTATE', max_length=20, blank=True, null=True)  # operationalState
+    usageState = models.CharField(db_column='USAGESTATE', max_length=20, blank=True, null=True)  # usageState
+    deletionPending = models.CharField(db_column='DELETIONPENDING', max_length=20, blank=True, null=True)  # deletionPending
+    vnfdId = models.CharField(db_column='VNFDID', max_length=50, blank=True, null=True)  # vnfdId
+    vnfVendor = models.CharField(db_column='VENDOR', max_length=50, blank=True, null=True)  # vnfProvider
+    vnfdProductName = models.CharField(db_column='VNFDPRODUCTNAME', max_length=50, blank=True, null=True)  # vnfProductName
+    vnfdVersion = models.CharField(db_column='VNFDVERSION', max_length=20, blank=True, null=True)  # vnfdVersion
+    vnfSoftwareVersion = models.CharField(db_column='VNFSOFTWAREVERSION', max_length=20, blank=True, null=True)  # vnfSoftwareVersion
+    userDefinedData = models.TextField(db_column='USERDEFINEDDATA', max_length=1024, blank=True, null=True)  # userDefinedData
+    localFilePath = models.CharField(db_column='LOCALFILEPATH', max_length=300, null=True, blank=True)
+    vnfdModel = models.TextField(db_column='VNFDMODEL', max_length=65535, blank=True, null=True)  # vnfd
+
+    class Meta:
+        db_table = 'CATALOG_VNFPACKAGE'
+
+
+class PnfPackageModel(models.Model):
+    # uuid = models.CharField(db_column='UUID', primary_key=True, max_length=255)
+    pnfPackageId = models.CharField(db_column='PNFPACKAGEID', primary_key=True, max_length=50)  # onboardedPnfPkgInfoId
+    pnfPackageUri = models.CharField(db_column='PNFPACKAGEURI', max_length=300, null=True, blank=True)  # downloadUri
+    sdcCSARUri = models.CharField(db_column='SDCCSARURI', max_length=300, null=True, blank=True)  # sdcCSARUri
+    checksum = models.CharField(db_column='CHECKSUM', max_length=50, null=True, blank=True)  # checksum
+    onboardingState = models.CharField(db_column='ONBOARDINGSTATE', max_length=20, blank=True, null=True)
+    usageState = models.CharField(db_column='USAGESTATE', max_length=20, blank=True, null=True)  # usageState
+    deletionPending = models.CharField(db_column='DELETIONPENDING', max_length=20, blank=True, null=True)  # deletionPending
+    pnfdId = models.CharField(db_column='PNFDID', max_length=50, blank=True, null=True)  # pnfdId
+    pnfVendor = models.CharField(db_column='VENDOR', max_length=50, blank=True, null=True)  # pnfProvider
+    pnfdProductName = models.CharField(db_column='PNFDPRODUCTNAME', max_length=50, blank=True, null=True)  # pnfProductName
+    pnfdVersion = models.CharField(db_column='PNFDVERSION', max_length=20, blank=True, null=True)  # pnfdVersion
+    pnfSoftwareVersion = models.CharField(db_column='PNFSOFTWAREVERSION', max_length=20, blank=True, null=True)  # pnfSoftwareVersion
+    userDefinedData = models.TextField(db_column='USERDEFINEDDATA', max_length=1024, blank=True, null=True)  # userDefinedData
+    localFilePath = models.CharField(db_column='LOCALFILEPATH', max_length=300, null=True, blank=True)
+    pnfdModel = models.TextField(db_column='PNFDMODEL', max_length=65535, blank=True, null=True)  # pnfd
+    pnfdName = models.TextField(db_column='PNFDNAME', max_length=65535, blank=True, null=True)  # pnfd_name
+
+    class Meta:
+        db_table = 'CATALOG_PNFPACKAGE'
+
+
+class SoftwareImageModel(models.Model):
+    imageid = models.CharField(db_column='IMAGEID', primary_key=True, max_length=50)
+    containerFormat = models.CharField(db_column='CONTAINERFORMAT', max_length=20)
+    diskFormat = models.CharField(db_column='DISKFORMAT', max_length=20)
+    mindisk = models.CharField(db_column='MINDISK', max_length=20)
+    minram = models.CharField(db_column='MINRAM', max_length=20)
+    usermetadata = models.CharField(db_column='USAERMETADATA', max_length=1024)
+    vnfPackageId = models.CharField(db_column='VNFPACKAGEID', max_length=50)
+    filePath = models.CharField(db_column='FILEPATH', max_length=300)
+    status = models.CharField(db_column='STATUS', max_length=10)
+    vimid = models.CharField(db_column='VIMID', max_length=50)
+    # filetype = models.CharField(db_column='FILETYPE', max_length=2)
+    # vimuser = models.CharField(db_column='VIMUSER', max_length=50)
+    # tenant = models.CharField(db_column='TENANT', max_length=50)
+    # purpose = models.CharField(db_column='PURPOSE', max_length=1000)
+
+    class Meta:
+        db_table = 'CATALOG_SOFTWAREIMAGEMODEL'
+
+
+class JobModel(models.Model):
+    jobid = models.CharField(db_column='JOBID', primary_key=True, max_length=255)
+    jobtype = models.CharField(db_column='JOBTYPE', max_length=255)
+    jobaction = models.CharField(db_column='JOBACTION', max_length=255)
+    resid = models.CharField(db_column='RESID', max_length=255)
+    status = models.IntegerField(db_column='STATUS', null=True, blank=True)
+    starttime = models.CharField(db_column='STARTTIME', max_length=255, null=True, blank=True)
+    endtime = models.CharField(db_column='ENDTIME', max_length=255, null=True, blank=True)
+    progress = models.IntegerField(db_column='PROGRESS', null=True, blank=True)
+    user = models.CharField(db_column='USER', max_length=255, null=True, blank=True)
+    parentjobid = models.CharField(db_column='PARENTJOBID', max_length=255, null=True, blank=True)
+    resname = models.CharField(db_column='RESNAME', max_length=255, null=True, blank=True)
+
+    class Meta:
+        db_table = 'CATALOG_JOB'
+
+    def toJSON(self):
+        import json
+        return json.dumps(dict([(attr, getattr(self, attr)) for attr in [f.name for f in self._meta.fields]]))
+
+
+class JobStatusModel(models.Model):
+    indexid = models.IntegerField(db_column='INDEXID')
+    jobid = models.CharField(db_column='JOBID', max_length=255)
+    status = models.CharField(db_column='STATUS', max_length=255)
+    progress = models.IntegerField(db_column='PROGRESS', null=True, blank=True)
+    descp = models.TextField(db_column='DESCP', max_length=65535)
+    errcode = models.CharField(db_column='ERRCODE', max_length=255, null=True, blank=True)
+    addtime = models.CharField(db_column='ADDTIME', max_length=255, null=True, blank=True)
+
+    class Meta:
+        db_table = 'CATALOG_JOB_STATUS'
+
+    def toJSON(self):
+        import json
+        return json.dumps(dict([(attr, getattr(self, attr)) for attr in [f.name for f in self._meta.fields]]))
+
+
+class NsdmSubscriptionModel(models.Model):
+    subscriptionid = models.CharField(db_column='SUBSCRIPTIONID', max_length=255, primary_key=True)
+    notificationTypes = models.TextField(db_column='NOTIFICATIONTYPES', null=True)
+    auth_info = models.TextField(db_column='AUTHINFO', null=True)
+    callback_uri = models.CharField(db_column='CALLBACKURI', max_length=255)
+    nsdInfoId = models.TextField(db_column='NSDINFOID', null=True)
+    nsdId = models.TextField(db_column='NSDID', null=True)
+    nsdName = models.TextField(db_column='NSDNAME', null=True)
+    nsdVersion = models.TextField(db_column='NSDVERSION', null=True)
+    nsdDesigner = models.TextField(db_column='NSDDESIGNER', null=True)
+    nsdInvariantId = models.TextField(db_column='NSDINVARIANTID', null=True)
+    vnfPkgIds = models.TextField(db_column='VNFPKGIDS', null=True)
+    pnfdInfoIds = models.TextField(db_column='PNFDINFOIDS', null=True)
+    nestedNsdInfoIds = models.TextField(db_column='NESTEDNSDINFOIDS', null=True)
+    nsdOnboardingState = models.TextField(db_column='NSDONBOARDINGSTATE', null=True)
+    nsdOperationalState = models.TextField(db_column='NSDOPERATIONALSTATE', null=True)
+    nsdUsageState = models.TextField(db_column='NSDUSAGESTATE', null=True)
+    pnfdId = models.TextField(db_column='PNFDID', null=True)
+    pnfdName = models.TextField(db_column='PNFDNAME', null=True)
+    pnfdVersion = models.TextField(db_column='PNFDVERSION', null=True)
+    pnfdProvider = models.TextField(db_column='PNFDPROVIDER', null=True)
+    pnfdInvariantId = models.TextField(db_column='PNFDINVARIANTID', null=True)
+    pnfdOnboardingState = models.TextField(db_column='PNFDONBOARDINGSTATE', null=True)
+    pnfdUsageState = models.TextField(db_column='PNFDUSAGESTATE', null=True)
+    links = models.TextField(db_column='LINKS')
+
+    class Meta:
+        db_table = 'CATALOG_NSDM_SUBSCRIPTION'
+
+    def toJSON(self):
+        import json
+        return json.dumps(dict([(attr, getattr(self, attr)) for attr in [f.name for f in self._meta.fields]]))
+
+
+class VnfPkgSubscriptionModel(models.Model):
+    subscription_id = models.CharField(max_length=255, primary_key=True, db_column='SUBSCRIPTION_ID')
+    callback_uri = models.URLField(db_column="CALLBACK_URI", max_length=255)
+    auth_info = models.TextField(db_column="AUTH_INFO")
+    usage_states = models.TextField(db_column="USAGE_STATES")
+    notification_types = models.TextField(db_column="NOTIFICATION_TYPES")
+    vnfd_id = models.TextField(db_column="VNFD_ID")
+    vnf_pkg_id = models.TextField(db_column="VNF_PKG_ID")
+    operation_states = models.TextField(db_column="OPERATION_STATES")
+    vnf_products_from_provider = \
+        models.TextField(db_column="VNF_PRODUCTS_FROM_PROVIDER")
+    links = models.TextField(db_column="LINKS")
+
+    class Meta:
+        db_table = 'VNF_PKG_SUBSCRIPTION'
+
+    def toDict(self):
+        import json
+        subscription_obj = {
+            "id": self.subscription_id,
+            "callbackUri": self.callback_uri,
+            "_links": json.loads(self.links)
+        }
+        filter_obj = {
+            "notificationTypes": json.loads(self.notification_types),
+            "vnfdId": json.loads(self.vnfd_id),
+            "vnfPkgId": json.loads(self.vnf_pkg_id),
+            "operationalState": json.loads(self.operation_states),
+            "usageState": json.loads(self.usage_states),
+            "vnfProductsFromProviders": json.loads(self.vnf_products_from_provider)
+        }
+        subscription_obj["filter"] = filter_obj
+        return subscription_obj
diff --git a/genericparser/pub/exceptions.py b/genericparser/pub/exceptions.py
new file mode 100644
index 0000000..626e567
--- /dev/null
+++ b/genericparser/pub/exceptions.py
@@ -0,0 +1,53 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class GenericparserException(Exception):
+    pass
+
+
+class ResourceNotFoundException(GenericparserException):
+    pass
+
+
+class PackageNotFoundException(GenericparserException):
+    pass
+
+
+class PackageHasExistsException(GenericparserException):
+    pass
+
+
+class VnfPkgSubscriptionException(GenericparserException):
+    pass
+
+
+class VnfPkgDuplicateSubscriptionException(GenericparserException):
+    pass
+
+
+class SubscriptionDoesNotExistsException(GenericparserException):
+    pass
+
+
+class NsdmBadRequestException(GenericparserException):
+    pass
+
+
+class NsdmDuplicateSubscriptionException(GenericparserException):
+    pass
+
+
+class ArtifactNotFoundException(GenericparserException):
+    pass
diff --git a/genericparser/pub/msapi/__init__.py b/genericparser/pub/msapi/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/genericparser/pub/msapi/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/pub/msapi/extsys.py b/genericparser/pub/msapi/extsys.py
new file mode 100644
index 0000000..639513f
--- /dev/null
+++ b/genericparser/pub/msapi/extsys.py
@@ -0,0 +1,175 @@
+# Copyright 2016 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import uuid
+
+from genericparser.pub.config.config import AAI_BASE_URL, AAI_USER, AAI_PASSWD
+from genericparser.pub.exceptions import GenericparserException
+from genericparser.pub.utils import restcall
+from genericparser.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
+def call_aai(resource, method, content=''):
+    additional_headers = {
+        'X-FromAppId': 'MODEL-GENERICPARSER',
+        'X-TransactionId': str(uuid.uuid1())
+    }
+    return restcall.call_req(AAI_BASE_URL,
+                             AAI_USER,
+                             AAI_PASSWD,
+                             restcall.rest_no_auth,
+                             resource,
+                             method,
+                             content,
+                             additional_headers)
+
+
+def get_vims():
+    ret = call_aai("/cloud-infrastructure/cloud-regions?depth=all", "GET")
+    if ret[0] != 0:
+        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
+        raise GenericparserException("Failed to query vims from extsys.")
+    # convert vim_info_aai to internal vim_info
+    vims_aai = json.JSONDecoder().decode(ret[1])
+    vims_aai = ignore_case_get(vims_aai, "cloud-region")
+    vims_info = []
+    for vim in vims_aai:
+        vim = convert_vim_info(vim)
+        vims_info.append(vim)
+    return vims_info
+
+
+def get_vim_by_id(vim_id):
+    cloud_owner, cloud_region = split_vim_to_owner_region(vim_id)
+    ret = call_aai("/cloud-infrastructure/cloud-regions/cloud-region/%s/%s?depth=all"
+                   % (cloud_owner, cloud_region), "GET")
+    if ret[0] != 0:
+        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
+        raise GenericparserException("Failed to query vim(%s) from extsys." % vim_id)
+    # convert vim_info_aai to internal vim_info
+    vim_info_aai = json.JSONDecoder().decode(ret[1])
+    vim_info = convert_vim_info(vim_info_aai)
+    return vim_info
+
+
+def split_vim_to_owner_region(vim_id):
+    split_vim = vim_id.split('_')
+    cloud_owner = split_vim[0]
+    cloud_region = "".join(split_vim[1:])
+    return cloud_owner, cloud_region
+
+
+def convert_vim_info(vim_info_aai):
+    vim_id = vim_info_aai["cloud-owner"] + "_" + vim_info_aai["cloud-region-id"]
+    esr_system_info = ignore_case_get(ignore_case_get(vim_info_aai, "esr-system-info-list"), "esr-system-info")
+    vim_info = {
+        "vimId": vim_id,
+        "name": vim_id,
+        "url": ignore_case_get(esr_system_info[0], "service-url"),
+        "userName": ignore_case_get(esr_system_info[0], "user-name"),
+        "password": ignore_case_get(esr_system_info[0], "password"),
+        "tenant": ignore_case_get(esr_system_info[0], "default-tenant"),
+        "vendor": ignore_case_get(esr_system_info[0], "vendor"),
+        "version": ignore_case_get(esr_system_info[0], "version"),
+        "description": "vim",
+        "domain": "",
+        "type": ignore_case_get(esr_system_info[0], "type"),
+        "createTime": "2016-07-18 12:22:53"
+    }
+    return vim_info
+
+
+def get_sdn_controller_by_id(sdn_ontroller_id):
+    ret = call_aai("/external-system/esr-thirdparty-sdnc-list/esr-thirdparty-sdnc/%s?depth=all"
+                   % sdn_ontroller_id, "GET")
+    if ret[0] != 0:
+        logger.error("Failed to query sdn ontroller(%s) from extsys. detail is %s.", sdn_ontroller_id, ret[1])
+        raise GenericparserException("Failed to query sdn ontroller(%s) from extsys." % sdn_ontroller_id)
+    # convert vim_info_aai to internal vim_info
+    sdnc_info_aai = json.JSONDecoder().decode(ret[1])
+    sdnc_info = convert_sdnc_info(sdnc_info_aai)
+    return sdnc_info
+
+
+def convert_sdnc_info(sdnc_info_aai):
+    esr_system_info = ignore_case_get(ignore_case_get(sdnc_info_aai, "esr-system-info-list"), "esr-system-info")
+    sdnc_info = {
+        "sdnControllerId": sdnc_info_aai["thirdparty-sdnc-id"],
+        "name": sdnc_info_aai["thirdparty-sdnc-id"],
+        "url": ignore_case_get(esr_system_info[0], "service-url"),
+        "userName": ignore_case_get(esr_system_info[0], "user-name"),
+        "password": ignore_case_get(esr_system_info[0], "password"),
+        "vendor": ignore_case_get(esr_system_info[0], "vendor"),
+        "version": ignore_case_get(esr_system_info[0], "version"),
+        "description": "",
+        "protocol": ignore_case_get(esr_system_info[0], "protocal"),
+        "productName": ignore_case_get(sdnc_info_aai, "product-name"),
+        "type": ignore_case_get(esr_system_info[0], "type"),
+        "createTime": "2016-07-18 12:22:53"
+    }
+    return sdnc_info
+
+
+def get_vnfm_by_id(vnfm_inst_id):
+    uri = "/external-system/esr-vnfm-list/esr-vnfm/%s?depth=all" % vnfm_inst_id
+    ret = call_aai(uri, "GET")
+    if ret[0] > 0:
+        logger.error('Send get VNFM information request to extsys failed.')
+        raise GenericparserException('Send get VNFM information request to extsys failed.')
+    # convert vnfm_info_aai to internal vnfm_info
+    vnfm_info_aai = json.JSONDecoder().decode(ret[1])
+    vnfm_info = convert_vnfm_info(vnfm_info_aai)
+    return vnfm_info
+
+
+def convert_vnfm_info(vnfm_info_aai):
+    esr_system_info = ignore_case_get(ignore_case_get(vnfm_info_aai, "esr-system-info-list"), "esr-system-info")
+    vnfm_info = {
+        "vnfmId": vnfm_info_aai["vnfm-id"],
+        "name": vnfm_info_aai["vnfm-id"],
+        "type": ignore_case_get(esr_system_info[0], "type"),
+        "vimId": vnfm_info_aai["vim-id"],
+        "vendor": ignore_case_get(esr_system_info[0], "vendor"),
+        "version": ignore_case_get(esr_system_info[0], "version"),
+        "description": "vnfm",
+        "certificateUrl": vnfm_info_aai["certificate-url"],
+        "url": ignore_case_get(esr_system_info[0], "service-url"),
+        "userName": ignore_case_get(esr_system_info[0], "user-name"),
+        "password": ignore_case_get(esr_system_info[0], "password"),
+        "createTime": "2016-07-06 15:33:18"
+    }
+    return vnfm_info
+
+
+def select_vnfm(vnfm_type, vim_id):
+    uri = "/external-system/esr-vnfm-list?depth=all"
+    ret = call_aai(uri, "GET")
+    if ret[0] > 0:
+        logger.error("Failed to call %s: %s", uri, ret[1])
+        raise GenericparserException('Failed to get vnfms from extsys.')
+    vnfms = json.JSONDecoder().decode(ret[1])
+    vnfms = ignore_case_get(vnfms, "esr-vnfm")
+    for vnfm in vnfms:
+        esr_system_info = ignore_case_get(vnfm, "esr-system-info")
+        type = ignore_case_get(esr_system_info, "type")
+        vimId = vnfm["vnfm-id"]
+        if type == vnfm_type and vimId == vim_id:
+            # convert vnfm_info_aai to internal vnfm_info
+            vnfm = convert_vnfm_info(vnfm)
+            return vnfm
+    raise GenericparserException('No vnfm found with %s in vim(%s)' % (vnfm_type, vim_id))
diff --git a/genericparser/pub/msapi/sdc.py b/genericparser/pub/msapi/sdc.py
new file mode 100644
index 0000000..f7b9271
--- /dev/null
+++ b/genericparser/pub/msapi/sdc.py
@@ -0,0 +1,116 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+
+from genericparser.pub.exceptions import GenericparserException
+from genericparser.pub.utils import restcall
+from genericparser.pub.utils import fileutil
+from genericparser.pub.config.config import SDC_BASE_URL, SDC_USER, SDC_PASSWD
+
+logger = logging.getLogger(__name__)
+
+ASSETTYPE_RESOURCES = "resources"
+ASSETTYPE_SERVICES = "services"
+DISTRIBUTED = "DISTRIBUTED"
+
+
+def call_sdc(resource, method, content=''):
+    additional_headers = {
+        'X-ECOMP-InstanceID': 'VFC',
+    }
+    return restcall.call_req(base_url=SDC_BASE_URL,
+                             user=SDC_USER,
+                             passwd=SDC_PASSWD,
+                             auth_type=restcall.rest_no_auth,
+                             resource=resource,
+                             method=method,
+                             content=content,
+                             additional_headers=additional_headers)
+
+
+"""
+sample of return value
+[
+    {
+        "uuid": "c94490a0-f7ef-48be-b3f8-8d8662a37236",
+        "invariantUUID": "63eaec39-ffbe-411c-a838-448f2c73f7eb",
+        "name": "underlayvpn",
+        "version": "2.0",
+        "toscaModelURL": "/sdc/v1/genericparser/resources/c94490a0-f7ef-48be-b3f8-8d8662a37236/toscaModel",
+        "category": "Volte",
+        "subCategory": "VolteVF",
+        "resourceType": "VF",
+        "lifecycleState": "CERTIFIED",
+        "lastUpdaterUserId": "jh0003"
+    }
+]
+"""
+
+
+def get_artifacts(asset_type):
+    resource = "/sdc/v1/genericparser/{assetType}"
+    resource = resource.format(assetType=asset_type)
+    ret = call_sdc(resource, "GET")
+    if ret[0] != 0:
+        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
+        raise GenericparserException("Failed to query artifacts(%s) from sdc." % asset_type)
+    return json.JSONDecoder().decode(ret[1])
+
+
+def get_artifact(asset_type, csar_id):
+    artifacts = get_artifacts(asset_type)
+    for artifact in artifacts:
+        if artifact["uuid"] == csar_id:
+            if asset_type == ASSETTYPE_SERVICES and \
+                    artifact.get("distributionStatus", None) != DISTRIBUTED:
+                raise GenericparserException("The artifact (%s,%s) is not distributed from sdc." % (asset_type, csar_id))
+            else:
+                return artifact
+    raise GenericparserException("Failed to query artifact(%s,%s) from sdc." % (asset_type, csar_id))
+
+
+def delete_artifact(asset_type, asset_id, artifact_id):
+    resource = "/sdc/v1/genericparser/{assetType}/{uuid}/artifacts/{artifactUUID}"
+    resource = resource.format(assetType=asset_type, uuid=asset_id, artifactUUID=artifact_id)
+    ret = call_sdc(resource, "DELETE")
+    if ret[0] != 0:
+        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
+        raise GenericparserException("Failed to delete artifacts(%s) from sdc." % artifact_id)
+    return json.JSONDecoder().decode(ret[1])
+
+
+def download_artifacts(download_url, local_path, file_name):
+    additional_headers = {
+        'X-ECOMP-InstanceID': 'VFC',
+        'accept': 'application/octet-stream'
+    }
+    ret = restcall.call_req(base_url=SDC_BASE_URL,
+                            user=SDC_USER,
+                            passwd=SDC_PASSWD,
+                            auth_type=restcall.rest_no_auth,
+                            resource=download_url,
+                            method="GET",
+                            additional_headers=additional_headers)
+    if ret[0] != 0:
+        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
+        raise GenericparserException("Failed to download %s from sdc." % download_url)
+    fileutil.make_dirs(local_path)
+    local_file_name = os.path.join(local_path, file_name)
+    local_file = open(local_file_name, 'wb')
+    local_file.write(ret[1])
+    local_file.close()
+    return local_file_name
diff --git a/genericparser/pub/ssl/cert/foobar.crt b/genericparser/pub/ssl/cert/foobar.crt
new file mode 100644
index 0000000..7ab6dd3
--- /dev/null
+++ b/genericparser/pub/ssl/cert/foobar.crt
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDRDCCAiwCCQD8zmUqCHOp2zANBgkqhkiG9w0BAQsFADBjMQswCQYDVQQGEwJD
+TjEQMA4GA1UECAwHQmVpSmluZzEQMA4GA1UEBwwHQmVpSmluZzENMAsGA1UECgwE
+Q21jYzESMBAGA1UECwwJQ21jYy1vbmFwMQ0wCwYDVQQDDARDbWNjMCAXDTE5MDMy
+NjAyNTI0N1oYDzIxMTkwMzAyMDI1MjQ3WjBjMQswCQYDVQQGEwJDTjEQMA4GA1UE
+CAwHQmVpSmluZzEQMA4GA1UEBwwHQmVpSmluZzENMAsGA1UECgwEQ21jYzESMBAG
+A1UECwwJQ21jYy1vbmFwMQ0wCwYDVQQDDARDbWNjMIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEA4DurchTgEw/A1y/Q5gpSSJTLC+KFOV4Vmbz2hlvOGLwV
+NIX1+r7DpaiJTGjEKLCtGsD2tGm69KiUX9FBY1CStnwK2R4wA5NKW+ZKQLd3sRTc
+Hl+2bLFk7E5KvmKZZM4xhsN3ey7Ia8H0sSfKiGlxB1hZI2HibRNy8GWyi95j8MkP
+v+H7HbJlX1kIKb7p2y8aG8AnAzBWikJFcQ1y3bJA2r31wOht63pIekwh+nntt5u+
+Yh/STXHiAe2gT7b9x6RAn09tC6TsBKzdZ4ZKrBLfRwPv6+cbDLcqkhbPukqaFaEs
+rDCLhuWX10sGLEsqXULDwZRoYxTUueLek9v+/8f5EwIDAQABMA0GCSqGSIb3DQEB
+CwUAA4IBAQCenowNpFiy9vH18+9PL4rZjZ1NH+frGqsWvDiyHPnLpneCLOuiXvgv
+kcuLJDYatc6vTlXkJElxwF1fCaJEn6dNq3WtQxdJjhXidAKx8Hsf1Nxkwbvmahv2
+TIWV/FMvop+9SdonDBGZojrYKRsY3EilQf+7/rGEM52HE8S3yE8CCe9xTZSYUs1B
+B8CzOPBVU7SWSRSLUKfdRhjyl4Rqsslxzal+8A36yViHBPhJgmDRoVWVR+E289IH
+FCQ0d8qVvdTGkM79dvZrEH9WSzPwlTR0NSkBMWTNLcWyP8caDjg+fbSVOF+s+sd/
+bLuAyHyeXUzClJx6CA5zwLZz5K5SVxw+
+-----END CERTIFICATE-----
diff --git a/genericparser/pub/ssl/cert/foobar.csr b/genericparser/pub/ssl/cert/foobar.csr
new file mode 100644
index 0000000..30b381b
--- /dev/null
+++ b/genericparser/pub/ssl/cert/foobar.csr
@@ -0,0 +1,18 @@
+-----BEGIN CERTIFICATE REQUEST-----
+MIIC1DCCAbwCAQAwYzELMAkGA1UEBhMCQ04xEDAOBgNVBAgMB0JlaUppbmcxEDAO
+BgNVBAcMB0JlaUppbmcxDTALBgNVBAoMBENtY2MxEjAQBgNVBAsMCUNtY2Mtb25h
+cDENMAsGA1UEAwwEQ21jYzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AOA7q3IU4BMPwNcv0OYKUkiUywvihTleFZm89oZbzhi8FTSF9fq+w6WoiUxoxCiw
+rRrA9rRpuvSolF/RQWNQkrZ8CtkeMAOTSlvmSkC3d7EU3B5ftmyxZOxOSr5imWTO
+MYbDd3suyGvB9LEnyohpcQdYWSNh4m0TcvBlsoveY/DJD7/h+x2yZV9ZCCm+6dsv
+GhvAJwMwVopCRXENct2yQNq99cDobet6SHpMIfp57bebvmIf0k1x4gHtoE+2/cek
+QJ9PbQuk7ASs3WeGSqwS30cD7+vnGwy3KpIWz7pKmhWhLKwwi4bll9dLBixLKl1C
+w8GUaGMU1Lni3pPb/v/H+RMCAwEAAaAsMBMGCSqGSIb3DQEJAjEGDARDbWNjMBUG
+CSqGSIb3DQEJBzEIDAYxMjM0NTYwDQYJKoZIhvcNAQELBQADggEBAGr8XkV5G9bK
+lPc3jUvmS+KSg9UB1wrvf6kQUGDvCvXqZCGw1mRZekN4rH5c1fk9iLwLqDkWDnNo
+79jzAWV76U62GarTOng32TLTItxI/EeUhQFCf+AI/YcJEdHf8HGrDuvC0iSz6/9q
+Fe5HhVSO7zsHdP28J05wTyII+2k4ecAj3oXutUnGCBg0nlexDmxAZoe8x4XHpqkt
+tEKquZdq3l17+v5DKlKwczQcXUBC1yGw0ki67U5w9fVKzpAf7Frr7YnbGS35i5Pv
+ny4SlXPW167hRQKXCniY5QtCocP+GoPD+81uWwf+bjHyAZ3HCd532YFgXW01yJhM
+imRDxx2gDds=
-----END CERTIFICATE REQUEST-----
diff --git a/genericparser/pub/ssl/cert/foobar.key b/genericparser/pub/ssl/cert/foobar.key
new file mode 100644
index 0000000..266f502
--- /dev/null
+++ b/genericparser/pub/ssl/cert/foobar.key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEA4DurchTgEw/A1y/Q5gpSSJTLC+KFOV4Vmbz2hlvOGLwVNIX1
++r7DpaiJTGjEKLCtGsD2tGm69KiUX9FBY1CStnwK2R4wA5NKW+ZKQLd3sRTcHl+2
+bLFk7E5KvmKZZM4xhsN3ey7Ia8H0sSfKiGlxB1hZI2HibRNy8GWyi95j8MkPv+H7
+HbJlX1kIKb7p2y8aG8AnAzBWikJFcQ1y3bJA2r31wOht63pIekwh+nntt5u+Yh/S
+TXHiAe2gT7b9x6RAn09tC6TsBKzdZ4ZKrBLfRwPv6+cbDLcqkhbPukqaFaEsrDCL
+huWX10sGLEsqXULDwZRoYxTUueLek9v+/8f5EwIDAQABAoIBAQCL+dPBqHRkkc3w
+xsGiCMlq06+Y4LQHpsrXKNW/8+lJGYgnPITcHV+mtvnWgAQL3paA//pBj0sM1Xui
+AM/PvomHbxGajbStVrHxgmXR1nXaTkpGj7siSO7WcN1J0eUtv2W9WaHpfL/SPMaS
+HGPbGe9cBXPHmaAuNRjoJqP5mj9LHB0SebJImjiaCYsUkBgC0Ooo4UuwOXLYO/ak
+gZrbM8WwY21rRVc3uDyg5Ez8gxbFG3L39t26gpqBYosqNlPe7/JVkTpxUKk1Allf
+fAJNyfpS2CuY+nQWtCleJFtF1Yq9jwfPvtNUTrXeJq97xFqSIRnJbygttsokbPto
+tLqB4rSBAoGBAPPgidT0KyfYVUaWNEXtOOJyh3MCk0ssalRKf+Dap9J9Bgpjldbu
+/tBBrrbxSEAieXe8gKDwgDY2qBcsUUvEY+EWL7tiMBnS4HvK8/4aEIx14xMgiuCS
+bTnMGlIlImjMKdj0iKOd0N2NPQcfr0NTUdZJ/p1o965lq/9i7xcfHinTAoGBAOth
+JqwyGQ6oP005Vry3S/7E7UJjYxMaUfhRmMGoVz+qXAEfq0r4TkNrcEvP7mu72pVe
+q1P4imQjvvPXqoPBdh310a6OCQ7BrFpkOghHBIG0koblncml4hdBSReUA1auW2Qr
+c/MUSeV96DDbI2mZJulVdqINyaAt/JDMnfdcbCvBAoGAYPTI91/ndFzeckSvHYnV
+TrnnvcKtWnqa/03rDzL++4D3ENRMsvmrVpJ2aob8iXrrPb40iUd0QZlzNFtLKss2
+Rjty2JWNuAaNdsnWPRSRtbX8hBMxA11TjWHmqPfYeT+J95YoaJwKeLp5I8bl/+c1
+JvOeBWjA55XGTq8/jLqzXD8CgYEAiQVyJNW5Hn4083iIlK1DkRkEYRxIRYuR4jNl
+8H5V5BsBGipcZfUsYjT+FzQBQDgII+ILbIOH1Im2lG6ctbx+TSyXlrzaavu1oJ0t
+5zmoVvVOQzcR5pwphI4dxZsFYoV3cFWXVw8dgXoNG7vF3qgoLbbxq57JG/UJTSXA
+Y4oq8kECgYEAlgh6v+o6jCUD7l0JWdRtZy52rhC3W/HrhcHE0/l3RjeV+kLIWr9u
+WbNltgZQGvPVQ+ZwPIYj1gaGP17wm5pAsJNSN4LQ1v4Fj/XjT7zdwYwYOrXIJati
+5HTeyHjm+wwOPYrmH4YLGwAh6T1is42E0K2L7LG8HnO4bHbfV2mKji0=
+-----END RSA PRIVATE KEY-----
diff --git a/genericparser/pub/utils/__init__.py b/genericparser/pub/utils/__init__.py
new file mode 100644
index 0000000..c7b6818
--- /dev/null
+++ b/genericparser/pub/utils/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/pub/utils/fileutil.py b/genericparser/pub/utils/fileutil.py
new file mode 100644
index 0000000..d7811b8
--- /dev/null
+++ b/genericparser/pub/utils/fileutil.py
@@ -0,0 +1,79 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import shutil
+import logging
+import tempfile
+import traceback
+import urllib2
+import zipfile
+
+
+logger = logging.getLogger(__name__)
+
+
+def make_dirs(path):
+    if not os.path.exists(path):
+        os.makedirs(path, 0777)
+
+
+def delete_dirs(path):
+    try:
+        if os.path.exists(path):
+            shutil.rmtree(path)
+    except Exception as e:
+        logger.error(traceback.format_exc())
+        logger.error("Failed to delete %s:%s", path, e.message)
+
+
+def download_file_from_http(url, local_dir, file_name):
+    local_file_name = os.path.join(local_dir, file_name)
+    is_download_ok = False
+    try:
+        make_dirs(local_dir)
+        r = urllib2.Request(url)
+        req = urllib2.urlopen(r)
+        save_file = open(local_file_name, 'wb')
+        save_file.write(req.read())
+        save_file.close()
+        req.close()
+        is_download_ok = True
+    except:
+        logger.error(traceback.format_exc())
+        logger.error("Failed to download %s to %s.", url, local_file_name)
+    return is_download_ok, local_file_name
+
+
+def unzip_file(zip_src, dst_dir, csar_path):
+    if os.path.exists(zip_src):
+        fz = zipfile.ZipFile(zip_src, 'r')
+        for file in fz.namelist():
+            fz.extract(file, dst_dir)
+        return os.path.join(dst_dir, csar_path)
+    else:
+        return ""
+
+
+def unzip_csar_to_tmp(zip_src):
+    dirpath = tempfile.mkdtemp()
+    zip_ref = zipfile.ZipFile(zip_src, 'r')
+    zip_ref.extractall(dirpath)
+    return dirpath
+
+
+def get_artifact_path(vnf_path, artifact_file):
+    for root, dirs, files in os.walk(vnf_path):
+        if artifact_file in files:
+            return os.path.join(root, artifact_file)
+    return None
diff --git a/genericparser/pub/utils/idutil.py b/genericparser/pub/utils/idutil.py
new file mode 100644
index 0000000..85bebb8
--- /dev/null
+++ b/genericparser/pub/utils/idutil.py
@@ -0,0 +1,20 @@
+# Copyright 2016 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from redisco import containers as cont
+
+
+def get_auto_id(id_type, id_group="auto_id_hash"):
+    auto_id_hash = cont.Hash(id_group)
+    auto_id_hash.hincrby(id_type, 1)
+    return auto_id_hash.hget(id_type)
diff --git a/genericparser/pub/utils/jobutil.py b/genericparser/pub/utils/jobutil.py
new file mode 100644
index 0000000..c06c72d
--- /dev/null
+++ b/genericparser/pub/utils/jobutil.py
@@ -0,0 +1,144 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import datetime
+import logging
+import uuid
+import traceback
+
+from genericparser.pub.database.models import JobStatusModel, JobModel
+from genericparser.pub.utils import idutil
+
+logger = logging.getLogger(__name__)
+
+
+def enum(**enums):
+    return type('Enum', (), enums)
+
+
+JOB_STATUS = enum(PROCESSING=0, FINISH=1)
+JOB_MODEL_STATUS = enum(STARTED='started', PROCESSING='processing', FINISHED='finished', ERROR='error',
+                        TIMEOUT='timeout')
+JOB_TYPE = enum(CREATE_VNF="create vnf", TERMINATE_VNF="terminate vnf", GRANT_VNF="grant vnf",
+                MANUAL_SCALE_VNF="manual scale vnf", HEAL_VNF="heal vnf")
+
+
+class JobUtil(object):
+    def __init__(self):
+        pass
+
+    @staticmethod
+    def __gen_job_id(job_name):
+        return "%s-%s" % (job_name if job_name else "UnknownJob", uuid.uuid1())
+
+    @staticmethod
+    def query_job_status(job_id, index_id=-1):
+        jobs = []
+        if index_id < 0:
+            row = JobStatusModel.objects.filter(jobid=job_id).order_by("-indexid").first()
+            if row:
+                jobs.append(row)
+        else:
+            [jobs.append(job) for job in JobStatusModel.objects.filter(jobid=job_id).order_by("-indexid")
+             if job.indexid > index_id]
+        return jobs
+
+    @staticmethod
+    def is_job_exists(job_id):
+        jobs = JobModel.objects.filter(jobid=job_id)
+        return len(jobs) > 0
+
+    @staticmethod
+    def create_job(inst_type, jobaction, inst_id, user='', job_id=None, res_name=''):
+        if job_id is None:
+            job_id = JobUtil.__gen_job_id(
+                '%s-%s-%s' % (str(inst_type).replace(' ', '_'), str(jobaction).replace(' ', '_'), str(inst_id)))
+        job = JobModel()
+        job.jobid = job_id
+        job.jobtype = inst_type
+        job.jobaction = jobaction
+        job.resid = str(inst_id)
+        job.status = JOB_STATUS.PROCESSING
+        job.user = user
+        job.starttime = datetime.datetime.now().strftime('%Y-%m-%d %X')
+        job.progress = 0
+        job.resname = res_name
+        logger.debug("create a new job, jobid=%s, jobtype=%s, jobaction=%s, resid=%s, status=%d" %
+                     (job.jobid, job.jobtype, job.jobaction, job.resid, job.status))
+        job.save()
+        return job_id
+
+    @staticmethod
+    def clear_job(job_id):
+        [job.delete() for job in JobModel.objects.filter(jobid=job_id)]
+        logger.debug("Clear job, job_id=%s" % job_id)
+
+    @staticmethod
+    def add_job_status(job_id, progress, status_decs, error_code=""):
+        jobs = JobModel.objects.filter(jobid=job_id)
+        if not jobs:
+            logger.error("Job[%s] does not exist, please create the job first." % job_id)
+            raise Exception("Job[%s] does not exist." % job_id)
+        try:
+            int_progress = int(progress)
+            job_status = JobStatusModel()
+            job_status.indexid = int(idutil.get_auto_id(job_id))
+            job_status.jobid = job_id
+            job_status.status = "processing"
+            job_status.progress = int_progress
+
+            if job_status.progress == 0:
+                job_status.status = "started"
+            elif job_status.progress == 100:
+                job_status.status = "finished"
+            elif job_status.progress == 101:
+                job_status.status = "partly_finished"
+            elif job_status.progress > 101:
+                job_status.status = "error"
+
+            if error_code == "255":
+                job_status.status = "error"
+
+            job_status.descp = status_decs
+            job_status.errcode = error_code if error_code else "0"
+            job_status.addtime = datetime.datetime.now().strftime('%Y-%m-%d %X')
+            job_status.save()
+            logger.debug("Add a new job status, jobid=%s, indexid=%d,"
+                         " status=%s, description=%s, progress=%d, errcode=%s, addtime=%r" %
+                         (job_status.jobid, job_status.indexid, job_status.status, job_status.descp,
+                          job_status.progress, job_status.errcode, job_status.addtime))
+
+            job = jobs[0]
+            job.progress = int_progress
+            if job_status.progress >= 100:
+                job.status = JOB_STATUS.FINISH
+                job.endtime = datetime.datetime.now().strftime('%Y-%m-%d %X')
+            job.save()
+            logger.debug("update job, jobid=%s, progress=%d" % (job_status.jobid, int_progress))
+        except Exception:
+            logger.error(traceback.format_exc())
+
+    @staticmethod
+    def clear_job_status(job_id):
+        [job.delete() for job in JobStatusModel.objects.filter(jobid=job_id)]
+        logger.debug("Clear job status, job_id=%s" % job_id)
+
+    @staticmethod
+    def get_unfinished_jobs(url_prefix, inst_id, inst_type):
+        jobs = JobModel.objects.filter(resid=inst_id, jobtype=inst_type, status=JOB_STATUS.PROCESSING)
+        progresses = reduce(lambda content, job: content + [url_prefix + "/" + job.jobid], jobs, [])
+        return progresses
diff --git a/genericparser/pub/utils/restcall.py b/genericparser/pub/utils/restcall.py
new file mode 100644
index 0000000..a8944b5
--- /dev/null
+++ b/genericparser/pub/utils/restcall.py
@@ -0,0 +1,113 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
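+
+# Usage sketch (the resource path is illustrative; assumes MSB is reachable
+# at MSB_SERVICE_IP:MSB_SERVICE_PORT from config):
+#
+#     ret = req_by_msb("/api/nsd/v1/ns_descriptors", "GET")
+#     if ret[0] == 0:  # ret is [result_code, response_body, http_status]
+#         body = ret[1]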
+
+import sys
+import traceback
+import logging
+import urllib2
+import uuid
+import httplib2
+
+from genericparser.pub.config.config import MSB_SERVICE_IP, MSB_SERVICE_PORT
+
+rest_no_auth, rest_oneway_auth, rest_bothway_auth = 0, 1, 2
+HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED = '200', '201', '204', '202'
+status_ok_list = [HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED]
+HTTP_404_NOTFOUND, HTTP_403_FORBIDDEN, HTTP_401_UNAUTHORIZED, HTTP_400_BADREQUEST = '404', '403', '401', '400'
+
+logger = logging.getLogger(__name__)
+
+
+def call_req(base_url, user, passwd, auth_type, resource, method, content='', additional_headers={}):
+    callid = str(uuid.uuid1())
+    logger.debug("[%s]call_req('%s','%s','%s',%s,'%s','%s','%s')" % (
+        callid, base_url, user, passwd, auth_type, resource, method, content))
+    ret = None
+    resp_status = ''
+    try:
+        full_url = combine_url(base_url, resource)
+        headers = {'content-type': 'application/json', 'accept': 'application/json'}
+        if user:
+            # strip() drops the trailing newline that the py2 base64 codec appends
+            headers['Authorization'] = 'Basic ' + ('%s:%s' % (user, passwd)).encode("base64").strip()
+        ca_certs = None
+        if additional_headers:
+            headers.update(additional_headers)
+        for retry_times in range(3):
+            http = httplib2.Http(ca_certs=ca_certs, disable_ssl_certificate_validation=(auth_type == rest_no_auth))
+            http.follow_all_redirects = True
+            try:
+                resp, resp_content = http.request(full_url, method=method.upper(), body=content, headers=headers)
+                resp_status, resp_body = resp['status'], resp_content
+                logger.debug("[%s][%d]status=%s" % (callid, retry_times, resp_status))
+                if headers['accept'] == 'application/json':
+                    resp_body = resp_content.decode('UTF-8')
+                    logger.debug("resp_body=%s", resp_body)
+                if resp_status in status_ok_list:
+                    ret = [0, resp_body, resp_status]
+                else:
+                    ret = [1, resp_body, resp_status]
+                break
+            except Exception as ex:
+                if 'httplib.ResponseNotReady' in str(sys.exc_info()):
+                    logger.debug("retry_times=%d", retry_times)
+                    logger.error(traceback.format_exc())
+                    ret = [1, "Unable to connect to %s" % full_url, resp_status]
+                    continue
+                raise ex
+    except urllib2.URLError as err:
+        ret = [2, str(err), resp_status]
+    except Exception as ex:
+        logger.error(traceback.format_exc())
+        logger.error("[%s]ret=%s" % (callid, str(sys.exc_info())))
+        res_info = str(sys.exc_info())
+        if 'httplib.ResponseNotReady' in res_info:
+            res_info = "The URL[%s] request failed or is not responding."
% full_url + ret = [3, res_info, resp_status] + except: + logger.error(traceback.format_exc()) + ret = [4, str(sys.exc_info()), resp_status] + + logger.debug("[%s]ret=%s" % (callid, str(ret))) + return ret + + +def req_by_msb(resource, method, content=''): + base_url = "http://%s:%s/" % (MSB_SERVICE_IP, MSB_SERVICE_PORT) + return call_req(base_url, "", "", rest_no_auth, resource, method, content) + + +def upload_by_msb(resource, method, file_data={}): + headers = {'Content-Type': 'application/octet-stream'} + full_url = "http://%s:%s/%s" % (MSB_SERVICE_IP, MSB_SERVICE_PORT, resource) + http = httplib2.Http() + resp, resp_content = http.request(full_url, method=method.upper(), body=file_data, headers=headers) + resp_status, resp_body = resp['status'], resp_content.decode('UTF-8') + if resp_status not in status_ok_list: + logger.error("Status code is %s, detail is %s.", resp_status, resp_body) + return [1, "Failed to upload file.", resp_status] + logger.debug("resp_body=%s", resp_body) + return [0, resp_body, resp_status] + + +def combine_url(base_url, resource): + full_url = None + if base_url.endswith('/') and resource.startswith('/'): + full_url = base_url[:-1] + resource + elif base_url.endswith('/') and not resource.startswith('/'): + full_url = base_url + resource + elif not base_url.endswith('/') and resource.startswith('/'): + full_url = base_url + resource + else: + full_url = base_url + '/' + resource + return full_url diff --git a/genericparser/pub/utils/syscomm.py b/genericparser/pub/utils/syscomm.py new file mode 100644 index 0000000..89219ec --- /dev/null +++ b/genericparser/pub/utils/syscomm.py @@ -0,0 +1,19 @@ +# Copyright 2017 ZTE Corporation. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import inspect + + +def fun_name(): + return inspect.stack()[1][3] diff --git a/genericparser/pub/utils/tests.py b/genericparser/pub/utils/tests.py new file mode 100644 index 0000000..7f8a391 --- /dev/null +++ b/genericparser/pub/utils/tests.py @@ -0,0 +1,221 @@ +# Copyright 2018 ZTE Corporation. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
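+
+# These are Django unit tests; a typical invocation from the project root
+# would be (illustrative): python manage.py test genericparser.pub.utils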
+ +import platform +import unittest +import mock +import fileutil +import urllib2 +import syscomm +import timeutil +import values + +from genericparser.pub.database.models import JobStatusModel, JobModel +from genericparser.pub.utils.jobutil import JobUtil + + +class MockReq(): + def read(self): + return "1" + + def close(self): + pass + + +class UtilsTest(unittest.TestCase): + def setUp(self): + pass + + def tearDown(self): + pass + + def test_create_and_delete_dir(self): + dirs = "abc/def/hij" + fileutil.make_dirs(dirs) + fileutil.make_dirs(dirs) + fileutil.delete_dirs(dirs) + + @mock.patch.object(urllib2, 'urlopen') + def test_download_file_from_http(self, mock_urlopen): + mock_urlopen.return_value = MockReq() + fileutil.delete_dirs("abc") + is_ok, f_name = fileutil.download_file_from_http("1", "abc", "1.txt") + self.assertTrue(is_ok) + if 'Windows' in platform.system(): + self.assertTrue(f_name.endswith("abc\\1.txt")) + else: + self.assertTrue(f_name.endswith("abc/1.txt")) + fileutil.delete_dirs("abc") + + def test_query_job_status(self): + job_id = "1" + JobStatusModel.objects.filter().delete() + JobStatusModel( + indexid=1, + jobid=job_id, + status="success", + progress=10 + ).save() + JobStatusModel( + indexid=2, + jobid=job_id, + status="success", + progress=50 + ).save() + JobStatusModel( + indexid=3, + jobid=job_id, + status="success", + progress=100 + ).save() + jobs = JobUtil.query_job_status(job_id) + self.assertEqual(1, len(jobs)) + self.assertEqual(3, jobs[0].indexid) + jobs = JobUtil.query_job_status(job_id, 1) + self.assertEqual(2, len(jobs)) + self.assertEqual(3, jobs[0].indexid) + self.assertEqual(2, jobs[1].indexid) + JobStatusModel.objects.filter().delete() + + def test_is_job_exists(self): + job_id = "1" + JobModel.objects.filter().delete() + JobModel( + jobid=job_id, + jobtype="1", + jobaction="2", + resid="3", + status=0 + ).save() + self.assertTrue(JobUtil.is_job_exists(job_id)) + JobModel.objects.filter().delete() + + def test_create_job(self): + job_id = "5" + JobModel.objects.filter().delete() + JobUtil.create_job( + inst_type="1", + jobaction="2", + inst_id="3", + user="4", + job_id=5, + res_name="6") + self.assertEqual(1, len(JobModel.objects.filter(jobid=job_id))) + JobModel.objects.filter().delete() + + def test_clear_job(self): + job_id = "1" + JobModel.objects.filter().delete() + JobModel( + jobid=job_id, + jobtype="1", + jobaction="2", + resid="3", + status=0 + ).save() + JobUtil.clear_job(job_id) + self.assertEqual(0, len(JobModel.objects.filter(jobid=job_id))) + + def test_add_job_status_when_job_is_not_created(self): + JobModel.objects.filter().delete() + self.assertRaises( + Exception, + JobUtil.add_job_status, + job_id="1", + progress=1, + status_decs="2", + error_code="0" + ) + + def test_add_job_status_normal(self): + job_id = "1" + JobModel.objects.filter().delete() + JobStatusModel.objects.filter().delete() + JobModel( + jobid=job_id, + jobtype="1", + jobaction="2", + resid="3", + status=0 + ).save() + JobUtil.add_job_status( + job_id="1", + progress=1, + status_decs="2", + error_code="0" + ) + self.assertEqual(1, len(JobStatusModel.objects.filter(jobid=job_id))) + JobStatusModel.objects.filter().delete() + JobModel.objects.filter().delete() + + def test_clear_job_status(self): + job_id = "1" + JobStatusModel.objects.filter().delete() + JobStatusModel( + indexid=1, + jobid=job_id, + status="success", + progress=10 + ).save() + JobUtil.clear_job_status(job_id) + self.assertEqual(0, len(JobStatusModel.objects.filter(jobid=job_id))) + + def 
test_get_unfinished_jobs(self): + JobModel.objects.filter().delete() + JobModel( + jobid="11", + jobtype="InstVnf", + jobaction="2", + resid="3", + status=0 + ).save() + JobModel( + jobid="22", + jobtype="InstVnf", + jobaction="2", + resid="3", + status=0 + ).save() + JobModel( + jobid="33", + jobtype="InstVnf", + jobaction="2", + resid="3", + status=0 + ).save() + progresses = JobUtil.get_unfinished_jobs( + url_prefix="/vnfinst", + inst_id="3", + inst_type="InstVnf" + ) + expect_progresses = ['/vnfinst/11', '/vnfinst/22', '/vnfinst/33'] + self.assertEqual(expect_progresses, progresses) + JobModel.objects.filter().delete() + + def test_fun_name(self): + self.assertEqual("test_fun_name", syscomm.fun_name()) + + def test_now_time(self): + self.assertIn(":", timeutil.now_time()) + self.assertIn("-", timeutil.now_time()) + + def test_ignore_case_get(self): + data = { + "Abc": "def", + "HIG": "klm" + } + self.assertEqual("def", values.ignore_case_get(data, 'ABC')) + self.assertEqual("def", values.ignore_case_get(data, 'abc')) + self.assertEqual("klm", values.ignore_case_get(data, 'hig')) + self.assertEqual("bbb", values.ignore_case_get(data, 'aaa', 'bbb')) diff --git a/genericparser/pub/utils/timeutil.py b/genericparser/pub/utils/timeutil.py new file mode 100644 index 0000000..1d97e9d --- /dev/null +++ b/genericparser/pub/utils/timeutil.py @@ -0,0 +1,19 @@ +# Copyright 2017 ZTE Corporation. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime + + +def now_time(fmt="%Y-%m-%d %H:%M:%S"): + return datetime.datetime.now().strftime(fmt) diff --git a/genericparser/pub/utils/toscaparsers/__init__.py b/genericparser/pub/utils/toscaparsers/__init__.py new file mode 100644 index 0000000..4b73f48 --- /dev/null +++ b/genericparser/pub/utils/toscaparsers/__init__.py @@ -0,0 +1,54 @@ +# Copyright 2017 ZTE Corporation. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
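+
+# Usage sketch (illustrative path): each parse_* helper below returns a JSON
+# string with nulls normalized to "".
+#
+#     nsd_json = parse_nsd("/tmp/nsd.csar", input_parameters=[])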
+ +import json + +from genericparser.pub.utils.toscaparsers.nsdmodel import NsdInfoModel +from genericparser.pub.utils.toscaparsers.pnfmodel import PnfdInfoModel +from genericparser.pub.utils.toscaparsers.sdmodel import SdInfoModel +from genericparser.pub.utils.toscaparsers.vnfdmodel import EtsiVnfdInfoModel + + +def parse_nsd(path, input_parameters=[]): + tosca_obj = NsdInfoModel(path, input_parameters).model + strResponse = json.dumps(tosca_obj, default=lambda obj: obj.__dict__) + strResponse = strResponse.replace(': null', ': ""') + return strResponse + + +def parse_sd(path, input_parameters=[]): + tosca_obj = SdInfoModel(path, input_parameters) + strResponse = json.dumps(tosca_obj, default=lambda obj: obj.__dict__) + strResponse = strResponse.replace(': null', ': ""') + return strResponse + + +def parse_vnfd(path, input_parameters=[], isETSI=True): + if isETSI: + tosca_obj = EtsiVnfdInfoModel(path, input_parameters) + else: + tosca_obj = {} + strResponse = json.dumps(tosca_obj, default=lambda obj: obj.__dict__) + strResponse = strResponse.replace(': null', ': ""') + return strResponse + + +def parse_pnfd(path, input_parameters=[], isETSI=True): + if isETSI: + tosca_obj = PnfdInfoModel(path, input_parameters) + else: + tosca_obj = {} + strResponse = json.dumps(tosca_obj, default=lambda obj: obj.__dict__) + strResponse = strResponse.replace(': null', ': ""') + return strResponse diff --git a/genericparser/pub/utils/toscaparsers/basemodel.py b/genericparser/pub/utils/toscaparsers/basemodel.py new file mode 100644 index 0000000..a5c1f45 --- /dev/null +++ b/genericparser/pub/utils/toscaparsers/basemodel.py @@ -0,0 +1,524 @@ +# Copyright 2017 ZTE Corporation. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
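+
+# Subclassing sketch (hypothetical model): concrete parsers override
+# parseModel(), which __init__ invokes once the ToscaTemplate is built.
+#
+#     class MyInfoModel(BaseInfoModel):
+#         def parseModel(self, tosca):
+#             self.metadata = self.buildMetadata(tosca)
+#             self.inputs = self.buildInputs(tosca)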
+ +import ftplib +import json +import logging +import os +import re +import shutil +import urllib + +import paramiko +from toscaparser.tosca_template import ToscaTemplate +from toscaparser.properties import Property +from toscaparser.functions import Function, Concat, GetInput, get_function, function_mappings +from genericparser.pub.utils.toscaparsers.graph import Graph + +from genericparser.pub.utils.toscaparsers.dataentityext import DataEntityExt + +logger = logging.getLogger(__name__) + +METADATA = "metadata" +PROPERTIES = "properties" +DESCRIPTION = "description" +REQUIREMENTS = "requirements" +INTERFACES = "interfaces" +TOPOLOGY_TEMPLATE = "topology_template" +INPUTS = "inputs" +CAPABILITIES = "capabilities" +ATTRIBUTES = "attributes" +ARTIFACTS = "artifacts" +DERIVED_FROM = "derived_from" + +NODE_NAME = "name" +NODE_TYPE = "nodeType" +NODE_ROOT = "tosca.nodes.Root" +GROUP_TYPE = "groupType" +GROUPS_ROOT = "tosca.groups.Root" + + +class BaseInfoModel(object): + + def __init__(self, path=None, params=None, tosca=None): + if tosca: + _tosca = tosca + else: + _tosca = self.buildToscaTemplate(path, params) + self.description = getattr(_tosca, "description", "") + self.parseModel(_tosca) + + def parseModel(self, tosca): + pass + + def buildInputs(self, tosca): + topo = tosca.tpl.get(TOPOLOGY_TEMPLATE, None) + return topo.get(INPUTS, {}) if topo else {} + + def buildToscaTemplate(self, path, params): + file_name = None + try: + file_name = self._check_download_file(path) + valid_params = self._validate_input_params(file_name, params) + return self._create_tosca_template(file_name, valid_params) + finally: + if file_name is not None and file_name != path and os.path.exists(file_name): + try: + os.remove(file_name) + except Exception as e: + logger.error("Failed to parse package, error: %s", e.message) + + def _validate_input_params(self, path, params): + valid_params = {} + inputs = {} + if isinstance(params, list): + for param in params: + key = param.get('key', 'undefined') + value = param.get('value', 'undefined') + inputs[key] = value + params = inputs + + if params: + tmp = self._create_tosca_template(path, None) + if isinstance(params, dict): + for key, value in params.items(): + if hasattr(tmp, 'inputs') and len(tmp.inputs) > 0: + for input_def in tmp.inputs: + if (input_def.name == key): + valid_params[key] = DataEntityExt.validate_datatype(input_def.type, value) + return valid_params + + def _create_tosca_template(self, file_name, valid_params): + tosca_tpl = None + try: + tosca_tpl = ToscaTemplate(path=file_name, + parsed_params=valid_params, + no_required_paras_check=True, + debug_mode=True) + except Exception as e: + print e.message + finally: + if tosca_tpl is not None and hasattr(tosca_tpl, "temp_dir") and os.path.exists(tosca_tpl.temp_dir): + try: + shutil.rmtree(tosca_tpl.temp_dir) + except Exception as e: + logger.error("Failed to create tosca template, error: %s", e.message) + print "-----------------------------" + print '\n'.join(['%s:%s' % item for item in tosca_tpl.__dict__.items()]) + print "-----------------------------" + return tosca_tpl + + def _check_download_file(self, path): + if (path.startswith("ftp") or path.startswith("sftp")): + return self.downloadFileFromFtpServer(path) + elif (path.startswith("http")): + return self.download_file_from_httpserver(path) + return path + + def download_file_from_httpserver(self, path): + path = path.encode("utf-8") + tmps = str.split(path, '/') + localFileName = tmps[len(tmps) - 1] + urllib.urlretrieve(path, localFileName) + 
return localFileName + + def downloadFileFromFtpServer(self, path): + path = path.encode("utf-8") + tmp = str.split(path, '://') + protocol = tmp[0] + tmp = str.split(tmp[1], ':') + if len(tmp) == 2: + userName = tmp[0] + tmp = str.split(tmp[1], '@') + userPwd = tmp[0] + index = tmp[1].index('/') + hostIp = tmp[1][0:index] + remoteFileName = tmp[1][index:len(tmp[1])] + if protocol.lower() == 'ftp': + hostPort = 21 + else: + hostPort = 22 + + if len(tmp) == 3: + userName = tmp[0] + userPwd = str.split(tmp[1], '@')[0] + hostIp = str.split(tmp[1], '@')[1] + index = tmp[2].index('/') + hostPort = tmp[2][0:index] + remoteFileName = tmp[2][index:len(tmp[2])] + + localFileName = str.split(remoteFileName, '/') + localFileName = localFileName[len(localFileName) - 1] + + if protocol.lower() == 'sftp': + self.sftp_get(userName, userPwd, hostIp, hostPort, remoteFileName, localFileName) + else: + self.ftp_get(userName, userPwd, hostIp, hostPort, remoteFileName, localFileName) + return localFileName + + def sftp_get(self, userName, userPwd, hostIp, hostPort, remoteFileName, localFileName): + # return + t = None + try: + t = paramiko.Transport(hostIp, int(hostPort)) + t.connect(username=userName, password=userPwd) + sftp = paramiko.SFTPClient.from_transport(t) + sftp.get(remoteFileName, localFileName) + finally: + if t is not None: + t.close() + + def ftp_get(self, userName, userPwd, hostIp, hostPort, remoteFileName, localFileName): + f = None + try: + ftp = ftplib.FTP() + ftp.connect(hostIp, hostPort) + ftp.login(userName, userPwd) + f = open(localFileName, 'wb') + ftp.retrbinary('RETR ' + remoteFileName, f.write, 1024) + f.close() + finally: + if f is not None: + f.close() + + def buildMetadata(self, tosca): + return tosca.tpl.get(METADATA, {}) if tosca else {} + + def buildNode(self, nodeTemplate, tosca): + inputs = tosca.inputs + parsed_params = tosca.parsed_params + ret = {} + ret[NODE_NAME] = nodeTemplate.name + ret[NODE_TYPE] = nodeTemplate.type + if DESCRIPTION in nodeTemplate.entity_tpl: + ret[DESCRIPTION] = nodeTemplate.entity_tpl[DESCRIPTION] + else: + ret[DESCRIPTION] = '' + if METADATA in nodeTemplate.entity_tpl: + ret[METADATA] = nodeTemplate.entity_tpl[METADATA] + else: + ret[METADATA] = '' + props = self.buildProperties_ex(nodeTemplate, tosca.topology_template) + ret[PROPERTIES] = self.verify_properties(props, inputs, parsed_params) + ret[REQUIREMENTS] = self.build_requirements(nodeTemplate) + self.buildCapabilities(nodeTemplate, inputs, ret) + self.buildArtifacts(nodeTemplate, inputs, ret) + interfaces = self.build_interfaces(nodeTemplate) + if interfaces: + ret[INTERFACES] = interfaces + return ret + + def buildProperties(self, nodeTemplate, parsed_params): + properties = {} + isMappingParams = parsed_params and len(parsed_params) > 0 + for k, item in nodeTemplate.get_properties().items(): + properties[k] = item.value + if isinstance(item.value, GetInput): + if item.value.result() and isMappingParams: + properties[k] = DataEntityExt.validate_datatype(item.type, item.value.result()) + else: + tmp = {} + tmp[item.value.name] = item.value.input_name + properties[k] = tmp + if ATTRIBUTES in nodeTemplate.entity_tpl: + for k, item in nodeTemplate.entity_tpl[ATTRIBUTES].items(): + properties[k] = str(item) + return properties + + def buildProperties_ex(self, nodeTemplate, topology_template, properties=None): + if properties is None: + properties = nodeTemplate.get_properties() + _properties = {} + if isinstance(properties, dict): + for name, prop in properties.items(): + if isinstance(prop, 
Property): + if isinstance(prop.value, Function): + if isinstance(prop.value, Concat): # support one layer inner function. + value_str = '' + for arg in prop.value.args: + if isinstance(arg, str): + value_str += arg + elif isinstance(arg, dict): + raw_func = {} + for k, v in arg.items(): + func_args = [] + func_args.append(v) + raw_func[k] = func_args + func = get_function(topology_template, nodeTemplate, raw_func) + value_str += str(func.result()) + _properties[name] = value_str + else: + _properties[name] = prop.value.result() + elif isinstance(prop.value, dict) or isinstance(prop.value, list): + _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop.value) + elif prop.type == 'string': + _properties[name] = prop.value + else: + _properties[name] = json.dumps(prop.value) + elif isinstance(prop, dict): + _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop) + elif isinstance(prop, list): + _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop) + elif name in function_mappings: + raw_func = {} + func_args = [] + func_args.append(prop) + raw_func[name] = func_args + if name == 'CONCAT': + value_str = '' + for arg in prop: + if isinstance(arg, str): + value_str += arg + elif isinstance(arg, dict): + raw_func = {} + for k, v in arg.items(): + func_args = [] + func_args.append(v) + raw_func[k] = func_args + value_str += str( + get_function(topology_template, nodeTemplate, raw_func).result()) + value = value_str + else: + return get_function(topology_template, nodeTemplate, raw_func).result() + else: + _properties[name] = prop + elif isinstance(properties, list): + value = [] + for para in properties: + if isinstance(para, dict) or isinstance(para, list): + value.append(self.buildProperties_ex(nodeTemplate, topology_template, para)) + else: + value.append(para) + return value + return _properties + + def verify_properties(self, props, inputs, parsed_params): + ret_props = {} + if (props and len(props) > 0): + for key, value in props.items(): + ret_props[key] = self._verify_value(value, inputs, parsed_params) + # if isinstance(value, str): + # ret_props[key] = self._verify_string(inputs, parsed_params, value); + # continue + # if isinstance(value, list): + # ret_props[key] = map(lambda x: self._verify_dict(inputs, parsed_params, x), value) + # continue + # if isinstance(value, dict): + # ret_props[key] = self._verify_map(inputs, parsed_params, value) + # continue + # ret_props[key] = value + return ret_props + + def build_requirements(self, node_template): + rets = [] + for req in node_template.requirements: + for req_name, req_value in req.items(): + if (isinstance(req_value, dict)): + if ('node' in req_value and req_value['node'] not in node_template.templates): + continue # No target requirement for aria parser, not add to result. 
+ rets.append({req_name: req_value}) + return rets + + def buildCapabilities(self, nodeTemplate, inputs, ret): + capabilities = json.dumps(nodeTemplate.entity_tpl.get(CAPABILITIES, None)) + match = re.findall(r'\{"get_input":\s*"([\w|\-]+)"\}', capabilities) + for m in match: + aa = [input_def for input_def in inputs if m == input_def.name][0] + capabilities = re.sub(r'\{"get_input":\s*"([\w|\-]+)"\}', json.dumps(aa.default), capabilities, 1) + if capabilities != 'null': + ret[CAPABILITIES] = json.loads(capabilities) + + def buildArtifacts(self, nodeTemplate, inputs, ret): + artifacts = json.dumps(nodeTemplate.entity_tpl.get('artifacts', None)) + match = re.findall(r'\{"get_input":\s*"([\w|\-]+)"\}', artifacts) + for m in match: + aa = [input_def for input_def in inputs if m == input_def.name][0] + artifacts = re.sub(r'\{"get_input":\s*"([\w|\-]+)"\}', json.dumps(aa.default), artifacts, 1) + if artifacts != 'null': + ret[ARTIFACTS] = json.loads(artifacts) + + def build_interfaces(self, node_template): + if INTERFACES in node_template.entity_tpl: + return node_template.entity_tpl[INTERFACES] + return None + + def isNodeTypeX(self, node, nodeTypes, x): + node_type = node[NODE_TYPE] + while node_type != x: + node_type_derived = node_type + node_type = nodeTypes[node_type][DERIVED_FROM] + if node_type == NODE_ROOT or node_type == node_type_derived: + return False + return True + + def get_requirement_node_name(self, req_value): + return self.get_prop_from_obj(req_value, 'node') + + def getRequirementByNodeName(self, nodeTemplates, storage_name, prop): + for node in nodeTemplates: + if node[NODE_NAME] == storage_name: + if prop in node: + return node[prop] + + def get_prop_from_obj(self, obj, prop): + if isinstance(obj, str): + return obj + if (isinstance(obj, dict) and prop in obj): + return obj[prop] + return None + + def getNodeDependencys(self, node): + return self.getRequirementByName(node, 'dependency') + + def getRequirementByName(self, node, requirementName): + requirements = [] + if REQUIREMENTS in node: + for item in node[REQUIREMENTS]: + for key, value in item.items(): + if key == requirementName: + requirements.append(value) + return requirements + + def _verify_value(self, value, inputs, parsed_params): + if value == '{}': + return '' + if isinstance(value, str): + return self._verify_string(inputs, parsed_params, value) + if isinstance(value, list) or isinstance(value, dict): + return self._verify_object(value, inputs, parsed_params) + return value + + def _verify_object(self, value, inputs, parsed_params): + s = self._verify_string(inputs, parsed_params, json.dumps(value)) + return json.loads(s) + + def _get_input_name(self, getInput): + input_name = getInput.split(':')[1] + input_name = input_name.strip() + return input_name.replace('"', '').replace('}', '') + + def _verify_string(self, inputs, parsed_params, value): + getInputs = re.findall(r'{"get_input": "[a-zA-Z_0-9]+"}', value) + for getInput in getInputs: + input_name = self._get_input_name(getInput) + if parsed_params and input_name in parsed_params: + value = value.replace(getInput, json.dumps(parsed_params[input_name])) + else: + for input_def in inputs: + if input_def.default and input_name == input_def.name: + value = value.replace(getInput, json.dumps(input_def.default)) + return value + + def get_node_by_name(self, node_templates, name): + for node in node_templates: + if node[NODE_NAME] == name: + return node + return None + + def getCapabilityByName(self, node, capabilityName): + if CAPABILITIES in node and 
capabilityName in node[CAPABILITIES]: + return node[CAPABILITIES][capabilityName] + return None + + def get_base_path(self, tosca): + fpath, fname = os.path.split(tosca.path) + return fpath + + def build_artifacts(self, node): + rets = [] + if ARTIFACTS in node and len(node[ARTIFACTS]) > 0: + artifacts = node[ARTIFACTS] + for name, value in artifacts.items(): + ret = {} + ret['artifact_name'] = name + ret['file'] = value + if isinstance(value, dict): + ret.update(value) + rets.append(ret) + else: + # TODO It is workaround for SDC-1900. + logger.error("VCPE specific code") + ret = {} + ret['artifact_name'] = "sw_image" + ret['file'] = "ubuntu_16.04" + ret['type'] = "tosca.artifacts.nfv.SwImage" + rets.append(ret) + + return rets + + def get_node_by_req(self, node_templates, req): + req_node_name = self.get_requirement_node_name(req) + return self.get_node_by_name(node_templates, req_node_name) + + def isGroupTypeX(self, group, groupTypes, x): + group_type = group[GROUP_TYPE] + while group_type != x: + group_type_derived = group_type + group_type = groupTypes[group_type][DERIVED_FROM] + if group_type == GROUPS_ROOT or group_type == group_type_derived: + return False + return True + + def setTargetValues(self, dict_target, target_keys, dict_source, source_keys): + i = 0 + for item in source_keys: + dict_target[target_keys[i]] = dict_source.get(item, "") + i += 1 + return dict_target + + def get_deploy_graph(self, tosca, relations): + nodes = tosca.graph.nodetemplates + graph = Graph() + for node in nodes: + self._build_deploy_path(node, [], graph, relations) + return graph.to_dict() + + def _build_deploy_path(self, node, node_parent, graph, relations): + graph.add_node(node.name, node_parent) + type_require_set = {} + type_requires = node.type_definition.requirements + for type_require in type_requires: + type_require_set.update(type_require) + for requirement in node.requirements: + for k in requirement.keys(): + if type_require_set[k].get('relationship', None) in relations[0] or type_require_set[k].get('capability', None) in relations[0]: + if isinstance(requirement[k], dict): + next_node = requirement[k].get('node', None) + else: + next_node = requirement[k] + graph.add_node(next_node, [node.name]) + if type_require_set[k].get('relationship', None) in relations[1]: + if isinstance(requirement[k], dict): + next_node = requirement[k].get('node', None) + else: + next_node = requirement[k] + graph.add_node(next_node, [node.name]) + + def get_substitution_mappings(self, tosca): + node = { + 'properties': {}, + 'requirements': {}, + 'capabilities': {}, + 'metadata': {} + } + metadata = None + substitution_mappings = tosca.tpl['topology_template'].get('substitution_mappings', None) + if substitution_mappings: + node['type'] = substitution_mappings['node_type'] + node['properties'] = substitution_mappings.get('properties', {}) + node['requirements'] = substitution_mappings.get('requirements', {}) + node['capabilities'] = substitution_mappings.get('capabilities', {}) + metadata = substitution_mappings.get('metadata', {}) + node['metadata'] = metadata if metadata and metadata != {} else self.buildMetadata(tosca) + return node diff --git a/genericparser/pub/utils/toscaparsers/const.py b/genericparser/pub/utils/toscaparsers/const.py new file mode 100644 index 0000000..9c61c48 --- /dev/null +++ b/genericparser/pub/utils/toscaparsers/const.py @@ -0,0 +1,30 @@ +# Copyright 2018 ZTE Corporation. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+NS_METADATA_SECTIONS = (NS_UUID, NS_INVARIANTUUID, NS_NAME, NS_VERSION, NS_DESIGNER, NSD_RELEASE_DATE) =\
+    ("nsd_id", "nsd_invariant_id", "nsd_name", "nsd_file_structure_version", "nsd_designer", "nsd_release_date_time")
+
+SDC_SERVICE_METADATA_SECTIONS = (SRV_UUID, SRV_INVARIANTUUID, SRV_NAME) = ('UUID', 'invariantUUID', 'name')
+
+PNF_METADATA_SECTIONS = (PNF_UUID, PNF_INVARIANTUUID, PNF_NAME, PNF_METADATA_DESCRIPTION, PNF_VERSION, PNF_PROVIDER) = \
+    ("descriptor_id", "descriptor_invariant_id", "name", "description", "version", "provider")
+PNF_SECTIONS = (PNF_ID, PNF_METADATA, PNF_PROPERTIES, PNF_DESCRIPTION) = \
+    ("pnf_id", "metadata", "properties", "description")
+
+VNF_SECTIONS = (VNF_ID, VNF_METADATA, VNF_PROPERTIES, VNF_DESCRIPTION) = \
+    ("vnf_id", "metadata", "properties", "description")
+
+VL_SECTIONS = (VL_ID, VL_METADATA, VL_PROPERTIES, VL_DESCRIPTION) = \
+    ("vl_id", "metadata", "properties", "description")
diff --git a/genericparser/pub/utils/toscaparsers/dataentityext.py b/genericparser/pub/utils/toscaparsers/dataentityext.py
new file mode 100644
index 0000000..825e93b
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/dataentityext.py
@@ -0,0 +1,33 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from toscaparser.dataentity import DataEntity
+from toscaparser.elements.constraints import Schema
+from toscaparser.common.exception import ExceptionCollector
+
+
+class DataEntityExt(object):
+    '''A complex data value entity ext.'''
+    @staticmethod
+    def validate_datatype(type, value, entry_schema=None, custom_def=None):
+        if value:
+            if (type == Schema.STRING):
+                return str(value)
+            elif type == Schema.FLOAT:
+                try:
+                    return float(value)
+                except Exception:
+                    ExceptionCollector.appendException(ValueError(('"%s" is not a float.') % value))
+            return DataEntity.validate_datatype(type, value, entry_schema, custom_def)
+        return value
diff --git a/genericparser/pub/utils/toscaparsers/graph.py b/genericparser/pub/utils/toscaparsers/graph.py
new file mode 100644
index 0000000..6d38d12
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/graph.py
@@ -0,0 +1,74 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import deque +from collections import OrderedDict + + +class Graph(object): + + def __init__(self, graph_dict=None): + self.graph = OrderedDict() + if graph_dict: + for node, dep_nodes in graph_dict.iteritems(): + self.add_node(node, dep_nodes) + + def add_node(self, node, dep_nodes): + if node not in self.graph: + self.graph[node] = set() + if isinstance(dep_nodes, list): + for dep_node in dep_nodes: + if dep_node not in self.graph: + self.graph[dep_node] = set() + if dep_node not in self.graph[node]: + self.graph[node].add(dep_node) + + def get_pre_nodes(self, node): + return [k for k in self.graph if node in self.graph[k]] + + def topo_sort(self): + degree = {} + for node in self.graph: + degree[node] = 0 + + for node in self.graph: + for dependent in self.graph[node]: + degree[dependent] += 1 + + queue = deque() + for node in degree: + if degree[node] == 0: + queue.appendleft(node) + + sort_list = [] + while queue: + node = queue.pop() + sort_list.append(node) + for dependent in self.graph[node]: + degree[dependent] -= 1 + if degree[dependent] == 0: + queue.appendleft(dependent) + + if len(sort_list) == len(self.graph): + return sort_list + else: + return None + + def to_dict(self): + dict = {} + for node, dependents in self.graph.iteritems(): + dict[node] = [] + for dep in dependents: + dict[node].append(dep) + return dict diff --git a/genericparser/pub/utils/toscaparsers/nsdmodel.py b/genericparser/pub/utils/toscaparsers/nsdmodel.py new file mode 100644 index 0000000..fe522a7 --- /dev/null +++ b/genericparser/pub/utils/toscaparsers/nsdmodel.py @@ -0,0 +1,220 @@ +# Copyright 2017 ZTE Corporation. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
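+
+# NsdInfoModel below inspects the descriptor metadata and delegates either to
+# EtsiNsdInfoModel (ETSI NSDs) or to SdcServiceModel (SDC service templates).
+# Usage sketch (illustrative path):
+#
+#     model = NsdInfoModel("/tmp/nsd.csar", params=[]).model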
+ +import functools +import logging +from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel +from genericparser.pub.utils.toscaparsers.const import SDC_SERVICE_METADATA_SECTIONS +from genericparser.pub.utils.toscaparsers.servicemodel import SdcServiceModel + +logger = logging.getLogger(__name__) + +SECTIONS = (NS_TYPE, NS_VNF_TYPE, NS_VL_TYPE, NS_PNF_TYPE, NS_NFP_TYPE, NS_VNFFG_TYPE) = \ + ('tosca.nodes.nfv.NS', + 'tosca.nodes.nfv.VNF', + 'tosca.nodes.nfv.NsVirtualLink', + 'tosca.nodes.nfv.PNF', + 'tosca.nodes.nfv.NFP', + 'tosca.nodes.nfv.VNFFG') + +NFV_NS_RELATIONSHIPS = [["tosca.relationships.nfv.VirtualLinksTo", "tosca.relationships.DependsOn"], []] + + +class NsdInfoModel(BaseInfoModel): + def __init__(self, path, params): + super(NsdInfoModel, self).__init__(path, params) + + def parseModel(self, tosca): + metadata = self.buildMetadata(tosca) + self.model = {} + if self._is_etsi(metadata): + self.model = EtsiNsdInfoModel(tosca) + elif self._is_ecomp(metadata): + self.model = SdcServiceModel(tosca) + + def _is_etsi(self, metadata): + NS_METADATA_MUST = ["nsd_invariant_id", "nsd_name", "nsd_file_structure_version", "nsd_designer", "nsd_release_date_time"] + return True if len([1 for key in NS_METADATA_MUST if key in metadata]) == len(NS_METADATA_MUST) else False + + def _is_ecomp(self, metadata): + return True if len([1 for key in SDC_SERVICE_METADATA_SECTIONS if key in metadata]) == len(SDC_SERVICE_METADATA_SECTIONS) else False + + +class EtsiNsdInfoModel(BaseInfoModel): + + def __init__(self, tosca): + super(EtsiNsdInfoModel, self).__init__(tosca=tosca) + + def parseModel(self, tosca): + self.metadata = self.buildMetadata(tosca) + self.ns = self._build_ns(tosca) + self.inputs = self.buildInputs(tosca) + nodeTemplates = map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates) + types = tosca.topology_template.custom_defs + self.basepath = self.get_base_path(tosca) + self.vnfs = self._get_all_vnf(nodeTemplates, types) + self.pnfs = self._get_all_pnf(nodeTemplates, types) + self.vls = self._get_all_vl(nodeTemplates, types) + self.fps = self._get_all_fp(nodeTemplates, types) + self.vnffgs = self._get_all_vnffg(tosca.topology_template.groups, types) + self.ns_exposed = self._get_all_endpoint_exposed(tosca.topology_template) + self.nested_ns = self._get_all_nested_ns(nodeTemplates, types) + self.graph = self.get_deploy_graph(tosca, NFV_NS_RELATIONSHIPS) + + def _get_all_vnf(self, nodeTemplates, node_types): + vnfs = [] + for node in nodeTemplates: + if self.isNodeTypeX(node, node_types, NS_VNF_TYPE): + vnf = {} + vnf['vnf_id'] = node['name'] + vnf['description'] = node['description'] + vnf['properties'] = node['properties'] + if not vnf['properties'].get('id', None): + vnf['properties']['id'] = vnf['properties'].get('descriptor_id', None) + vnf['dependencies'] = self._get_networks(node, node_types) + vnf['networks'] = self._get_networks(node, node_types) + vnfs.append(vnf) + return vnfs + + def _get_all_pnf(self, nodeTemplates, node_types): + pnfs = [] + for node in nodeTemplates: + if self.isNodeTypeX(node, node_types, NS_PNF_TYPE): + pnf = {} + pnf['pnf_id'] = node['name'] + pnf['description'] = node['description'] + pnf['properties'] = node['properties'] + pnf['networks'] = self._get_networks(node, node_types) + pnfs.append(pnf) + return pnfs + + def _get_all_vl(self, nodeTemplates, node_types): + vls = [] + for node in nodeTemplates: + if self.isNodeTypeX(node, node_types, NS_VL_TYPE): + vl = dict() + vl['vl_id'] = node['name'] + vl['description'] = 
node['description'] + vl['properties'] = node['properties'] + vls.append(vl) + return vls + + def _get_all_fp(self, nodeTemplates, node_types): + fps = [] + for node in nodeTemplates: + if self.isNodeTypeX(node, node_types, NS_NFP_TYPE): + fp = {} + fp['fp_id'] = node['name'] + fp['description'] = node['description'] + fp['properties'] = node['properties'] + fp['forwarder_list'] = self._getForwarderList(node, nodeTemplates, node_types) + fps.append(fp) + return fps + + def _getForwarderList(self, node, node_templates, node_types): + forwarderList = [] + if 'requirements' in node: + for item in node['requirements']: + for key, value in item.items(): + if key == 'forwarder': + tmpnode = self.get_node_by_req(node_templates, value) + type = 'pnf' if self.isNodeTypeX(tmpnode, node_types, NS_PNF_TYPE) else 'vnf' + req_node_name = self.get_requirement_node_name(value) + if isinstance(value, dict) and 'capability' in value: + forwarderList.append( + {"type": type, "node_name": req_node_name, "capability": value['capability']}) + else: + forwarderList.append({"type": type, "node_name": req_node_name, "capability": ""}) + return forwarderList + + def _get_all_vnffg(self, groups, group_types): + vnffgs = [] + for group in groups: + if self.isGroupTypeX(group, group_types, NS_VNFFG_TYPE): + vnffg = {} + vnffg['vnffg_id'] = group.name + vnffg['description'] = group.description + if 'properties' in group.tpl: + vnffg['properties'] = group.tpl['properties'] + vnffg['members'] = group.members + vnffgs.append(vnffg) + return vnffgs + + def _get_all_endpoint_exposed(self, topo_tpl): + if 'substitution_mappings' in topo_tpl.tpl: + external_cps = self._get_external_cps(topo_tpl.tpl['substitution_mappings']) + forward_cps = self._get_forward_cps(topo_tpl.tpl['substitution_mappings']) + return {"external_cps": external_cps, "forward_cps": forward_cps} + return {} + + def _get_external_cps(self, subs_mappings): + external_cps = [] + if 'requirements' in subs_mappings: + for key, value in subs_mappings['requirements'].items(): + if isinstance(value, list) and len(value) > 0: + external_cps.append({"key_name": key, "cpd_id": value[0]}) + else: + external_cps.append({"key_name": key, "cpd_id": value}) + return external_cps + + def _get_forward_cps(self, subs_mappings): + forward_cps = [] + if 'capabilities' in subs_mappings: + for key, value in subs_mappings['capabilities'].items(): + if isinstance(value, list) and len(value) > 0: + forward_cps.append({"key_name": key, "cpd_id": value[0]}) + else: + forward_cps.append({"key_name": key, "cpd_id": value}) + return forward_cps + + def _get_all_nested_ns(self, nodes, node_types): + nss = [] + for node in nodes: + if self.isNodeTypeX(node, node_types, NS_TYPE): + ns = {} + ns['ns_id'] = node['name'] + ns['description'] = node['description'] + ns['properties'] = node['properties'] + ns['networks'] = self._get_networks(node, node_types) + nss.append(ns) + return nss + + def _get_networks(self, node, node_types): + rets = [] + if 'requirements' in node and (self.isNodeTypeX(node, node_types, NS_TYPE) or self.isNodeTypeX(node, node_types, NS_VNF_TYPE)): + for item in node['requirements']: + for key, value in item.items(): + rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)}) + return rets + + def _build_ns(self, tosca): + ns = self.get_substitution_mappings(tosca) + properties = ns.get("properties", {}) + metadata = ns.get("metadata", {}) + if properties.get("descriptor_id", "") == "": + descriptor_id = metadata.get("nsd_id", "") + 
properties["descriptor_id"] = descriptor_id
+        if properties.get("version", "") == "":
+            version = metadata.get("nsd_file_structure_version", "")
+            properties["version"] = version
+        if properties.get("designer", "") == "":
+            author = metadata.get("nsd_designer", "")
+            properties["designer"] = author
+        if properties.get("name", "") == "":
+            template_name = metadata.get("nsd_name", "")
+            properties["name"] = template_name
+        if properties.get("invariant_id", "") == "":
+            nsd_invariant_id = metadata.get("nsd_invariant_id", "")
+            properties["invariant_id"] = nsd_invariant_id
+        return ns
diff --git a/genericparser/pub/utils/toscaparsers/pnfmodel.py b/genericparser/pub/utils/toscaparsers/pnfmodel.py
new file mode 100644
index 0000000..0f5445f
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/pnfmodel.py
@@ -0,0 +1,53 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel
+logger = logging.getLogger(__name__)
+
+
+class PnfdInfoModel(BaseInfoModel):
+
+    def __init__(self, path, params):
+        super(PnfdInfoModel, self).__init__(path, params)
+
+    def parseModel(self, tosca):
+        self.metadata = self.buildMetadata(tosca)
+        self.inputs = self.buildInputs(tosca)
+        nodeTemplates = map(functools.partial(self.buildNode, tosca=tosca),
+                            tosca.nodetemplates)
+        self.basepath = self.get_base_path(tosca)
+        self.pnf = {}
+        self.get_substitution_mappings(tosca)
+        self.get_all_cp(nodeTemplates)
+
+    def get_substitution_mappings(self, tosca):
+        pnf_substitution_mappings = tosca.tpl['topology_template'].get('substitution_mappings', None)
+        if pnf_substitution_mappings:
+            self.pnf['type'] = pnf_substitution_mappings['node_type']
+            self.pnf['properties'] = pnf_substitution_mappings['properties']
+
+    def get_all_cp(self, nodeTemplates):
+        self.pnf['ExtPorts'] = []
+        for node in nodeTemplates:
+            if self.isPnfExtPort(node):
+                cp = {}
+                cp['id'] = node['name']
+                cp['type'] = node['nodeType']
+                cp['properties'] = node['properties']
+                self.pnf['ExtPorts'].append(cp)
+
+    def isPnfExtPort(self, node):
+        return node['nodeType'].find('tosca.nodes.nfv.PnfExtPort') >= 0
diff --git a/genericparser/pub/utils/toscaparsers/sdmodel.py b/genericparser/pub/utils/toscaparsers/sdmodel.py
new file mode 100644
index 0000000..8cca07e
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/sdmodel.py
@@ -0,0 +1,93 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel
+from genericparser.pub.utils.toscaparsers.servicemodel import SdcServiceModel
+
+logger = logging.getLogger(__name__)
+
+
+class SdInfoModel(BaseInfoModel):
+    def __init__(self, path, params):
+        super(SdInfoModel, self).__init__(path, params)
+
+    def parseModel(self, tosca):
+        self.metadata = self.buildMetadata(tosca)
+        self.inputs = self.build_inputs(tosca)
+
+        sdc_model = SdcServiceModel(tosca)
+        if sdc_model:
+            self.service = sdc_model.ns
+            if hasattr(tosca, 'nodetemplates'):
+                self.basepath = sdc_model.basepath
+                self.vnfs = sdc_model.vnfs
+                self.pnfs = sdc_model.pnfs
+                self.vls = sdc_model.vls
+                self.graph = sdc_model.graph
+
+    def build_inputs(self, tosca):
+        """Get all the inputs, including complex (list/map) types."""
+        result_inputs = {}
+
+        if not tosca.inputs:
+            return {}
+
+        for input in tosca.inputs:
+            type = input.schema.type
+            if type == 'list' or type == 'map':
+                complex_input = []
+                entry_schema = self.get_entry_schema(input.schema.schema['entry_schema'])
+                self.get_child_input_repeat(complex_input, entry_schema, input)
+                result_inputs[input.schema.name] = complex_input
+            else:
+                simple_input = {
+                    "type": input.schema.type,
+                    "description": input.schema.description,
+                    "required": input.schema.required,
+                }
+                result_inputs[input.schema.name] = simple_input
+        return result_inputs
+
+    def get_child_input_repeat(self, complex_input, entry_schema, input):
+        custom_defs = input.custom_defs
+        properties = custom_defs[entry_schema]['properties']
+        for key, value in properties.iteritems():
+            if value['type'] == 'list':
+                child_complex_input = []
+                child_entry_schema = self.get_entry_schema(value['entry_schema'])
+                self.get_child_input_repeat(child_complex_input, child_entry_schema, input)
+                complex_input.append({key: child_complex_input})
+            else:
+                if 'description' in value.keys():
+                    simple_input = {
+                        key: "",
+                        "type": value['type'],
+                        "required": value['required'],
+                        "description": value['description'],
+                    }
+                else:
+                    simple_input = {
+                        key: "",
+                        "type": value['type'],
+                        "required": value['required'],
+                    }
+                complex_input.append(simple_input)
+
+    def get_entry_schema(self, entry_schema):
+        if isinstance(entry_schema, dict):
+            if 'type' in entry_schema.keys():
+                entry_schema = entry_schema['type']
+        return entry_schema
diff --git a/genericparser/pub/utils/toscaparsers/servicemodel.py b/genericparser/pub/utils/toscaparsers/servicemodel.py
new file mode 100644
index 0000000..069d402
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/servicemodel.py
@@ -0,0 +1,188 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
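+
+# SdcServiceModel maps an SDC service template onto the generic NS model;
+# VF/PNF/VL node templates are collected by TOSCA type. Construction sketch
+# (tosca being a ToscaTemplate already built by BaseInfoModel):
+#
+#     model = SdcServiceModel(tosca)
+#     vnf_ids = [vnf['vnf_id'] for vnf in model.vnfs]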
diff --git a/genericparser/pub/utils/toscaparsers/servicemodel.py b/genericparser/pub/utils/toscaparsers/servicemodel.py
new file mode 100644
index 0000000..069d402
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/servicemodel.py
@@ -0,0 +1,188 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+from genericparser.pub.utils.toscaparsers.const import NS_METADATA_SECTIONS, PNF_METADATA_SECTIONS, VNF_SECTIONS, PNF_SECTIONS, VL_SECTIONS
+from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel
+
+logger = logging.getLogger(__name__)
+
+SDC_SERVICE_SECTIONS = (SERVICE_TYPE, SRV_DESCRIPTION) = (
+    'org.openecomp.resource.abstract.nodes.service', 'description')
+
+SDC_SERVICE_METADATA_SECTIONS = (SRV_UUID, SRV_INVARIANTUUID, SRV_NAME) = (
+    'UUID', 'invariantUUID', 'name')
+
+# plain string aliases; a one-element parenthesized name is not a tuple
+SDC_VL = VL_TYPE = 'tosca.nodes.nfv.ext.zte.VL'
+SDC_VL_SECTIONS = (VL_ID, VL_METADATA, VL_PROPERTIES, VL_DESCRIPTION) = \
+    ("name", "metadata", "properties", "description")
+
+SDC_VF = (VF_TYPE, VF_UUID) = \
+    ('org.openecomp.resource.abstract.nodes.VF', 'UUID')
+SDC_VF_SECTIONS = (VF_ID, VF_METADATA, VF_PROPERTIES, VF_DESCRIPTION) = \
+    ("name", "metadata", "properties", "description")
+
+SDC_PNF = PNF_TYPE = 'org.openecomp.resource.abstract.nodes.PNF'
+SDC_PNF_METADATA_SECTIONS = (SDC_PNF_UUID, SDC_PNF_INVARIANTUUID, SDC_PNF_NAME, SDC_PNF_METADATA_DESCRIPTION, SDC_PNF_VERSION) = \
+    ("UUID", "invariantUUID", "name", "description", "version")
+SDC_PNF_SECTIONS = (SDC_PNF_ID, SDC_PNF_METADATA, SDC_PNF_PROPERTIES, SDC_PNF_DESCRIPTION) = \
+    ("name", "metadata", "properties", "description")
+
+SERVICE_RELATIONSHIPS = [["tosca.relationships.network.LinksTo", "tosca.relationships.nfv.VirtualLinksTo", "tosca.capabilities.nfv.VirtualLinkable", "tosca.relationships.DependsOn"], []]
+
+
+class SdcServiceModel(BaseInfoModel):
+
+    def __init__(self, tosca):
+        super(SdcServiceModel, self).__init__(tosca=tosca)
+
+    def parseModel(self, tosca):
+        self.metadata = self._buildServiceMetadata(tosca)
+        self.ns = self._build_ns(tosca)
+        self.inputs = self.buildInputs(tosca)
+        if hasattr(tosca, 'nodetemplates'):
+            # materialize the map so the node list can be iterated more than once
+            nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
+            types = tosca.topology_template.custom_defs
+            self.basepath = self.get_base_path(tosca)
+            self.vnfs = self._get_all_vnf(nodeTemplates, types)
+            self.pnfs = self._get_all_pnf(nodeTemplates, types)
+            self.vls = self._get_all_vl(nodeTemplates, types)
+            self.graph = self.get_deploy_graph(tosca, SERVICE_RELATIONSHIPS)
+
+    def _buildServiceMetadata(self, tosca):
+        """ SDC service metadata format:
+            invariantUUID: e2618ee1-a29a-44c4-a52a-b718fe1269f4
+            UUID: 2362d14a-115f-4a2b-b449-e2f93c0b7c89
+            name: demoVLB
+            description: catalogservicedescription
+            type: Service
+            category: NetworkL1-3
+            serviceType: ''
+            serviceRole: ''
+            serviceEcompNaming: true
+            ecompGeneratedNaming: true
+            namingPolicy: ''
+        """
+        metadata_temp = self.buildMetadata(tosca)
+        metadata = {}
+        return self.setTargetValues(metadata, NS_METADATA_SECTIONS, metadata_temp, SDC_SERVICE_METADATA_SECTIONS)
+
+    def _get_all_vnf(self, nodeTemplates, node_types):
+        """ SDC resource metadata format:
+            invariantUUID: 9ed46ddc-8eb7-4cb0-a1b6-04136c921af4
+            UUID: b56ba35d-45fb-41e3-b6b8-b4f66917baa1
+            customizationUUID: af0a6e64-967b-476b-87bc-959dcf59c305
+            version: '1.0'
+            name: b7d2fceb-dd11-43cd-a3fa
+            description: vendor software product
+            type: VF
+            category: Generic
+            subcategory: Abstract
+            resourceVendor: b9d9f9f7-7994-4f0d-8104
+            resourceVendorRelease: '1.0'
+            resourceVendorModelNumber: ''
+        """
+        vnfs = []
+        for node in nodeTemplates:
+            if self.isNodeTypeX(node, node_types, VF_TYPE):
+                vnf = {}
+                self.setTargetValues(vnf, VNF_SECTIONS, node, SDC_VF_SECTIONS)
+                if not vnf['properties'].get('id', None) and node['metadata']:
+                    vnf['properties']['id'] = node['metadata'].get('UUID', None)
+                vnf['properties']['vnfm_info'] = vnf['properties'].get('nf_type', None)
+                networks = self._get_networks(node, node_types)
+                vnf['dependencies'] = networks
+                vnf['networks'] = networks
+                vnfs.append(vnf)
+        return vnfs
+
+    def _get_all_pnf(self, nodeTemplates, node_types):
+        pnfs = []
+        for node in nodeTemplates:
+            if self.isNodeTypeX(node, node_types, PNF_TYPE):
+                pnf = {}
+                self.setTargetValues(pnf, PNF_SECTIONS, node, SDC_PNF_SECTIONS)
+                self.setTargetValues(pnf['properties'], PNF_METADATA_SECTIONS, node['metadata'], SDC_PNF_METADATA_SECTIONS)
+                pnf['networks'] = self._get_networks(node, node_types)
+                pnfs.append(pnf)
+        return pnfs
+
+    def _get_all_vl(self, nodeTemplates, node_types):
+        vls = []
+        for node in nodeTemplates:
+            if self.isNodeTypeX(node, node_types, VL_TYPE):
+                vl = {}
+                self.setTargetValues(vl, VL_SECTIONS, node, SDC_VL_SECTIONS)
+                vl_profile = {}
+                if 'segmentation_id' in vl['properties']:
+                    vl_profile['segmentationId'] = vl['properties'].get('segmentation_id')
+                if 'network_name' in vl['properties']:
+                    vl_profile['networkName'] = vl['properties'].get('network_name')
+                if 'cidr' in vl['properties']:
+                    vl_profile['cidr'] = vl['properties'].get('cidr')
+                if 'start_ip' in vl['properties']:
+                    vl_profile['startIp'] = vl['properties'].get('start_ip', '')
+                if 'end_ip' in vl['properties']:
+                    vl_profile['endIp'] = vl['properties'].get('end_ip', '')
+                if 'gateway_ip' in vl['properties']:
+                    vl_profile['gatewayIp'] = vl['properties'].get('gateway_ip', '')
+                if 'physical_network' in vl['properties']:
+                    vl_profile['physicalNetwork'] = vl['properties'].get('physical_network', '')
+                if 'network_type' in vl['properties']:
+                    vl_profile['networkType'] = vl['properties'].get('network_type', '')
+                if 'dhcp_enabled' in vl['properties']:
+                    vl_profile['dhcpEnabled'] = vl['properties'].get('dhcp_enabled', '')
+                if 'vlan_transparent' in vl['properties']:
+                    vl_profile['vlanTransparent'] = vl['properties'].get('vlan_transparent', '')
+                if 'mtu' in vl['properties']:
+                    vl_profile['mtu'] = vl['properties'].get('mtu', '')
+                if 'ip_version' in vl['properties']:
+                    vl_profile['ip_version'] = vl['properties'].get('ip_version', '')
+                if 'dns_nameservers' in vl['properties']:
+                    vl_profile['dns_nameservers'] = vl['properties'].get('dns_nameservers', [])
+                if 'host_routes' in vl['properties']:
+                    vl_profile['host_routes'] = vl['properties'].get('host_routes', [])
+                if 'network_id' in vl['properties']:
+                    vl_profile['network_id'] = vl['properties'].get('network_id', '')
+                vl['properties']['vl_profile'] = vl_profile
+                vls.append(vl)
+        return vls
+
+    def _get_networks(self, node, node_types):
+        rets = []
+        if 'requirements' in node and self.isNodeTypeX(node, node_types, VF_TYPE):
+            for item in node['requirements']:
+                for key, value in item.items():
+                    rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)})
+        return rets
+
+    def _build_ns(self, tosca):
+        ns = self.get_substitution_mappings(tosca)
+        properties = ns.get("properties", {})
+        metadata = ns.get("metadata", {})
+        if properties.get("descriptor_id", "") == "":
+            properties["descriptor_id"] = metadata.get(SRV_UUID, "")
+        properties["version"] = ""
+        properties["designer"] = ""
+        if properties.get("name", "") == "":
+            properties["name"] = metadata.get(SRV_NAME, "")
+        if properties.get("invariant_id", "") == "":
+            properties["invariant_id"] = metadata.get(SRV_INVARIANTUUID, "")
+        return ns
diff --git a/genericparser/pub/utils/toscaparsers/testdata/ns/ran.csar b/genericparser/pub/utils/toscaparsers/testdata/ns/ran.csar
Binary files differ
new file mode 100644
index 0000000..9ea868c
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/testdata/ns/ran.csar
diff --git a/genericparser/pub/utils/toscaparsers/testdata/ns/service-vIMS.csar b/genericparser/pub/utils/toscaparsers/testdata/ns/service-vIMS.csar
Binary files differ
new file mode 100644
index 0000000..0aeed58
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/testdata/ns/service-vIMS.csar
diff --git a/genericparser/pub/utils/toscaparsers/testdata/pnf/ran-du.csar b/genericparser/pub/utils/toscaparsers/testdata/pnf/ran-du.csar
Binary files differ
new file mode 100644
index 0000000..45168a9
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/testdata/pnf/ran-du.csar
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vSBC.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vSBC.csar
Binary files differ
new file mode 100644
index 0000000..921eafd
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/testdata/vnf/vSBC.csar
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/infra.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/infra.csar
Binary files differ
new file mode 100644
index 0000000..5c9fbcf
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/infra.csar
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbng.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbng.csar
Binary files differ
new file mode 100644
index 0000000..b11a6ef
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbng.csar
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbrgemu.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbrgemu.csar
Binary files differ
new file mode 100644
index 0000000..730ea8d
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbrgemu.csar
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgmux.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgmux.csar
Binary files differ
new file mode 100644
index 0000000..b0f37a7
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgmux.csar
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgw.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgw.csar
Binary files differ
new file mode 100644
index 0000000..ca652bf
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgw.csar
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/infra.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/infra.csar
Binary files differ
new file mode 100644
index 0000000..c91c034
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/infra.csar
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbng.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbng.csar
Binary files differ
new file mode 100644
index 0000000..5011563
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbng.csar
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbrgemu.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbrgemu.csar
Binary files differ
new file mode 100644
index 0000000..0f99199
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbrgemu.csar
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgmux.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgmux.csar
Binary files differ
new file mode 100644
index 0000000..3d2dbf7
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgmux.csar
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgw.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgw.csar
Binary files differ
new file mode 100644
index 0000000..5e47b77
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgw.csar
diff --git a/genericparser/pub/utils/toscaparsers/tests.py b/genericparser/pub/utils/toscaparsers/tests.py
new file mode 100644
index 0000000..c461790
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/tests.py
@@ -0,0 +1,102 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+import os
+import logging
+import tempfile
+import shutil
+
+from django.test import TestCase
+
+from genericparser.pub.utils.toscaparsers import parse_vnfd, parse_pnfd, parse_nsd
+from genericparser.pub.utils.toscaparsers.graph import Graph
+
+logger = logging.getLogger(__name__)
+
+
+class TestToscaparser(TestCase):
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def test_vnfd_parse(self):
+        self.remove_temp_dir()
+        input_parameters = [{"value": "222222", "key": "sdncontroller"}]
+        vcpe = ["vgw", "infra", "vbng", "vbrgemu", "vgmux"]
+        sriov_path = os.path.dirname(os.path.abspath(__file__)) + "/testdata/vnf/vcpesriov"
+        for vcpe_part in vcpe:
+            csar_file = "%s/%s.csar" % (sriov_path, vcpe_part)
+            logger.debug("csar_file: %s", csar_file)
+            vnfd_json = parse_vnfd(csar_file, input_parameters)
+            metadata = json.loads(vnfd_json).get("metadata")
+            logger.debug("sriov metadata: %s", metadata)
+            self.assertEqual("vCPE_%s" % vcpe_part, metadata.get("template_name", ""))
+            if vcpe_part == "infra":
+                self.assertEqual("b1bb0ce7-1111-4fa7-95ed-4840d70a1177", json.loads(vnfd_json)["vnf"]["properties"]["descriptor_id"])
+
+        dpdk_path = os.path.dirname(os.path.abspath(__file__)) + "/testdata/vnf/vcpedpdk"
+        for vcpe_part in vcpe:
+            csar_file = "%s/%s.csar" % (dpdk_path, vcpe_part)
+            logger.debug("csar_file: %s", csar_file)
+            vnfd_json = parse_vnfd(csar_file, input_parameters)
+            metadata = json.loads(vnfd_json).get("metadata")
+            logger.debug("dpdk metadata: %s", metadata)
+            self.assertEqual("vCPE_%s" % vcpe_part, metadata.get("template_name", ""))
+
+    def test_pnfd_parse(self):
+        self.remove_temp_dir()
+        csar_path = os.path.dirname(os.path.abspath(__file__)) + "/testdata/pnf/ran-du.csar"
+        pnfd_json = parse_pnfd(csar_path)
+        pnfd_dict = json.loads(pnfd_json)
+        metadata = pnfd_dict.get("metadata")
+        self.assertEqual("RAN_DU", metadata.get("template_name", ""))
+        descriptor_id = pnfd_dict["pnf"]["properties"]["descriptor_id"]
+        self.assertEqual(1, descriptor_id)
+
+    def test_nsd_parse(self):
+        self.remove_temp_dir()
+        # ran_csar = os.path.dirname(os.path.abspath(__file__)) + "/testdata/ns/ran.csar"
+        # nsd_json = parse_nsd(ran_csar, [])
+        # logger.debug("NS ran json: %s" % nsd_json)
+        # metadata = json.loads(nsd_json).get("metadata")
+        # self.assertEqual("RAN-NS", metadata.get("nsd_name", ""))
+
+    def test_service_descriptor_parse(self):
+        self.remove_temp_dir()
+        service_test_csar = os.path.dirname(os.path.abspath(__file__)) + "/testdata/ns/service-vIMS.csar"
+        test_json = parse_nsd(service_test_csar, [])
+        logger.debug("service-vIMS json: %s", test_json)
+        metadata = json.loads(test_json).get("metadata")
+        self.assertEqual("vIMS_v2", metadata.get("nsd_name", ""))
+
+    def remove_temp_dir(self):
+        tempdir = tempfile.gettempdir()
+        for entry in os.listdir(tempdir):
+            if entry.startswith("tmp"):
+                path = os.path.join(tempdir, entry)
+                if (not os.path.isfile(path)) and os.path.exists(path):
+                    shutil.rmtree(path)
+
+    def test_graph(self):
+        data = {
+            "cucp": [],
+            "du": [],
+            "vl_flat_net": ["cucp", "cuup"],
+            "vl_ext_net": ["cucp", "cuup"],
+            "cuup": []
+        }
+        graph = Graph(data)
+        # list.sort() sorts in place and returns None, so comparing two sort()
+        # results always passes; compare sorted copies instead
+        self.assertEqual(sorted(["vl_ext_net", "vl_flat_net"]), sorted(graph.get_pre_nodes("cucp")))
diff --git a/genericparser/pub/utils/toscaparsers/vnfdmodel.py b/genericparser/pub/utils/toscaparsers/vnfdmodel.py
new file mode 100644
index 0000000..7b4423d
--- /dev/null
+++ b/genericparser/pub/utils/toscaparsers/vnfdmodel.py
@@ -0,0 +1,265 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import base64
+import functools
+import logging
+import os
+from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel
+# from genericparser.pub.exceptions import CatalogException
+
+logger = logging.getLogger(__name__)
+
+SECTIONS = (VDU_COMPUTE_TYPE, VNF_VL_TYPE, VDU_CP_TYPE, VDU_STORAGE_TYPE) = \
+    ('tosca.nodes.nfv.Vdu.Compute', 'tosca.nodes.nfv.VnfVirtualLink', 'tosca.nodes.nfv.VduCp', 'tosca.nodes.nfv.Vdu.VirtualStorage')
+
+NFV_VNF_RELATIONSHIPS = [["tosca.relationships.nfv.VirtualLinksTo", "tosca.relationships.nfv.VduAttachesTo", "tosca.relationships.nfv.AttachesTo", "tosca.relationships.nfv.Vdu.AttachedTo", "tosca.relationships.DependsOn"],
+                         ["tosca.nodes.relationships.VirtualBindsTo", "tosca.relationships.nfv.VirtualBindsTo"]]
+
+
+class EtsiVnfdInfoModel(BaseInfoModel):
+
+    def __init__(self, path, params):
+        super(EtsiVnfdInfoModel, self).__init__(path, params)
+
+    def parseModel(self, tosca):
+        self.vnf = self._build_vnf(tosca)
+        self.metadata = self.buildMetadata(tosca)
+        self.inputs = self.buildInputs(tosca)
+        # materialize the map so the node list can be iterated more than once
+        nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca),
+                                 tosca.nodetemplates))
+        node_types = tosca.topology_template.custom_defs
+        self.basepath = self.get_base_path(tosca)
+        self.volume_storages = self._get_all_volume_storage(nodeTemplates, node_types)
+        self.vdus = self._get_all_vdu(nodeTemplates, node_types)
+        self.vls = self._get_all_vl(nodeTemplates, node_types)
+        self.cps = self._get_all_cp(nodeTemplates, node_types)
+        self.vnf_exposed = self._get_all_endpoint_exposed()
+        self.graph = self.get_deploy_graph(tosca, NFV_VNF_RELATIONSHIPS)
+
+    def _get_all_volume_storage(self, nodeTemplates, node_types):
+        rets = []
+        for node in nodeTemplates:
+            if self.isNodeTypeX(node, node_types, VDU_STORAGE_TYPE):
+                ret = {}
+                ret['volume_storage_id'] = node['name']
+                if 'description' in node:
+                    ret['description'] = node['description']
+                ret['properties'] = node['properties']
+                # TODO: image_file should be obtained from the artifacts
+                # ret['artifacts'] = self._build_artifacts(node)
+                rets.append(ret)
+        return rets
+
+    def _get_all_vdu(self, nodeTemplates, node_types):
+        rets = []
+        for node in nodeTemplates:
+            logger.debug("nodeTemplate: %s", node)
+            if self.isNodeTypeX(node, node_types, VDU_COMPUTE_TYPE):
+                ret = {}
+                ret['vdu_id'] = node['name']
+                ret['type'] = node['nodeType']
+                if 'description' in node:
+                    ret['description'] = node['description']
+                ret['properties'] = node['properties']
+                if 'inject_files' in node['properties']:
+                    inject_files = node['properties']['inject_files']
+                    # a single mapping and a list of mappings are both accepted
+                    if isinstance(inject_files, dict):
+                        inject_files = [inject_files]
+                    if isinstance(inject_files, list):
+                        for inject_file in inject_files:
+                            source_path = os.path.join(self.basepath, inject_file['source_path'])
+                            with open(source_path, "rb") as f:
+                                # str.encode("base64") exists only on Python 2;
+                                # the base64 module works on both 2 and 3
+                                inject_file["source_data_base64"] = base64.b64encode(f.read()).decode()
+                virtual_storages = self.getRequirementByName(node, 'virtual_storage')
+                ret['virtual_storages'] = list(map(self._trans_virtual_storage, virtual_storages))
+                ret['dependencies'] = [self.get_requirement_node_name(x) for x in self.getNodeDependencys(node)]
+                virtual_compute = self.getCapabilityByName(node, 'virtual_compute')
+                if virtual_compute is not None and 'properties' in virtual_compute:
+                    ret['virtual_compute'] = virtual_compute['properties']
+                ret['vls'] = self._get_linked_vl_ids(node, nodeTemplates)
+                ret['cps'] = self._get_virtual_binding_cp_ids(node, nodeTemplates)
+                ret['artifacts'] = self.build_artifacts(node)
+                rets.append(ret)
+        logger.debug("rets: %s", rets)
+        return rets
+
+    def _trans_virtual_storage(self, virtual_storage):
+        if isinstance(virtual_storage, str):
+            return {"virtual_storage_id": virtual_storage}
+        return {"virtual_storage_id": self.get_requirement_node_name(virtual_storage)}
+
+    def _get_linked_vl_ids(self, node, node_templates):
+        vl_ids = []
+        cps = self._get_virtual_binding_cps(node, node_templates)
+        for cp in cps:
+            vl_reqs = self.getRequirementByName(cp, 'virtual_link')
+            for vl_req in vl_reqs:
+                vl_ids.append(self.get_requirement_node_name(vl_req))
+        return vl_ids
+
+    def _get_virtual_binding_cp_ids(self, node, nodeTemplates):
+        return [cp['name'] for cp in self._get_virtual_binding_cps(node, nodeTemplates)]
+
+    def _get_virtual_binding_cps(self, node, nodeTemplates):
+        cps = []
+        for tmpnode in nodeTemplates:
+            if 'requirements' in tmpnode:
+                for item in tmpnode['requirements']:
+                    for key, value in item.items():
+                        if key.upper().startswith('VIRTUAL_BINDING'):
+                            req_node_name = self.get_requirement_node_name(value)
+                            if req_node_name is not None and req_node_name == node['name']:
+                                cps.append(tmpnode)
+        return cps
+
+    def _get_all_vl(self, nodeTemplates, node_types):
+        vls = []
+        for node in nodeTemplates:
+            if self.isNodeTypeX(node, node_types, VNF_VL_TYPE):
+                vl = {}
+                vl['vl_id'] = node['name']
+                vl['description'] = node['description']
+                vl['properties'] = node['properties']
+                vls.append(vl)
+        return vls
+
+    def _get_all_cp(self, nodeTemplates, node_types):
+        cps = []
+        for node in nodeTemplates:
+            if self.isNodeTypeX(node, node_types, VDU_CP_TYPE):
+                cp = {}
+                cp['cp_id'] = node['name']
+                cp['cpd_id'] = node['name']
+                cp['description'] = node['description']
+                cp['properties'] = node['properties']
+                cp['vl_id'] = self._get_node_vl_id(node)
+                cp['vdu_id'] = self._get_node_vdu_id(node)
+                vls = self._build_cp_vls(node)
+                if len(vls) > 1:
+                    cp['vls'] = vls
+                cps.append(cp)
+        return cps
+
+    def _get_node_vdu_id(self, node):
+        vdu_ids = [self.get_requirement_node_name(x) for x in self.getRequirementByName(node, 'virtual_binding')]
+        return vdu_ids[0] if vdu_ids else ""
+
+    def _get_node_vl_id(self, node):
+        vl_ids = [self.get_requirement_node_name(x) for x in self.getRequirementByName(node, 'virtual_link')]
+        return vl_ids[0] if vl_ids else ""
+
+    def _build_cp_vls(self, node):
+        return [self._build_cp_vl(x) for x in self.getRequirementByName(node, 'virtual_link')]
+
+    def _build_cp_vl(self, req):
+        cp_vl = {}
+        cp_vl['vl_id'] = self.get_prop_from_obj(req, 'node')
+        relationship = self.get_prop_from_obj(req, 'relationship')
+        if relationship is not None:
+            properties = self.get_prop_from_obj(relationship, 'properties')
+            if properties is not None and isinstance(properties, dict):
+                for key, value in properties.items():
+                    cp_vl[key] = value
+        return cp_vl
+
+    def _get_all_endpoint_exposed(self):
+        if self.vnf:
+            external_cps = self._get_external_cps(self.vnf.get('requirements', None))
+            forward_cps = self._get_forward_cps(self.vnf.get('capabilities', None))
+            return {"external_cps": external_cps, "forward_cps": forward_cps}
+        return {}
+
+    def _get_external_cps(self, vnf_requirements):
+        external_cps = []
+        if vnf_requirements:
+            if isinstance(vnf_requirements, dict):
+                for key, value in vnf_requirements.items():
+                    if isinstance(value, list) and len(value) > 0:
+                        external_cps.append({"key_name": key, "cpd_id": value[0]})
+                    else:
+                        external_cps.append({"key_name": key, "cpd_id": value})
+            elif isinstance(vnf_requirements, list):
+                for vnf_requirement in vnf_requirements:
+                    for key, value in vnf_requirement.items():
+                        if isinstance(value, list) and len(value) > 0:
+                            external_cps.append({"key_name": key, "cpd_id": value[0]})
+                        else:
+                            external_cps.append({"key_name": key, "cpd_id": value})
+        return external_cps
+
+    def _get_forward_cps(self, vnf_capabilities):
+        forward_cps = []
+        if vnf_capabilities:
+            for key, value in vnf_capabilities.items():
+                if isinstance(value, list) and len(value) > 0:
+                    forward_cps.append({"key_name": key, "cpd_id": value[0]})
+                else:
+                    forward_cps.append({"key_name": key, "cpd_id": value})
+        return forward_cps
+
+    # def get_substitution_mappings(self, tosca):
+    #     node = {}
+    #     substitution_mappings = tosca.tpl['topology_template'].get('substitution_mappings', None)
+    #     if substitution_mappings:
+    #         node = substitution_mappings.get('properties', {})
+    #         node['type'] = substitution_mappings['node_type']
+    #     return node
+
+    def _build_vnf(self, tosca):
+        vnf = self.get_substitution_mappings(tosca)
+        properties = vnf.get("properties", {})
+        metadata = vnf.get("metadata", {})
+        if properties.get("descriptor_id", "") == "":
+            descriptor_id = metadata.get("descriptor_id", "")
+            if descriptor_id == "":
+                descriptor_id = metadata.get("id", "")
+            if descriptor_id == "":
+                descriptor_id = metadata.get("UUID", "")
+            properties["descriptor_id"] = descriptor_id
+
+        if properties.get("descriptor_version", "") == "":
+            version = metadata.get("template_version", "")
+            if version == "":
+                version = metadata.get("version", "")
+            properties["descriptor_version"] = version
+
+        if properties.get("provider", "") == "":
+            provider = metadata.get("template_author", "")
+            if provider == "":
+                provider = metadata.get("provider", "")
+            properties["provider"] = provider
+
+        if properties.get("template_name", "") == "":
+            properties["template_name"] = metadata.get("template_name", "")
+
+        return vnf
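Editor's note: the inject_files handling in _get_all_vdu() above reduces to reading file bytes and producing base64 text that can be embedded in JSON. A minimal standalone sketch (not part of the commit; the path in the usage comment is hypothetical):

    import base64

    def encode_inject_file(source_path):
        # read the raw bytes and return base64 text, as _get_all_vdu() does
        with open(source_path, "rb") as f:
            return base64.b64encode(f.read()).decode()

    # print(encode_inject_file("some/inject/file.cfg"))  # hypothetical path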
diff --git a/genericparser/pub/utils/values.py b/genericparser/pub/utils/values.py
new file mode 100644
index 0000000..0fd2d1a
--- /dev/null
+++ b/genericparser/pub/utils/values.py
@@ -0,0 +1,24 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def ignore_case_get(args, key, def_val=""):
+    if not key:
+        return def_val
+    if key in args:
+        return args[key]
+    for old_key in args:
+        if old_key.upper() == key.upper():
+            return args[old_key]
+    return def_val
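Editor's note: ignore_case_get() prefers an exact key match and otherwise returns the value of the first case-insensitively matching key, falling back to def_val. A short usage sketch (not part of the commit; assumes the genericparser package is importable):

    from genericparser.pub.utils.values import ignore_case_get

    headers = {"Content-Type": "application/json", "X-Auth": "token"}
    print(ignore_case_get(headers, "content-type"))       # 'application/json'
    print(ignore_case_get(headers, "Accept", "missing"))  # 'missing'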