about summary refs log tree commit diff stats
path: root/genericparser/pub
diff options
context:
space:
mode:
author    Olivia.Zhan <zhan.jie1@zte.com.cn>  2019-07-12 15:00:04 +0800
committer Olivia.Zhan <zhan.jie1@zte.com.cn>  2019-07-12 15:39:16 +0800
commit  aef67d1a1fb076f154f9f7595b54590fc7499ee6 (patch)
tree    95f6cdeab82c33024715b92a2a8ff7b16f0e26b7 /genericparser/pub
parent  7d0fa869b75947729dbe340f8285bc018fa849c4 (diff)
Update python2 to python3
Issue-ID: VFC-1429
Signed-off-by: Olivia.Zhan <zhan.jie1@zte.com.cn>
Change-Id: I9ec0ccfa5ba200c690a85d582cee41009dfdc0f0
Diffstat (limited to 'genericparser/pub')
-rw-r--r--  genericparser/pub/database/migrations/0001_initial.py           256
-rw-r--r--  genericparser/pub/redisco/__init__.py                            58
-rw-r--r--  genericparser/pub/redisco/containers.py                         116
-rw-r--r--  genericparser/pub/utils/fileutil.py                              11
-rw-r--r--  genericparser/pub/utils/idutil.py                                 2
-rw-r--r--  genericparser/pub/utils/jobutil.py                                1
-rw-r--r--  genericparser/pub/utils/restcall.py                               4
-rw-r--r--  genericparser/pub/utils/tests.py                                 12
-rw-r--r--  genericparser/pub/utils/toscaparsers/basemodel.py                36
-rw-r--r--  genericparser/pub/utils/toscaparsers/graph.py                     4
-rw-r--r--  genericparser/pub/utils/toscaparsers/nsdmodel.py                 10
-rw-r--r--  genericparser/pub/utils/toscaparsers/pnfmodel.py                  2
-rw-r--r--  genericparser/pub/utils/toscaparsers/sdmodel.py                   6
-rw-r--r--  genericparser/pub/utils/toscaparsers/servicemodel.py              4
-rw-r--r--  genericparser/pub/utils/toscaparsers/vnfdmodel.py                 2
-rw-r--r--  genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_251.py  34
-rw-r--r--  genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_base.py 32
-rw-r--r--  genericparser/pub/utils/values.py                                 9
18 files changed, 389 insertions, 210 deletions
diff --git a/genericparser/pub/database/migrations/0001_initial.py b/genericparser/pub/database/migrations/0001_initial.py
index 8446b6e..98ca84c 100644
--- a/genericparser/pub/database/migrations/0001_initial.py
+++ b/genericparser/pub/database/migrations/0001_initial.py
@@ -28,17 +28,17 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='JobModel',
fields=[
- ('jobid', models.CharField(db_column=b'JOBID', max_length=255, primary_key=True, serialize=False)),
- ('jobtype', models.CharField(db_column=b'JOBTYPE', max_length=255)),
- ('jobaction', models.CharField(db_column=b'JOBACTION', max_length=255)),
- ('resid', models.CharField(db_column=b'RESID', max_length=255)),
- ('status', models.IntegerField(blank=True, db_column=b'STATUS', null=True)),
- ('starttime', models.CharField(blank=True, db_column=b'STARTTIME', max_length=255, null=True)),
- ('endtime', models.CharField(blank=True, db_column=b'ENDTIME', max_length=255, null=True)),
- ('progress', models.IntegerField(blank=True, db_column=b'PROGRESS', null=True)),
- ('user', models.CharField(blank=True, db_column=b'USER', max_length=255, null=True)),
- ('parentjobid', models.CharField(blank=True, db_column=b'PARENTJOBID', max_length=255, null=True)),
- ('resname', models.CharField(blank=True, db_column=b'RESNAME', max_length=255, null=True)),
+ ('jobid', models.CharField(db_column='JOBID', max_length=255, primary_key=True, serialize=False)),
+ ('jobtype', models.CharField(db_column='JOBTYPE', max_length=255)),
+ ('jobaction', models.CharField(db_column='JOBACTION', max_length=255)),
+ ('resid', models.CharField(db_column='RESID', max_length=255)),
+ ('status', models.IntegerField(blank=True, db_column='STATUS', null=True)),
+ ('starttime', models.CharField(blank=True, db_column='STARTTIME', max_length=255, null=True)),
+ ('endtime', models.CharField(blank=True, db_column='ENDTIME', max_length=255, null=True)),
+ ('progress', models.IntegerField(blank=True, db_column='PROGRESS', null=True)),
+ ('user', models.CharField(blank=True, db_column='USER', max_length=255, null=True)),
+ ('parentjobid', models.CharField(blank=True, db_column='PARENTJOBID', max_length=255, null=True)),
+ ('resname', models.CharField(blank=True, db_column='RESNAME', max_length=255, null=True)),
],
options={
'db_table': 'CATALOG_JOB',
@@ -48,13 +48,13 @@ class Migration(migrations.Migration):
name='JobStatusModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('indexid', models.IntegerField(db_column=b'INDEXID')),
- ('jobid', models.CharField(db_column=b'JOBID', max_length=255)),
- ('status', models.CharField(db_column=b'STATUS', max_length=255)),
- ('progress', models.IntegerField(blank=True, db_column=b'PROGRESS', null=True)),
- ('descp', models.TextField(db_column=b'DESCP', max_length=65535)),
- ('errcode', models.CharField(blank=True, db_column=b'ERRCODE', max_length=255, null=True)),
- ('addtime', models.CharField(blank=True, db_column=b'ADDTIME', max_length=255, null=True)),
+ ('indexid', models.IntegerField(db_column='INDEXID')),
+ ('jobid', models.CharField(db_column='JOBID', max_length=255)),
+ ('status', models.CharField(db_column='STATUS', max_length=255)),
+ ('progress', models.IntegerField(blank=True, db_column='PROGRESS', null=True)),
+ ('descp', models.TextField(db_column='DESCP', max_length=65535)),
+ ('errcode', models.CharField(blank=True, db_column='ERRCODE', max_length=255, null=True)),
+ ('addtime', models.CharField(blank=True, db_column='ADDTIME', max_length=255, null=True)),
],
options={
'db_table': 'CATALOG_JOB_STATUS',
@@ -63,30 +63,30 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='NsdmSubscriptionModel',
fields=[
- ('subscriptionid', models.CharField(db_column=b'SUBSCRIPTIONID', max_length=255, primary_key=True, serialize=False)),
- ('notificationTypes', models.TextField(db_column=b'NOTIFICATIONTYPES', null=True)),
- ('auth_info', models.TextField(db_column=b'AUTHINFO', null=True)),
- ('callback_uri', models.CharField(db_column=b'CALLBACKURI', max_length=255)),
- ('nsdInfoId', models.TextField(db_column=b'NSDINFOID', null=True)),
- ('nsdId', models.TextField(db_column=b'NSDID', null=True)),
- ('nsdName', models.TextField(db_column=b'NSDNAME', null=True)),
- ('nsdVersion', models.TextField(db_column=b'NSDVERSION', null=True)),
- ('nsdDesigner', models.TextField(db_column=b'NSDDESIGNER', null=True)),
- ('nsdInvariantId', models.TextField(db_column=b'NSDINVARIANTID', null=True)),
- ('vnfPkgIds', models.TextField(db_column=b'VNFPKGIDS', null=True)),
- ('pnfdInfoIds', models.TextField(db_column=b'PNFDINFOIDS', null=True)),
- ('nestedNsdInfoIds', models.TextField(db_column=b'NESTEDNSDINFOIDS', null=True)),
- ('nsdOnboardingState', models.TextField(db_column=b'NSDONBOARDINGSTATE', null=True)),
- ('nsdOperationalState', models.TextField(db_column=b'NSDOPERATIONALSTATE', null=True)),
- ('nsdUsageState', models.TextField(db_column=b'NSDUSAGESTATE', null=True)),
- ('pnfdId', models.TextField(db_column=b'PNFDID', null=True)),
- ('pnfdName', models.TextField(db_column=b'PNFDNAME', null=True)),
- ('pnfdVersion', models.TextField(db_column=b'PNFDVERSION', null=True)),
- ('pnfdProvider', models.TextField(db_column=b'PNFDPROVIDER', null=True)),
- ('pnfdInvariantId', models.TextField(db_column=b'PNFDINVARIANTID', null=True)),
- ('pnfdOnboardingState', models.TextField(db_column=b'PNFDONBOARDINGSTATE', null=True)),
- ('pnfdUsageState', models.TextField(db_column=b'PNFDUSAGESTATE', null=True)),
- ('links', models.TextField(db_column=b'LINKS')),
+ ('subscriptionid', models.CharField(db_column='SUBSCRIPTIONID', max_length=255, primary_key=True, serialize=False)),
+ ('notificationTypes', models.TextField(db_column='NOTIFICATIONTYPES', null=True)),
+ ('auth_info', models.TextField(db_column='AUTHINFO', null=True)),
+ ('callback_uri', models.CharField(db_column='CALLBACKURI', max_length=255)),
+ ('nsdInfoId', models.TextField(db_column='NSDINFOID', null=True)),
+ ('nsdId', models.TextField(db_column='NSDID', null=True)),
+ ('nsdName', models.TextField(db_column='NSDNAME', null=True)),
+ ('nsdVersion', models.TextField(db_column='NSDVERSION', null=True)),
+ ('nsdDesigner', models.TextField(db_column='NSDDESIGNER', null=True)),
+ ('nsdInvariantId', models.TextField(db_column='NSDINVARIANTID', null=True)),
+ ('vnfPkgIds', models.TextField(db_column='VNFPKGIDS', null=True)),
+ ('pnfdInfoIds', models.TextField(db_column='PNFDINFOIDS', null=True)),
+ ('nestedNsdInfoIds', models.TextField(db_column='NESTEDNSDINFOIDS', null=True)),
+ ('nsdOnboardingState', models.TextField(db_column='NSDONBOARDINGSTATE', null=True)),
+ ('nsdOperationalState', models.TextField(db_column='NSDOPERATIONALSTATE', null=True)),
+ ('nsdUsageState', models.TextField(db_column='NSDUSAGESTATE', null=True)),
+ ('pnfdId', models.TextField(db_column='PNFDID', null=True)),
+ ('pnfdName', models.TextField(db_column='PNFDNAME', null=True)),
+ ('pnfdVersion', models.TextField(db_column='PNFDVERSION', null=True)),
+ ('pnfdProvider', models.TextField(db_column='PNFDPROVIDER', null=True)),
+ ('pnfdInvariantId', models.TextField(db_column='PNFDINVARIANTID', null=True)),
+ ('pnfdOnboardingState', models.TextField(db_column='PNFDONBOARDINGSTATE', null=True)),
+ ('pnfdUsageState', models.TextField(db_column='PNFDUSAGESTATE', null=True)),
+ ('links', models.TextField(db_column='LINKS')),
],
options={
'db_table': 'CATALOG_NSDM_SUBSCRIPTION',
@@ -95,23 +95,23 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='NSPackageModel',
fields=[
- ('nsPackageId', models.CharField(db_column=b'NSPACKAGEID', max_length=50, primary_key=True, serialize=False)),
- ('nsPackageUri', models.CharField(blank=True, db_column=b'NSPACKAGEURI', max_length=300, null=True)),
- ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
- ('sdcCsarId', models.CharField(blank=True, db_column=b'SDCCSARID', max_length=50, null=True)),
- ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
- ('operationalState', models.CharField(blank=True, db_column=b'OPERATIONALSTATE', max_length=20, null=True)),
- ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
- ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
- ('nsdId', models.CharField(blank=True, db_column=b'NSDID', max_length=50, null=True)),
- ('invariantId', models.CharField(blank=True, db_column=b'INVARIANTID', max_length=50, null=True)),
- ('nsdName', models.CharField(blank=True, db_column=b'NSDNAME', max_length=50, null=True)),
- ('nsdDesginer', models.CharField(blank=True, db_column=b'NSDDESIGNER', max_length=50, null=True)),
- ('nsdDescription', models.CharField(blank=True, db_column=b'NSDDESCRIPTION', max_length=100, null=True)),
- ('nsdVersion', models.CharField(blank=True, db_column=b'NSDVERSION', max_length=20, null=True)),
- ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
- ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
- ('nsdModel', models.TextField(blank=True, db_column=b'NSDMODEL', max_length=65535, null=True)),
+ ('nsPackageId', models.CharField(db_column='NSPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+ ('nsPackageUri', models.CharField(blank=True, db_column='NSPACKAGEURI', max_length=300, null=True)),
+ ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
+ ('sdcCsarId', models.CharField(blank=True, db_column='SDCCSARID', max_length=50, null=True)),
+ ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
+ ('operationalState', models.CharField(blank=True, db_column='OPERATIONALSTATE', max_length=20, null=True)),
+ ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
+ ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
+ ('nsdId', models.CharField(blank=True, db_column='NSDID', max_length=50, null=True)),
+ ('invariantId', models.CharField(blank=True, db_column='INVARIANTID', max_length=50, null=True)),
+ ('nsdName', models.CharField(blank=True, db_column='NSDNAME', max_length=50, null=True)),
+ ('nsdDesginer', models.CharField(blank=True, db_column='NSDDESIGNER', max_length=50, null=True)),
+ ('nsdDescription', models.CharField(blank=True, db_column='NSDDESCRIPTION', max_length=100, null=True)),
+ ('nsdVersion', models.CharField(blank=True, db_column='NSDVERSION', max_length=20, null=True)),
+ ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
+ ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
+ ('nsdModel', models.TextField(blank=True, db_column='NSDMODEL', max_length=65535, null=True)),
],
options={
'db_table': 'CATALOG_NSPACKAGE',
@@ -120,22 +120,22 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='PnfPackageModel',
fields=[
- ('pnfPackageId', models.CharField(db_column=b'PNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
- ('pnfPackageUri', models.CharField(blank=True, db_column=b'PNFPACKAGEURI', max_length=300, null=True)),
- ('sdcCSARUri', models.CharField(blank=True, db_column=b'SDCCSARURI', max_length=300, null=True)),
- ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
- ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
- ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
- ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
- ('pnfdId', models.CharField(blank=True, db_column=b'PNFDID', max_length=50, null=True)),
- ('pnfVendor', models.CharField(blank=True, db_column=b'VENDOR', max_length=50, null=True)),
- ('pnfdProductName', models.CharField(blank=True, db_column=b'PNFDPRODUCTNAME', max_length=50, null=True)),
- ('pnfdVersion', models.CharField(blank=True, db_column=b'PNFDVERSION', max_length=20, null=True)),
- ('pnfSoftwareVersion', models.CharField(blank=True, db_column=b'PNFSOFTWAREVERSION', max_length=20, null=True)),
- ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
- ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
- ('pnfdModel', models.TextField(blank=True, db_column=b'PNFDMODEL', max_length=65535, null=True)),
- ('pnfdName', models.TextField(blank=True, db_column=b'PNFDNAME', max_length=65535, null=True)),
+ ('pnfPackageId', models.CharField(db_column='PNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+ ('pnfPackageUri', models.CharField(blank=True, db_column='PNFPACKAGEURI', max_length=300, null=True)),
+ ('sdcCSARUri', models.CharField(blank=True, db_column='SDCCSARURI', max_length=300, null=True)),
+ ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
+ ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
+ ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
+ ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
+ ('pnfdId', models.CharField(blank=True, db_column='PNFDID', max_length=50, null=True)),
+ ('pnfVendor', models.CharField(blank=True, db_column='VENDOR', max_length=50, null=True)),
+ ('pnfdProductName', models.CharField(blank=True, db_column='PNFDPRODUCTNAME', max_length=50, null=True)),
+ ('pnfdVersion', models.CharField(blank=True, db_column='PNFDVERSION', max_length=20, null=True)),
+ ('pnfSoftwareVersion', models.CharField(blank=True, db_column='PNFSOFTWAREVERSION', max_length=20, null=True)),
+ ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
+ ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
+ ('pnfdModel', models.TextField(blank=True, db_column='PNFDMODEL', max_length=65535, null=True)),
+ ('pnfdName', models.TextField(blank=True, db_column='PNFDNAME', max_length=65535, null=True)),
],
options={
'db_table': 'CATALOG_PNFPACKAGE',
@@ -144,23 +144,23 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='ServicePackageModel',
fields=[
- ('servicePackageId', models.CharField(db_column=b'SERVICEPACKAGEID', max_length=50, primary_key=True, serialize=False)),
- ('servicePackageUri', models.CharField(blank=True, db_column=b'SERVICEPACKAGEURI', max_length=300, null=True)),
- ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
- ('sdcCsarId', models.CharField(blank=True, db_column=b'SDCCSARID', max_length=50, null=True)),
- ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
- ('operationalState', models.CharField(blank=True, db_column=b'OPERATIONALSTATE', max_length=20, null=True)),
- ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
- ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
- ('servicedId', models.CharField(blank=True, db_column=b'SERVICEDID', max_length=50, null=True)),
- ('invariantId', models.CharField(blank=True, db_column=b'INVARIANTID', max_length=50, null=True)),
- ('servicedName', models.CharField(blank=True, db_column=b'SERVICEDNAME', max_length=50, null=True)),
- ('servicedDesigner', models.CharField(blank=True, db_column=b'SERVICEDDESIGNER', max_length=50, null=True)),
- ('servicedDescription', models.CharField(blank=True, db_column=b'SERVICEDDESCRIPTION', max_length=100, null=True)),
- ('servicedVersion', models.CharField(blank=True, db_column=b'SERVICEDVERSION', max_length=20, null=True)),
- ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
- ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
- ('servicedModel', models.TextField(blank=True, db_column=b'SERVICEDMODEL', max_length=65535, null=True)),
+ ('servicePackageId', models.CharField(db_column='SERVICEPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+ ('servicePackageUri', models.CharField(blank=True, db_column='SERVICEPACKAGEURI', max_length=300, null=True)),
+ ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
+ ('sdcCsarId', models.CharField(blank=True, db_column='SDCCSARID', max_length=50, null=True)),
+ ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
+ ('operationalState', models.CharField(blank=True, db_column='OPERATIONALSTATE', max_length=20, null=True)),
+ ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
+ ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
+ ('servicedId', models.CharField(blank=True, db_column='SERVICEDID', max_length=50, null=True)),
+ ('invariantId', models.CharField(blank=True, db_column='INVARIANTID', max_length=50, null=True)),
+ ('servicedName', models.CharField(blank=True, db_column='SERVICEDNAME', max_length=50, null=True)),
+ ('servicedDesigner', models.CharField(blank=True, db_column='SERVICEDDESIGNER', max_length=50, null=True)),
+ ('servicedDescription', models.CharField(blank=True, db_column='SERVICEDDESCRIPTION', max_length=100, null=True)),
+ ('servicedVersion', models.CharField(blank=True, db_column='SERVICEDVERSION', max_length=20, null=True)),
+ ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
+ ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
+ ('servicedModel', models.TextField(blank=True, db_column='SERVICEDMODEL', max_length=65535, null=True)),
],
options={
'db_table': 'CATALOG_SERVICEPACKAGE',
@@ -169,16 +169,16 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='SoftwareImageModel',
fields=[
- ('imageid', models.CharField(db_column=b'IMAGEID', max_length=50, primary_key=True, serialize=False)),
- ('containerFormat', models.CharField(db_column=b'CONTAINERFORMAT', max_length=20)),
- ('diskFormat', models.CharField(db_column=b'DISKFORMAT', max_length=20)),
- ('mindisk', models.CharField(db_column=b'MINDISK', max_length=20)),
- ('minram', models.CharField(db_column=b'MINRAM', max_length=20)),
- ('usermetadata', models.CharField(db_column=b'USAERMETADATA', max_length=1024)),
- ('vnfPackageId', models.CharField(db_column=b'VNFPACKAGEID', max_length=50)),
- ('filePath', models.CharField(db_column=b'FILEPATH', max_length=300)),
- ('status', models.CharField(db_column=b'STATUS', max_length=10)),
- ('vimid', models.CharField(db_column=b'VIMID', max_length=50)),
+ ('imageid', models.CharField(db_column='IMAGEID', max_length=50, primary_key=True, serialize=False)),
+ ('containerFormat', models.CharField(db_column='CONTAINERFORMAT', max_length=20)),
+ ('diskFormat', models.CharField(db_column='DISKFORMAT', max_length=20)),
+ ('mindisk', models.CharField(db_column='MINDISK', max_length=20)),
+ ('minram', models.CharField(db_column='MINRAM', max_length=20)),
+ ('usermetadata', models.CharField(db_column='USAERMETADATA', max_length=1024)),
+ ('vnfPackageId', models.CharField(db_column='VNFPACKAGEID', max_length=50)),
+ ('filePath', models.CharField(db_column='FILEPATH', max_length=300)),
+ ('status', models.CharField(db_column='STATUS', max_length=10)),
+ ('vimid', models.CharField(db_column='VIMID', max_length=50)),
],
options={
'db_table': 'CATALOG_SOFTWAREIMAGEMODEL',
@@ -187,22 +187,22 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='VnfPackageModel',
fields=[
- ('vnfPackageId', models.CharField(db_column=b'VNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
- ('vnfPackageUri', models.CharField(blank=True, db_column=b'VNFPACKAGEURI', max_length=300, null=True)),
- ('SdcCSARUri', models.CharField(blank=True, db_column=b'SDCCSARURI', max_length=300, null=True)),
- ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
- ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
- ('operationalState', models.CharField(blank=True, db_column=b'OPERATIONALSTATE', max_length=20, null=True)),
- ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
- ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
- ('vnfdId', models.CharField(blank=True, db_column=b'VNFDID', max_length=50, null=True)),
- ('vnfVendor', models.CharField(blank=True, db_column=b'VENDOR', max_length=50, null=True)),
- ('vnfdProductName', models.CharField(blank=True, db_column=b'VNFDPRODUCTNAME', max_length=50, null=True)),
- ('vnfdVersion', models.CharField(blank=True, db_column=b'VNFDVERSION', max_length=20, null=True)),
- ('vnfSoftwareVersion', models.CharField(blank=True, db_column=b'VNFSOFTWAREVERSION', max_length=20, null=True)),
- ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
- ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
- ('vnfdModel', models.TextField(blank=True, db_column=b'VNFDMODEL', max_length=65535, null=True)),
+ ('vnfPackageId', models.CharField(db_column='VNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+ ('vnfPackageUri', models.CharField(blank=True, db_column='VNFPACKAGEURI', max_length=300, null=True)),
+ ('SdcCSARUri', models.CharField(blank=True, db_column='SDCCSARURI', max_length=300, null=True)),
+ ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
+ ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
+ ('operationalState', models.CharField(blank=True, db_column='OPERATIONALSTATE', max_length=20, null=True)),
+ ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
+ ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
+ ('vnfdId', models.CharField(blank=True, db_column='VNFDID', max_length=50, null=True)),
+ ('vnfVendor', models.CharField(blank=True, db_column='VENDOR', max_length=50, null=True)),
+ ('vnfdProductName', models.CharField(blank=True, db_column='VNFDPRODUCTNAME', max_length=50, null=True)),
+ ('vnfdVersion', models.CharField(blank=True, db_column='VNFDVERSION', max_length=20, null=True)),
+ ('vnfSoftwareVersion', models.CharField(blank=True, db_column='VNFSOFTWAREVERSION', max_length=20, null=True)),
+ ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
+ ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
+ ('vnfdModel', models.TextField(blank=True, db_column='VNFDMODEL', max_length=65535, null=True)),
],
options={
'db_table': 'CATALOG_VNFPACKAGE',
@@ -211,16 +211,16 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='VnfPkgSubscriptionModel',
fields=[
- ('subscription_id', models.CharField(db_column=b'SUBSCRIPTION_ID', max_length=255, primary_key=True, serialize=False)),
- ('callback_uri', models.URLField(db_column=b'CALLBACK_URI', max_length=255)),
- ('auth_info', models.TextField(db_column=b'AUTH_INFO')),
- ('usage_states', models.TextField(db_column=b'USAGE_STATES')),
- ('notification_types', models.TextField(db_column=b'NOTIFICATION_TYPES')),
- ('vnfd_id', models.TextField(db_column=b'VNFD_ID')),
- ('vnf_pkg_id', models.TextField(db_column=b'VNF_PKG_ID')),
- ('operation_states', models.TextField(db_column=b'OPERATION_STATES')),
- ('vnf_products_from_provider', models.TextField(db_column=b'VNF_PRODUCTS_FROM_PROVIDER')),
- ('links', models.TextField(db_column=b'LINKS')),
+ ('subscription_id', models.CharField(db_column='SUBSCRIPTION_ID', max_length=255, primary_key=True, serialize=False)),
+ ('callback_uri', models.URLField(db_column='CALLBACK_URI', max_length=255)),
+ ('auth_info', models.TextField(db_column='AUTH_INFO')),
+ ('usage_states', models.TextField(db_column='USAGE_STATES')),
+ ('notification_types', models.TextField(db_column='NOTIFICATION_TYPES')),
+ ('vnfd_id', models.TextField(db_column='VNFD_ID')),
+ ('vnf_pkg_id', models.TextField(db_column='VNF_PKG_ID')),
+ ('operation_states', models.TextField(db_column='OPERATION_STATES')),
+ ('vnf_products_from_provider', models.TextField(db_column='VNF_PRODUCTS_FROM_PROVIDER')),
+ ('links', models.TextField(db_column='LINKS')),
],
options={
'db_table': 'VNF_PKG_SUBSCRIPTION',
diff --git a/genericparser/pub/redisco/__init__.py b/genericparser/pub/redisco/__init__.py
new file mode 100644
index 0000000..217a232
--- /dev/null
+++ b/genericparser/pub/redisco/__init__.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2010 Tim Medina
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+# The original code link is https://github.com/iamteem/redisco/tree/master/redisco/__init__.py
+
+
+import redis
+
+
+class Client(object):
+ def __init__(self, **kwargs):
+ self.connection_settings = kwargs or {'host': 'localhost', 'port': 6379, 'db': 0}
+
+ def redis(self):
+ return redis.Redis(**self.connection_settings)
+
+ def update(self, d):
+ self.connection_settings.update(d)
+
+
+def connection_setup(**kwargs):
+ global connection, client
+ if client:
+ client.update(kwargs)
+ else:
+ client = Client(**kwargs)
+ connection = client.redis()
+
+
+def get_client():
+ global connection
+ return connection
+
+
+client = Client()
+connection = client.redis()
+
+__all__ = ['connection_setup', 'get_client']
diff --git a/genericparser/pub/redisco/containers.py b/genericparser/pub/redisco/containers.py
new file mode 100644
index 0000000..8957294
--- /dev/null
+++ b/genericparser/pub/redisco/containers.py
@@ -0,0 +1,116 @@
+# Copyright (c) 2010 Tim Medina
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+# The original code link is https://github.com/iamteem/redisco/tree/master/redisco/containers.py
+
+"""
+This module contains the container classes to create objects
+that persist directly in a Redis server.
+"""
+
+import collections
+from functools import partial
+
+
+class Container(object):
+ """Create a container object saved in Redis.
+
+ Arguments:
+ key -- the Redis key this container is stored at
+ db -- the Redis client object. Default: None
+
+ When ``db`` is not set, the gets the default connection from
+ ``redisco.connection`` module.
+ """
+
+ def __init__(self, key, db=None, pipeline=None):
+ self._db = db
+ self.key = key
+ self.pipeline = pipeline
+
+ def clear(self):
+ """Remove container from Redis database."""
+ del self.db[self.key]
+
+ def __getattribute__(self, att):
+ if att in object.__getattribute__(self, 'DELEGATEABLE_METHODS'):
+ return partial(getattr(object.__getattribute__(self, 'db'), att), self.key)
+ else:
+ return object.__getattribute__(self, att)
+
+ @property
+ def db(self):
+ if self.pipeline:
+ return self.pipeline
+ if self._db:
+ return self._db
+ if hasattr(self, 'db_cache') and self.db_cache:
+ return self.db_cache
+ else:
+ from redisco import connection
+ self.db_cache = connection
+ return self.db_cache
+
+ DELEGATEABLE_METHODS = ()
+
+
+class Hash(Container, collections.MutableMapping):
+
+ def __getitem__(self, att):
+ return self.hget(att)
+
+ def __setitem__(self, att, val):
+ self.hset(att, val)
+
+ def __delitem__(self, att):
+ self.hdel(att)
+
+ def __len__(self):
+ return self.hlen()
+
+ def __iter__(self):
+ return self.hgetall().__iter__()
+
+ def __contains__(self, att):
+ return self.hexists(att)
+
+ def __repr__(self):
+ return "<%s '%s' %s>" % (self.__class__.__name__, self.key, self.hgetall())
+
+ def keys(self):
+ return self.hkeys()
+
+ def values(self):
+ return self.hvals()
+
+ def _get_dict(self):
+ return self.hgetall()
+
+ def _set_dict(self, new_dict):
+ self.clear()
+ self.update(new_dict)
+
+ dict = property(_get_dict, _set_dict)
+
+ DELEGATEABLE_METHODS = ('hlen', 'hset', 'hdel', 'hkeys', 'hgetall', 'hvals',
+ 'hget', 'hexists', 'hincrby', 'hmget', 'hmset')
diff --git a/genericparser/pub/utils/fileutil.py b/genericparser/pub/utils/fileutil.py
index d7811b8..6ddfc72 100644
--- a/genericparser/pub/utils/fileutil.py
+++ b/genericparser/pub/utils/fileutil.py
@@ -16,7 +16,7 @@ import shutil
import logging
import tempfile
import traceback
-import urllib2
+import urllib.request
import zipfile
@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
def make_dirs(path):
if not os.path.exists(path):
- os.makedirs(path, 0777)
+ os.makedirs(path, 0o777)
def delete_dirs(path):
@@ -34,7 +34,7 @@ def delete_dirs(path):
shutil.rmtree(path)
except Exception as e:
logger.error(traceback.format_exc())
- logger.error("Failed to delete %s:%s", path, e.message)
+ logger.error("Failed to delete %s:%s", path, str(e))
def download_file_from_http(url, local_dir, file_name):
@@ -42,9 +42,8 @@ def download_file_from_http(url, local_dir, file_name):
is_download_ok = False
try:
make_dirs(local_dir)
- r = urllib2.Request(url)
- req = urllib2.urlopen(r)
- save_file = open(local_file_name, 'wb')
+ req = urllib.request.urlopen(url)
+ save_file = open(local_file_name, 'wb')
save_file.write(req.read())
save_file.close()
req.close()
diff --git a/genericparser/pub/utils/idutil.py b/genericparser/pub/utils/idutil.py
index 85bebb8..768416c 100644
--- a/genericparser/pub/utils/idutil.py
+++ b/genericparser/pub/utils/idutil.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from redisco import containers as cont
+from genericparser.pub.redisco import containers as cont
def get_auto_id(id_type, id_group="auto_id_hash"):
diff --git a/genericparser/pub/utils/jobutil.py b/genericparser/pub/utils/jobutil.py
index c06c72d..abe5227 100644
--- a/genericparser/pub/utils/jobutil.py
+++ b/genericparser/pub/utils/jobutil.py
@@ -15,6 +15,7 @@ import datetime
import logging
import uuid
import traceback
+from functools import reduce
from genericparser.pub.database.models import JobStatusModel, JobModel
from genericparser.pub.utils import idutil
diff --git a/genericparser/pub/utils/restcall.py b/genericparser/pub/utils/restcall.py
index a8944b5..c83f69d 100644
--- a/genericparser/pub/utils/restcall.py
+++ b/genericparser/pub/utils/restcall.py
@@ -15,7 +15,7 @@
import sys
import traceback
import logging
-import urllib2
+import urllib.error
import uuid
import httplib2
@@ -65,7 +65,7 @@ def call_req(base_url, user, passwd, auth_type, resource, method, content='', ad
ret = [1, "Unable to connect to %s" % full_url, resp_status]
continue
raise ex
- except urllib2.URLError as err:
+ except urllib.error.URLError as err:
ret = [2, str(err), resp_status]
except Exception as ex:
logger.error(traceback.format_exc())
diff --git a/genericparser/pub/utils/tests.py b/genericparser/pub/utils/tests.py
index 7f8a391..8390ce7 100644
--- a/genericparser/pub/utils/tests.py
+++ b/genericparser/pub/utils/tests.py
@@ -15,11 +15,11 @@
import platform
import unittest
import mock
-import fileutil
-import urllib2
-import syscomm
-import timeutil
-import values
+from . import fileutil
+import urllib.request
+from . import syscomm
+from . import timeutil
+from . import values
from genericparser.pub.database.models import JobStatusModel, JobModel
from genericparser.pub.utils.jobutil import JobUtil
@@ -46,7 +46,7 @@ class UtilsTest(unittest.TestCase):
fileutil.make_dirs(dirs)
fileutil.delete_dirs(dirs)
- @mock.patch.object(urllib2, 'urlopen')
+ @mock.patch.object(urllib.request, 'urlopen')
def test_download_file_from_http(self, mock_urlopen):
mock_urlopen.return_value = MockReq()
fileutil.delete_dirs("abc")
diff --git a/genericparser/pub/utils/toscaparsers/basemodel.py b/genericparser/pub/utils/toscaparsers/basemodel.py
index 0110df9..643041d 100644
--- a/genericparser/pub/utils/toscaparsers/basemodel.py
+++ b/genericparser/pub/utils/toscaparsers/basemodel.py
@@ -77,7 +77,7 @@ class BaseInfoModel(object):
try:
os.remove(file_name)
except Exception as e:
- logger.error("Failed to parse package, error: %s", e.message)
+ logger.error("Failed to parse package, error: %s", str(e))
def _validate_input_params(self, path, params):
valid_params = {}
@@ -92,7 +92,7 @@ class BaseInfoModel(object):
if params:
tmp = self._create_tosca_template(path, None)
if isinstance(params, dict):
- for key, value in params.items():
+ for key, value in list(params.items()):
if hasattr(tmp, 'inputs') and len(tmp.inputs) > 0:
for input_def in tmp.inputs:
if (input_def.name == key):
@@ -107,16 +107,16 @@ class BaseInfoModel(object):
no_required_paras_check=True,
debug_mode=True)
except Exception as e:
- print e.message
+ print(str(e))
finally:
if tosca_tpl is not None and hasattr(tosca_tpl, "temp_dir") and os.path.exists(tosca_tpl.temp_dir):
try:
shutil.rmtree(tosca_tpl.temp_dir)
except Exception as e:
- logger.error("Failed to create tosca template, error: %s", e.message)
- print "-----------------------------"
- print '\n'.join(['%s:%s' % item for item in tosca_tpl.__dict__.items()])
- print "-----------------------------"
+ logger.error("Failed to create tosca template, error: %s", str(e))
+ print("-----------------------------")
+ print('\n'.join(['%s:%s' % item for item in list(tosca_tpl.__dict__.items())]))
+ print("-----------------------------")
return tosca_tpl
def _check_download_file(self, path):
@@ -130,7 +130,7 @@ class BaseInfoModel(object):
path = path.encode("utf-8")
tmps = str.split(path, '/')
localFileName = tmps[len(tmps) - 1]
- urllib.urlretrieve(path, localFileName)
+ urllib.request.urlretrieve(path, localFileName)
return localFileName
def downloadFileFromFtpServer(self, path):
@@ -222,7 +222,7 @@ class BaseInfoModel(object):
def buildProperties(self, nodeTemplate, parsed_params):
properties = {}
isMappingParams = parsed_params and len(parsed_params) > 0
- for k, item in nodeTemplate.get_properties().items():
+ for k, item in list(nodeTemplate.get_properties().items()):
properties[k] = item.value
if isinstance(item.value, GetInput):
if item.value.result() and isMappingParams:
@@ -232,7 +232,7 @@ class BaseInfoModel(object):
tmp[item.value.name] = item.value.input_name
properties[k] = tmp
if ATTRIBUTES in nodeTemplate.entity_tpl:
- for k, item in nodeTemplate.entity_tpl[ATTRIBUTES].items():
+ for k, item in list(nodeTemplate.entity_tpl[ATTRIBUTES].items()):
properties[k] = str(item)
return properties
@@ -241,7 +241,7 @@ class BaseInfoModel(object):
properties = nodeTemplate.get_properties()
_properties = {}
if isinstance(properties, dict):
- for name, prop in properties.items():
+ for name, prop in list(properties.items()):
if isinstance(prop, Property):
if isinstance(prop.value, Function):
if isinstance(prop.value, Concat): # support one layer inner function.
@@ -251,7 +251,7 @@ class BaseInfoModel(object):
value_str += arg
elif isinstance(arg, dict):
raw_func = {}
- for k, v in arg.items():
+ for k, v in list(arg.items()):
func_args = []
func_args.append(v)
raw_func[k] = func_args
@@ -282,7 +282,7 @@ class BaseInfoModel(object):
value_str += arg
elif isinstance(arg, dict):
raw_func = {}
- for k, v in arg.items():
+ for k, v in list(arg.items()):
func_args = []
func_args.append(v)
raw_func[k] = func_args
@@ -306,7 +306,7 @@ class BaseInfoModel(object):
def verify_properties(self, props, inputs, parsed_params):
ret_props = {}
if (props and len(props) > 0):
- for key, value in props.items():
+ for key, value in list(props.items()):
ret_props[key] = self._verify_value(value, inputs, parsed_params)
# if isinstance(value, str):
# ret_props[key] = self._verify_string(inputs, parsed_params, value);
@@ -323,7 +323,7 @@ class BaseInfoModel(object):
def build_requirements(self, node_template):
rets = []
for req in node_template.requirements:
- for req_name, req_value in req.items():
+ for req_name, req_value in list(req.items()):
if (isinstance(req_value, dict)):
if ('node' in req_value and req_value['node'] not in node_template.templates):
continue # No target requirement for aria parser, not add to result.
@@ -385,7 +385,7 @@ class BaseInfoModel(object):
requirements = []
if REQUIREMENTS in node:
for item in node[REQUIREMENTS]:
- for key, value in item.items():
+ for key, value in list(item.items()):
if key == requirementName:
requirements.append(value)
return requirements
@@ -439,7 +439,7 @@ class BaseInfoModel(object):
rets = []
if ARTIFACTS in node and len(node[ARTIFACTS]) > 0:
artifacts = node[ARTIFACTS]
- for name, value in artifacts.items():
+ for name, value in list(artifacts.items()):
ret = {}
ret['artifact_name'] = name
ret['file'] = value
@@ -491,7 +491,7 @@ class BaseInfoModel(object):
for type_require in type_requires:
type_require_set.update(type_require)
for requirement in node.requirements:
- for k in requirement.keys():
+ for k in list(requirement.keys()):
if type_require_set[k].get('relationship', None) in relations[0] or type_require_set[k].get('capability', None) in relations[0]:
if isinstance(requirement[k], dict):
next_node = requirement[k].get('node', None)
diff --git a/genericparser/pub/utils/toscaparsers/graph.py b/genericparser/pub/utils/toscaparsers/graph.py
index 6d38d12..0af2a14 100644
--- a/genericparser/pub/utils/toscaparsers/graph.py
+++ b/genericparser/pub/utils/toscaparsers/graph.py
@@ -21,7 +21,7 @@ class Graph(object):
def __init__(self, graph_dict=None):
self.graph = OrderedDict()
if graph_dict:
- for node, dep_nodes in graph_dict.iteritems():
+ for node, dep_nodes in list(graph_dict.items()):
self.add_node(node, dep_nodes)
def add_node(self, node, dep_nodes):
@@ -67,7 +67,7 @@ class Graph(object):
def to_dict(self):
dict = {}
- for node, dependents in self.graph.iteritems():
+ for node, dependents in self.graph.items():
dict[node] = []
for dep in dependents:
dict[node].append(dep)
diff --git a/genericparser/pub/utils/toscaparsers/nsdmodel.py b/genericparser/pub/utils/toscaparsers/nsdmodel.py
index fe522a7..9cc706f 100644
--- a/genericparser/pub/utils/toscaparsers/nsdmodel.py
+++ b/genericparser/pub/utils/toscaparsers/nsdmodel.py
@@ -60,7 +60,7 @@ class EtsiNsdInfoModel(BaseInfoModel):
self.metadata = self.buildMetadata(tosca)
self.ns = self._build_ns(tosca)
self.inputs = self.buildInputs(tosca)
- nodeTemplates = map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates)
+ nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
types = tosca.topology_template.custom_defs
self.basepath = self.get_base_path(tosca)
self.vnfs = self._get_all_vnf(nodeTemplates, types)
@@ -126,7 +126,7 @@ class EtsiNsdInfoModel(BaseInfoModel):
forwarderList = []
if 'requirements' in node:
for item in node['requirements']:
- for key, value in item.items():
+ for key, value in list(item.items()):
if key == 'forwarder':
tmpnode = self.get_node_by_req(node_templates, value)
type = 'pnf' if self.isNodeTypeX(tmpnode, node_types, NS_PNF_TYPE) else 'vnf'
@@ -161,7 +161,7 @@ class EtsiNsdInfoModel(BaseInfoModel):
def _get_external_cps(self, subs_mappings):
external_cps = []
if 'requirements' in subs_mappings:
- for key, value in subs_mappings['requirements'].items():
+ for key, value in list(subs_mappings['requirements'].items()):
if isinstance(value, list) and len(value) > 0:
external_cps.append({"key_name": key, "cpd_id": value[0]})
else:
@@ -171,7 +171,7 @@ class EtsiNsdInfoModel(BaseInfoModel):
def _get_forward_cps(self, subs_mappings):
forward_cps = []
if 'capabilities' in subs_mappings:
- for key, value in subs_mappings['capabilities'].items():
+ for key, value in list(subs_mappings['capabilities'].items()):
if isinstance(value, list) and len(value) > 0:
forward_cps.append({"key_name": key, "cpd_id": value[0]})
else:
@@ -194,7 +194,7 @@ class EtsiNsdInfoModel(BaseInfoModel):
rets = []
if 'requirements' in node and (self.isNodeTypeX(node, node_types, NS_TYPE) or self.isNodeTypeX(node, node_types, NS_VNF_TYPE)):
for item in node['requirements']:
- for key, value in item.items():
+ for key, value in list(item.items()):
rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)})
return rets
diff --git a/genericparser/pub/utils/toscaparsers/pnfmodel.py b/genericparser/pub/utils/toscaparsers/pnfmodel.py
index 0f5445f..546861b 100644
--- a/genericparser/pub/utils/toscaparsers/pnfmodel.py
+++ b/genericparser/pub/utils/toscaparsers/pnfmodel.py
@@ -37,7 +37,7 @@ class PnfdInfoModel(BaseInfoModel):
pnf_substitution_mappings = tosca.tpl['topology_template'].get('substitution_mappings', None)
if pnf_substitution_mappings:
self.pnf['type'] = pnf_substitution_mappings['node_type']
- self.pnf['properties'] = pnf_substitution_mappings['properties']
+ self.pnf['properties'] = pnf_substitution_mappings.get('properties', {})
def get_all_cp(self, nodeTemplates):
self.pnf['ExtPorts'] = []
diff --git a/genericparser/pub/utils/toscaparsers/sdmodel.py b/genericparser/pub/utils/toscaparsers/sdmodel.py
index 8cca07e..7635ab3 100644
--- a/genericparser/pub/utils/toscaparsers/sdmodel.py
+++ b/genericparser/pub/utils/toscaparsers/sdmodel.py
@@ -64,14 +64,14 @@ class SdInfoModel(BaseInfoModel):
def get_child_input_repeat(self, complex_input, entry_schema, input):
custom_defs = input.custom_defs
properties = custom_defs[entry_schema]['properties']
- for key, value in properties.iteritems():
+ for key, value in properties.items():
if value['type'].__eq__('list'):
child_complex_input = []
child_entry_schema = self.get_entry_schema(value['entry_schema'])
self.get_child_input_repeat(child_complex_input, child_entry_schema, input)
complex_input.append({key: child_complex_input})
else:
- if 'description' in value.keys():
+ if 'description' in list(value.keys()):
simple_input = {
key: "",
"type": value['type'],
@@ -88,6 +88,6 @@ class SdInfoModel(BaseInfoModel):
def get_entry_schema(self, entry_schema):
if isinstance(entry_schema, dict):
- if 'type' in entry_schema.keys():
+ if 'type' in list(entry_schema.keys()):
entry_schema = entry_schema['type']
return entry_schema
diff --git a/genericparser/pub/utils/toscaparsers/servicemodel.py b/genericparser/pub/utils/toscaparsers/servicemodel.py
index 069d402..6321e04 100644
--- a/genericparser/pub/utils/toscaparsers/servicemodel.py
+++ b/genericparser/pub/utils/toscaparsers/servicemodel.py
@@ -54,7 +54,7 @@ class SdcServiceModel(BaseInfoModel):
self.ns = self._build_ns(tosca)
self.inputs = self.buildInputs(tosca)
if hasattr(tosca, 'nodetemplates'):
- nodeTemplates = map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates)
+ nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
types = tosca.topology_template.custom_defs
self.basepath = self.get_base_path(tosca)
self.vnfs = self._get_all_vnf(nodeTemplates, types)
@@ -166,7 +166,7 @@ class SdcServiceModel(BaseInfoModel):
rets = []
if 'requirements' in node and self.isNodeTypeX(node, node_types, VF_TYPE):
for item in node['requirements']:
- for key, value in item.items():
+ for key, value in list(item.items()):
rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)})
return rets
diff --git a/genericparser/pub/utils/toscaparsers/vnfdmodel.py b/genericparser/pub/utils/toscaparsers/vnfdmodel.py
index 95ae2c1..2e48b4d 100644
--- a/genericparser/pub/utils/toscaparsers/vnfdmodel.py
+++ b/genericparser/pub/utils/toscaparsers/vnfdmodel.py
@@ -34,7 +34,7 @@ class EtsiVnfdInfoModel(BaseInfoModel):
def parseModel(self, tosca):
self.metadata = self.buildMetadata(tosca)
self.inputs = self.buildInputs(tosca)
- nodeTemplates = map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates)
+ nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
self.basepath = self.get_base_path(tosca)
node_types = tosca.topology_template.custom_defs
sol_version = self.metadata.get("VNFD_SCHEMA_VERSION", VNFD_SCHEMA_VERSION_DEFAULT) if isinstance(self.metadata, dict) else VNFD_SCHEMA_VERSION_DEFAULT
diff --git a/genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_251.py b/genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_251.py
index 6ad26e8..d0365a7 100644
--- a/genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_251.py
+++ b/genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_251.py
@@ -33,7 +33,7 @@ class VnfdSOL251():
properties = vnf.get("properties", {})
metadata = vnf.get("metadata", {})
- for key, value in properties.items():
+ for key, value in list(properties.items()):
if isinstance(value, dict):
if value["type"] == "string":
properties[key] = value.get("default", "")
@@ -139,19 +139,17 @@ class VnfdSOL251():
if isinstance(inject_files, list):
for inject_file in inject_files:
source_path = os.path.join(self.model.basepath, inject_file['source_path'])
- with open(source_path, "rb") as f:
+ with open(source_path, "rb") as f:
source_data = f.read()
- source_data_base64 = source_data.encode("base64")
- inject_file["source_data_base64"] = source_data_base64
+ inject_file["source_data_base64"] = base64.b64encode(source_data).decode()
if isinstance(inject_files, dict):
source_path = os.path.join(self.model.basepath, inject_files['source_path'])
- with open(source_path, "rb") as f:
+ with open(source_path, "rb") as f:
source_data = f.read()
- source_data_base64 = source_data.encode("base64")
- inject_files["source_data_base64"] = source_data_base64
+ inject_files["source_data_base64"] = base64.b64encode(source_data).decode()
virtual_storages = self.model.getRequirementByName(node, 'virtual_storage')
- ret['virtual_storages'] = map(functools.partial(self._trans_virtual_storage), virtual_storages)
- ret['dependencies'] = map(lambda x: self.model.get_requirement_node_name(x), self.model.getNodeDependencys(node))
+ ret['virtual_storages'] = list(map(functools.partial(self._trans_virtual_storage), virtual_storages))
+ ret['dependencies'] = [self.model.get_requirement_node_name(x) for x in self.model.getNodeDependencys(node)]
virtual_compute = self.model.getCapabilityByName(node, 'virtual_compute')
if virtual_compute is not None and 'properties' in virtual_compute:
ret['virtual_compute'] = virtual_compute['properties']
@@ -194,14 +192,14 @@ class VnfdSOL251():
return vl_ids
def _get_virtal_binding_cp_ids(self, node, nodeTemplates):
- return map(lambda x: x['name'], self._get_virtal_binding_cps(node, nodeTemplates))
+ return [x['name'] for x in self._get_virtal_binding_cps(node, nodeTemplates)]
def _get_virtal_binding_cps(self, node, nodeTemplates):
cps = []
for tmpnode in nodeTemplates:
if 'requirements' in tmpnode:
for item in tmpnode['requirements']:
- for key, value in item.items():
+ for key, value in list(item.items()):
if key.upper().startswith('VIRTUAL_BINDING'):
req_node_name = self.model.get_requirement_node_name(value)
if req_node_name is not None and req_node_name == node['name']:
@@ -209,19 +207,19 @@ class VnfdSOL251():
return cps
def _get_node_vdu_id(self, node):
- vdu_ids = map(lambda x: self.model.get_requirement_node_name(x), self.model.getRequirementByName(node, 'virtual_binding'))
+ vdu_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_binding')]
if len(vdu_ids) > 0:
return vdu_ids[0]
return ""
def _get_node_vl_id(self, node):
- vl_ids = map(lambda x: self.model.get_requirement_node_name(x), self.model.getRequirementByName(node, 'virtual_link'))
+ vl_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
if len(vl_ids) > 0:
return vl_ids[0]
return ""
def _buil_cp_vls(self, node):
- return map(lambda x: self._build_cp_vl(x), self.model.getRequirementByName(node, 'virtual_link'))
+ return [self._build_cp_vl(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
def _build_cp_vl(self, req):
cp_vl = {}
@@ -230,7 +228,7 @@ class VnfdSOL251():
if relationship is not None:
properties = self.model.get_prop_from_obj(relationship, 'properties')
if properties is not None and isinstance(properties, dict):
- for key, value in properties.items():
+ for key, value in list(properties.items()):
cp_vl[key] = value
return cp_vl
@@ -238,14 +236,14 @@ class VnfdSOL251():
external_cps = []
if vnf_requirements:
if isinstance(vnf_requirements, dict):
- for key, value in vnf_requirements.items():
+ for key, value in list(vnf_requirements.items()):
if isinstance(value, list) and len(value) > 0:
external_cps.append({"key_name": key, "cpd_id": value[0]})
else:
external_cps.append({"key_name": key, "cpd_id": value})
elif isinstance(vnf_requirements, list):
for vnf_requirement in vnf_requirements:
- for key, value in vnf_requirement.items():
+ for key, value in list(vnf_requirement.items()):
if isinstance(value, list) and len(value) > 0:
external_cps.append({"key_name": key, "cpd_id": value[0]})
else:
@@ -255,7 +253,7 @@ class VnfdSOL251():
def _get_forward_cps(self, vnf_capabilities):
forward_cps = []
if vnf_capabilities:
- for key, value in vnf_capabilities.items():
+ for key, value in list(vnf_capabilities.items()):
if isinstance(value, list) and len(value) > 0:
forward_cps.append({"key_name": key, "cpd_id": value[0]})
else:
diff --git a/genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_base.py b/genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_base.py
index 89ddc84..cc941b2 100644
--- a/genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_base.py
+++ b/genericparser/pub/utils/toscaparsers/vnfdparser/vnfd_sol_base.py
@@ -118,19 +118,17 @@ class VnfdSOLBase():
if isinstance(inject_files, list):
for inject_file in inject_files:
source_path = os.path.join(self.model.basepath, inject_file['source_path'])
- with open(source_path, "rb") as f:
+ with open(source_path, "rb") as f:
source_data = f.read()
- source_data_base64 = source_data.encode("base64")
- inject_file["source_data_base64"] = source_data_base64
+ inject_file["source_data_base64"] = base64.b64encode(source_data).decode()
if isinstance(inject_files, dict):
source_path = os.path.join(self.model.basepath, inject_files['source_path'])
- with open(source_path, "rb") as f:
+ with open(source_path, "rb") as f:
source_data = f.read()
- source_data_base64 = source_data.encode("base64")
- inject_files["source_data_base64"] = source_data_base64
+ inject_files["source_data_base64"] = base64.b64encode(source_data).decode()
virtual_storages = self.model.getRequirementByName(node, 'virtual_storage')
- ret['virtual_storages'] = map(functools.partial(self._trans_virtual_storage), virtual_storages)
- ret['dependencies'] = map(lambda x: self.model.get_requirement_node_name(x), self.model.getNodeDependencys(node))
+ ret['virtual_storages'] = list(map(functools.partial(self._trans_virtual_storage), virtual_storages))
+ ret['dependencies'] = [self.model.get_requirement_node_name(x) for x in self.model.getNodeDependencys(node)]
virtual_compute = self.model.getCapabilityByName(node, 'virtual_compute')
if virtual_compute is not None and 'properties' in virtual_compute:
ret['virtual_compute'] = virtual_compute['properties']
@@ -166,14 +164,14 @@ class VnfdSOLBase():
return vl_ids
def _get_virtal_binding_cp_ids(self, node, nodeTemplates):
- return map(lambda x: x['name'], self._get_virtal_binding_cps(node, nodeTemplates))
+ return [x['name'] for x in self._get_virtal_binding_cps(node, nodeTemplates)]
def _get_virtal_binding_cps(self, node, nodeTemplates):
cps = []
for tmpnode in nodeTemplates:
if 'requirements' in tmpnode:
for item in tmpnode['requirements']:
- for key, value in item.items():
+ for key, value in list(item.items()):
if key.upper().startswith('VIRTUAL_BINDING'):
req_node_name = self.model.get_requirement_node_name(value)
if req_node_name is not None and req_node_name == node['name']:
@@ -181,19 +179,19 @@ class VnfdSOLBase():
return cps
def _get_node_vdu_id(self, node):
- vdu_ids = map(lambda x: self.model.get_requirement_node_name(x), self.model.getRequirementByName(node, 'virtual_binding'))
+ vdu_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_binding')]
if len(vdu_ids) > 0:
return vdu_ids[0]
return ""
def _get_node_vl_id(self, node):
- vl_ids = map(lambda x: self.model.get_requirement_node_name(x), self.model.getRequirementByName(node, 'virtual_link'))
+ vl_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
if len(vl_ids) > 0:
return vl_ids[0]
return ""
def _buil_cp_vls(self, node):
- return map(lambda x: self._build_cp_vl(x), self.model.getRequirementByName(node, 'virtual_link'))
+ return [self._build_cp_vl(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
def _build_cp_vl(self, req):
cp_vl = {}
@@ -202,7 +200,7 @@ class VnfdSOLBase():
if relationship is not None:
properties = self.model.get_prop_from_obj(relationship, 'properties')
if properties is not None and isinstance(properties, dict):
- for key, value in properties.items():
+ for key, value in list(properties.items()):
cp_vl[key] = value
return cp_vl
@@ -210,14 +208,14 @@ class VnfdSOLBase():
external_cps = []
if vnf_requirements:
if isinstance(vnf_requirements, dict):
- for key, value in vnf_requirements.items():
+ for key, value in list(vnf_requirements.items()):
if isinstance(value, list) and len(value) > 0:
external_cps.append({"key_name": key, "cpd_id": value[0]})
else:
external_cps.append({"key_name": key, "cpd_id": value})
elif isinstance(vnf_requirements, list):
for vnf_requirement in vnf_requirements:
- for key, value in vnf_requirement.items():
+ for key, value in list(vnf_requirement.items()):
if isinstance(value, list) and len(value) > 0:
external_cps.append({"key_name": key, "cpd_id": value[0]})
else:
@@ -227,7 +225,7 @@ class VnfdSOLBase():
def _get_forward_cps(self, vnf_capabilities):
forward_cps = []
if vnf_capabilities:
- for key, value in vnf_capabilities.items():
+ for key, value in list(vnf_capabilities.items()):
if isinstance(value, list) and len(value) > 0:
forward_cps.append({"key_name": key, "cpd_id": value[0]})
else:
diff --git a/genericparser/pub/utils/values.py b/genericparser/pub/utils/values.py
index 0fd2d1a..d02d544 100644
--- a/genericparser/pub/utils/values.py
+++ b/genericparser/pub/utils/values.py
@@ -22,3 +22,12 @@ def ignore_case_get(args, key, def_val=""):
if old_key.upper() == key.upper():
return args[old_key]
return def_val
+
+
+def remove_none_key(data, none_list=None):
+ none_list = none_list if none_list is not None else [None, '', 'NULL', 'None', [], {}]
+ if isinstance(data, dict):
+ data = dict([(k, remove_none_key(v, none_list)) for k, v in list(data.items()) if v not in none_list])
+ if isinstance(data, list):
+ data = [remove_none_key(s, none_list) for s in data if s not in none_list]
+ return data