author    Olivia.Zhan <zhan.jie1@zte.com.cn>  2019-07-12 10:40:38 +0800
committer Olivia.Zhan <zhan.jie1@zte.com.cn>  2019-07-12 11:09:03 +0800
commit    74602ff5cc299a5071a26cec6719435d39658062 (patch)
tree      a0a2434858f8c919a048b409c5c2c58d9fda3179
parent    22185d6f712a6b76bd4757961eed8baf62d81e71 (diff)
Update python2 to python3 for catalog
Issue-ID: VFC-1429
Signed-off-by: Olivia.Zhan <zhan.jie1@zte.com.cn>
Change-Id: I9b7bd50c0a750c046643e4a8df46e12165eaa4c1
-rw-r--r--  catalog/jobs/tests/tests.py | 2
-rw-r--r--  catalog/packages/biz/nsdm_subscription.py | 27
-rw-r--r--  catalog/packages/biz/pnf_descriptor.py | 6
-rw-r--r--  catalog/packages/biz/sdc_ns_package.py | 14
-rw-r--r--  catalog/packages/biz/sdc_service_package.py | 4
-rw-r--r--  catalog/packages/biz/sdc_vnf_package.py | 22
-rw-r--r--  catalog/packages/biz/vnf_package.py | 11
-rw-r--r--  catalog/packages/biz/vnf_pkg_artifacts.py | 2
-rwxr-xr-x  catalog/packages/biz/vnf_pkg_subscription.py | 2
-rw-r--r--  catalog/packages/tests/test_ns_descriptor.py | 20
-rw-r--r--  catalog/packages/tests/test_nsdm_subscription.py | 3
-rw-r--r--  catalog/packages/tests/test_pnf_descriptor.py | 18
-rw-r--r--  catalog/packages/tests/test_service_descriptor.py | 2
-rw-r--r--  catalog/packages/tests/test_servicepackage.py | 12
-rw-r--r--  catalog/packages/tests/test_vnf_package.py | 24
-rw-r--r--  catalog/packages/views/catalog_views.py | 18
-rw-r--r--  catalog/packages/views/common.py | 38
-rw-r--r--  catalog/packages/views/nsdm_subscription_views.py | 6
-rw-r--r--  catalog/pub/database/migrations/0001_initial.py | 256
-rw-r--r--  catalog/pub/redisco/__init__.py | 58
-rw-r--r--  catalog/pub/redisco/containers.py | 116
-rw-r--r--  catalog/pub/utils/fileutil.py | 11
-rw-r--r--  catalog/pub/utils/idutil.py | 2
-rw-r--r--  catalog/pub/utils/jobutil.py | 1
-rw-r--r--  catalog/pub/utils/restcall.py | 4
-rw-r--r--  catalog/pub/utils/tests.py | 12
-rw-r--r--  catalog/pub/utils/toscaparser/basemodel.py | 36
-rw-r--r--  catalog/pub/utils/toscaparser/graph.py | 4
-rw-r--r--  catalog/pub/utils/toscaparser/nsdmodel.py | 10
-rw-r--r--  catalog/pub/utils/toscaparser/sdmodel.py | 6
-rw-r--r--  catalog/pub/utils/toscaparser/servicemodel.py | 4
-rw-r--r--  catalog/pub/utils/toscaparser/vnfdmodel.py | 2
-rw-r--r--  catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_251.py | 32
-rw-r--r--  catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_base.py | 32
-rw-r--r--  catalog/pub/utils/values.py | 9
-rw-r--r--  catalog/settings.py | 4
-rw-r--r--  catalog/swagger/management/commands/export_swagger.py | 2
-rwxr-xr-x  docker/Dockerfile | 2
-rwxr-xr-x  docker/docker-env-conf.sh | 2
-rw-r--r--  requirements.txt | 14
-rw-r--r--  tox.ini | 5
41 files changed, 517 insertions(+), 338 deletions(-)
diff --git a/catalog/jobs/tests/tests.py b/catalog/jobs/tests/tests.py
index 0411c186..460c8549 100644
--- a/catalog/jobs/tests/tests.py
+++ b/catalog/jobs/tests/tests.py
@@ -29,7 +29,7 @@ class JobsViewTest(TestCase):
JobModel(jobid=self.job_id, jobtype='VNF', jobaction='INST', resid='1').save()
JobStatusModel(indexid=1, jobid=self.job_id, status='inst', errcode='0', progress=20, descp='inst').save()
response = self.client.get("/api/catalog/v1/jobs/%s" % self.job_id)
- self.failUnlessEqual(status.HTTP_200_OK, response.status_code)
+ self.assertEqual(status.HTTP_200_OK, response.status_code)
def test_job_when_jobid_not_exist(self):
job_id = 'test_new_job_id'
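Note on the change above: failUnlessEqual is a long-deprecated alias of assertEqual that was eventually removed from unittest, so assertEqual is the portable spelling on Python 3. A minimal standalone sketch (hypothetical test case, standard library only):

    import unittest

    class AliasDemo(unittest.TestCase):
        def test_status_code(self):
            # assertEqual is the supported API; the fail* aliases emit
            # DeprecationWarning on Python 3 and were later removed.
            self.assertEqual(200, 200)

    if __name__ == '__main__':
        unittest.main()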
diff --git a/catalog/packages/biz/nsdm_subscription.py b/catalog/packages/biz/nsdm_subscription.py
index ec305076..e2af6e49 100644
--- a/catalog/packages/biz/nsdm_subscription.py
+++ b/catalog/packages/biz/nsdm_subscription.py
@@ -73,7 +73,7 @@ class NsdmSubscription:
query_data = {}
logger.debug("Start QueryMultiSubscriptions get --> "
"Check for filters in query params" % self.params)
- for query, value in self.params.iteritems():
+ for query, value in list(self.params.items()):
if query in const.NSDM_NOTIFICATION_FILTERS and value:
query_data[query + '__icontains'] = json.dumps(list(set(value)))
# Query the database with filters if the request
@@ -103,8 +103,9 @@ class NsdmSubscription:
def fill_resp_data(self, subscription):
subscription_filter = dict()
for filter_type in const.NSDM_NOTIFICATION_FILTERS:
- subscription_filter[filter_type] = \
- ast.literal_eval(subscription.__dict__[filter_type])
+ if subscription.__dict__[filter_type]:
+ subscription_filter[filter_type] = \
+ ast.literal_eval(subscription.__dict__[filter_type])
resp_data = {
'id': subscription.subscriptionid,
'callbackUri': subscription.callback_uri,
@@ -156,20 +157,17 @@ class NsdmSubscription:
raise NsdmBadRequestException('Auth type should '
'be ' + const.OAUTH2_CLIENT_CREDENTIALS)
if const.BASIC in self.authentication.get("authType", '') and \
- "paramsBasic" in self.authentication.keys() and \
- not is_filter_type_equal(PARAMSBASICKEYS,
- self.authentication.
- get("paramsBasic").keys()):
+ "paramsBasic" in list(self.authentication.keys()) and \
+ not is_filter_type_equal(PARAMSBASICKEYS, list(
+ self.authentication.get("paramsBasic").keys())):
raise NsdmBadRequestException('userName and password needed '
'for ' + const.BASIC)
if const.OAUTH2_CLIENT_CREDENTIALS in \
self.authentication.get("authType", '') and \
"paramsOauth2ClientCredentials" in \
- self.authentication.keys() and \
- not is_filter_type_equal(PARAMSOAUTH2CLIENTCREDENTIALSKEYS,
- self.authentication.
- get("paramsOauth2ClientCredentials")
- .keys()):
+ list(self.authentication.keys()) and \
+ not is_filter_type_equal(PARAMSOAUTH2CLIENTCREDENTIALSKEYS, list(
+ self.authentication.get("paramsOauth2ClientCredentials").keys())):
raise NsdmBadRequestException('clientId, clientPassword and '
'tokenEndpoint required '
'for ' + const.OAUTH2_CLIENT_CREDENTIALS)
@@ -213,7 +211,8 @@ class NsdmSubscription:
"links": json.dumps(links)
}
for filter_type in const.NSDM_NOTIFICATION_FILTERS:
- subscription_save_db[filter_type] = json.dumps(
- list(set(self.filter.get(filter_type, []))))
+ if self.filter:
+ subscription_save_db[filter_type] = json.dumps(
+ list(set(self.filter.get(filter_type, []))))
NsdmSubscriptionModel.objects.create(**subscription_save_db)
logger.debug('Create Subscription[%s] success', self.subscription_id)
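Note on the iteration change above: Python 3 removed dict.iteritems(); dict.items() returns a lazy view, and wrapping it in list() only matters when the dict is mutated during the loop. A minimal sketch with a hypothetical params dict:

    # Hypothetical query parameters; not the project's data.
    params = {"nsdId": ["b632bddc"], "nsdName": []}
    query_data = {}
    for query, value in params.items():  # a view is fine for read-only loops
        if value:
            query_data[query + "__icontains"] = value
    print(query_data)  # {'nsdId__icontains': ['b632bddc']}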
diff --git a/catalog/packages/biz/pnf_descriptor.py b/catalog/packages/biz/pnf_descriptor.py
index 1a11fe08..547c198d 100644
--- a/catalog/packages/biz/pnf_descriptor.py
+++ b/catalog/packages/biz/pnf_descriptor.py
@@ -219,8 +219,8 @@ class PnfDescriptor(object):
csar_path = pnf_pkg[0].localFilePath
ret = {"model": toscaparser.parse_pnfd(csar_path, inputs)}
except CatalogException as e:
- return [1, e.message]
+ return [1, e.args[0]]
except Exception as e:
- logger.error(e.message)
- return [1, e.message]
+ logger.error(e.args[0])
+ return [1, e.args[0]]
return [0, ret]
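Note on the e.message replacement used throughout this commit: Python 3 exceptions have no message attribute; the constructor arguments live in the args tuple. e.args[0] assumes the exception was raised with at least one argument, while str(e) also covers bare raises. A minimal illustration:

    try:
        raise ValueError("parse failed")
    except ValueError as e:
        print(e.args[0])  # 'parse failed'
        print(str(e))     # 'parse failed'; also safe for ValueError() with no args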
diff --git a/catalog/packages/biz/sdc_ns_package.py b/catalog/packages/biz/sdc_ns_package.py
index 64c520d8..4f9d4600 100644
--- a/catalog/packages/biz/sdc_ns_package.py
+++ b/catalog/packages/biz/sdc_ns_package.py
@@ -43,7 +43,7 @@ def ns_on_distribute(csar_id):
ret = NsPackage().on_distribute(csar_id)
except CatalogException as e:
NsPackage().delete_csar(csar_id)
- return fmt_ns_pkg_rsp(STATUS_FAILED, e.message)
+ return fmt_ns_pkg_rsp(STATUS_FAILED, e.args[0])
except:
logger.error(traceback.format_exc())
NsPackage().delete_csar(csar_id)
@@ -58,7 +58,7 @@ def ns_delete_csar(csar_id):
try:
ret = NsPackage().delete_csar(csar_id)
except CatalogException as e:
- return fmt_ns_pkg_rsp(STATUS_FAILED, e.message)
+ return fmt_ns_pkg_rsp(STATUS_FAILED, e.args[0])
except:
logger.error(traceback.format_exc())
return fmt_ns_pkg_rsp(STATUS_FAILED, str(sys.exc_info()))
@@ -70,7 +70,7 @@ def ns_get_csars():
try:
ret = NsPackage().get_csars()
except CatalogException as e:
- return [1, e.message]
+ return [1, e.args[0]]
except:
logger.error(traceback.format_exc())
return [1, str(sys.exc_info())]
@@ -82,9 +82,9 @@ def ns_get_csar(csar_id):
try:
ret = NsPackage().get_csar(csar_id)
except CatalogException as e:
- return [1, e.message]
+ return [1, e.args[0]]
except Exception as e:
- logger.error(e.message)
+ logger.error(e.args[0])
logger.error(traceback.format_exc())
return [1, str(sys.exc_info())]
return ret
@@ -99,9 +99,9 @@ def parse_nsd(csar_id, inputs):
csar_path = ns_pkg[0].localFilePath
ret = {"model": toscaparser.parse_nsd(csar_path, inputs)}
except CatalogException as e:
- return [1, e.message]
+ return [1, e.args[0]]
except Exception as e:
- logger.error(e.message)
+ logger.error(e.args[0])
logger.error(traceback.format_exc())
return [1, str(sys.exc_info())]
return [0, ret]
diff --git a/catalog/packages/biz/sdc_service_package.py b/catalog/packages/biz/sdc_service_package.py
index 3105bc0b..9160880d 100644
--- a/catalog/packages/biz/sdc_service_package.py
+++ b/catalog/packages/biz/sdc_service_package.py
@@ -106,9 +106,9 @@ class ServicePackage(object):
ret = {"model": toscaparser.parse_sd(csar_path, inputs)}
return ret
except CatalogException as e:
- logger.error(e.message)
+ logger.error(e.args[0])
raise e
except Exception as e:
- logger.error(e.message)
+ logger.error(e.args[0])
logger.error(traceback.format_exc())
raise e
diff --git a/catalog/packages/biz/sdc_vnf_package.py b/catalog/packages/biz/sdc_vnf_package.py
index e432f0e4..571c3bb0 100644
--- a/catalog/packages/biz/sdc_vnf_package.py
+++ b/catalog/packages/biz/sdc_vnf_package.py
@@ -39,9 +39,9 @@ def nf_get_csars():
try:
ret = NfPackage().get_csars()
except CatalogException as e:
- return [1, e.message]
+ return [1, e.args[0]]
except Exception as e:
- logger.error(e.message)
+ logger.error(e.args[0])
logger.error(traceback.format_exc())
return [1, str(sys.exc_info())]
return ret
@@ -52,9 +52,9 @@ def nf_get_csar(csar_id):
try:
ret = NfPackage().get_csar(csar_id)
except CatalogException as e:
- return [1, e.message]
+ return [1, e.args[0]]
except Exception as e:
- logger.error(e.message)
+ logger.error(e.args[0])
logger.error(traceback.format_exc())
return [1, str(sys.exc_info())]
return ret
@@ -69,9 +69,9 @@ def parse_vnfd(csar_id, inputs):
csar_path = nf_pkg[0].localFilePath
ret = {"model": toscaparser.parse_vnfd(csar_path, inputs)}
except CatalogException as e:
- return [1, e.message]
+ return [1, e.args[0]]
except Exception as e:
- logger.error(e.message)
+ logger.error(e.args[0])
logger.error(traceback.format_exc())
return [1, str(sys.exc_info())]
return [0, ret]
@@ -96,9 +96,9 @@ class NfDistributeThread(threading.Thread):
self.on_distribute()
except CatalogException as e:
self.rollback_distribute()
- JobUtil.add_job_status(self.job_id, JOB_ERROR, e.message)
+ JobUtil.add_job_status(self.job_id, JOB_ERROR, e.args[0])
except Exception as e:
- logger.error(e.message)
+ logger.error(e.args[0])
logger.error(traceback.format_exc())
logger.error(str(sys.exc_info()))
self.rollback_distribute()
@@ -162,7 +162,7 @@ class NfDistributeThread(threading.Thread):
VnfPackageModel.objects.filter(vnfPackageId=self.csar_id).delete()
fileutil.delete_dirs(self.csar_save_path)
except Exception as e:
- logger.error(e.message)
+ logger.error(e.args[0])
logger.error(traceback.format_exc())
logger.error(str(sys.exc_info()))
@@ -181,9 +181,9 @@ class NfPkgDeleteThread(threading.Thread):
try:
self.delete_csar()
except CatalogException as e:
- JobUtil.add_job_status(self.job_id, JOB_ERROR, e.message)
+ JobUtil.add_job_status(self.job_id, JOB_ERROR, e.args[0])
except Exception as e:
- logger.error(e.message)
+ logger.error(e.args[0])
logger.error(traceback.format_exc())
logger.error(str(sys.exc_info()))
JobUtil.add_job_status(self.job_id, JOB_ERROR, "Failed to delete CSAR(%s)" % self.csar_id)
diff --git a/catalog/packages/biz/vnf_package.py b/catalog/packages/biz/vnf_package.py
index 34130f1d..585a599b 100644
--- a/catalog/packages/biz/vnf_package.py
+++ b/catalog/packages/biz/vnf_package.py
@@ -18,7 +18,7 @@ import os
import sys
import threading
import traceback
-import urllib2
+import urllib
import uuid
from catalog.packages.biz.common import parse_file_range, read, save
@@ -139,9 +139,9 @@ class VnfPkgUploadThread(threading.Thread):
self.upload_vnf_pkg_from_uri()
parse_vnfd_and_save(self.vnf_pkg_id, self.upload_file_name)
except CatalogException as e:
- logger.error(e.message)
+ logger.error(e.args[0])
except Exception as e:
- logger.error(e.message)
+ logger.error(e.args[0])
logger.error(traceback.format_exc())
logger.error(str(sys.exc_info()))
@@ -154,14 +154,13 @@ class VnfPkgUploadThread(threading.Thread):
vnf_pkg.update(onboardingState=PKG_STATUS.UPLOADING)
uri = ignore_case_get(self.data, "addressInformation")
- request = urllib2.Request(uri)
- response = urllib2.urlopen(request)
+ response = urllib.request.urlopen(uri)
local_file_dir = os.path.join(CATALOG_ROOT_PATH, self.vnf_pkg_id)
self.upload_file_name = os.path.join(local_file_dir, os.path.basename(uri))
if not os.path.exists(local_file_dir):
fileutil.make_dirs(local_file_dir)
- with open(self.upload_file_name, "wb") as local_file:
+ with open(self.upload_file_name, "wt") as local_file:
local_file.write(response.read())
response.close()
logger.info('VNF packge(%s) has been uploaded.' % self.vnf_pkg_id)
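Note on the download change above: Python 3 folded urllib2 into urllib.request. One detail worth flagging: urlopen(...).read() returns bytes, so writing the payload verbatim calls for binary mode on the local file. A minimal sketch with a hypothetical URL:

    import urllib.request

    uri = "http://example.com/pkg.csar"  # hypothetical address
    with urllib.request.urlopen(uri) as response:
        payload = response.read()        # bytes on Python 3
    with open("pkg.csar", "wb") as local_file:
        local_file.write(payload)        # binary mode matches the bytes payload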
diff --git a/catalog/packages/biz/vnf_pkg_artifacts.py b/catalog/packages/biz/vnf_pkg_artifacts.py
index bb79624b..37021eb5 100644
--- a/catalog/packages/biz/vnf_pkg_artifacts.py
+++ b/catalog/packages/biz/vnf_pkg_artifacts.py
@@ -36,7 +36,7 @@ class FetchVnfPkgArtifact(object):
artifact_path = fileutil.get_artifact_path(vnf_extract_path, artifactPath)
if not artifact_path:
raise ArtifactNotFoundException("Couldn't artifact %s" % artifactPath)
- with open(artifact_path, 'rb') as f:
+ with open(artifact_path, 'rt') as f:
file_content = f.read()
else:
raise ArtifactNotFoundException("NF Package format is not csar or zip")
diff --git a/catalog/packages/biz/vnf_pkg_subscription.py b/catalog/packages/biz/vnf_pkg_subscription.py
index e0e6ff07..29ef92e1 100755
--- a/catalog/packages/biz/vnf_pkg_subscription.py
+++ b/catalog/packages/biz/vnf_pkg_subscription.py
@@ -152,7 +152,7 @@ class QuerySubscription(object):
query_data = {}
logger.debug("QuerySubscription--get--multi--subscriptions--biz::> Check "
"for filters in query params %s" % params)
- for query, value in params.iteritems():
+ for query, value in list(params.items()):
if query in ROOT_FILTERS:
query_data[ROOT_FILTERS[query] + '__icontains'] = value
# Query the database with filters if the request has fields in request params, else fetch all records
diff --git a/catalog/packages/tests/test_ns_descriptor.py b/catalog/packages/tests/test_ns_descriptor.py
index d156843a..2f4e035c 100644
--- a/catalog/packages/tests/test_ns_descriptor.py
+++ b/catalog/packages/tests/test_ns_descriptor.py
@@ -175,9 +175,9 @@ class TestNsDescriptor(TestCase):
userDefinedData=user_defined_data_json,
).save()
- with open('nsd_content.txt', 'wb') as fp:
+ with open('nsd_content.txt', 'wt') as fp:
fp.write('test')
- with open('nsd_content.txt', 'rb') as fp:
+ with open('nsd_content.txt', 'rt') as fp:
resp = self.client.put(
"/api/nsd/v1/ns_descriptors/22/nsd_content",
{'file': fp},
@@ -195,9 +195,9 @@ class TestNsDescriptor(TestCase):
os.remove('nsd_content.txt')
def test_nsd_content_upload_failure(self):
- with open('nsd_content.txt', 'wb') as fp:
+ with open('nsd_content.txt', 'wt') as fp:
fp.write('test')
- with open('nsd_content.txt', 'rb') as fp:
+ with open('nsd_content.txt', 'rt') as fp:
response = self.client.put(
"/api/nsd/v1/ns_descriptors/22/nsd_content",
{'file': fp},
@@ -205,7 +205,7 @@ class TestNsDescriptor(TestCase):
self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
def test_nsd_content_download_normal(self):
- with open('nsd_content.txt', 'wb') as fp:
+ with open('nsd_content.txt', 'wt') as fp:
fp.writelines('test1')
fp.writelines('test2')
NSPackageModel.objects.create(
@@ -220,7 +220,7 @@ class TestNsDescriptor(TestCase):
for data in response.streaming_content:
file_content = '%s%s' % (file_content, data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual('test1test2', file_content)
+ self.assertEqual("b'test1test2'", file_content)
os.remove('nsd_content.txt')
def test_nsd_content_download_when_ns_not_exist(self):
@@ -237,7 +237,7 @@ class TestNsDescriptor(TestCase):
self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
def test_nsd_content_partial_download_normal(self):
- with open('nsd_content.txt', 'wb') as fp:
+ with open('nsd_content.txt', 'wt') as fp:
fp.writelines('test1')
fp.writelines('test2')
NSPackageModel(
@@ -255,7 +255,7 @@ class TestNsDescriptor(TestCase):
for data in response.streaming_content:
partial_file_content = '%s%s' % (partial_file_content, data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual('test2', partial_file_content)
+ self.assertEqual("b'test2'", partial_file_content)
os.remove('nsd_content.txt')
@mock.patch.object(NsDescriptor, 'create')
@@ -286,9 +286,9 @@ class TestNsDescriptor(TestCase):
@mock.patch.object(NsDescriptor, 'upload')
def test_upload_when_catch_exception(self, mock_upload):
mock_upload.side_effect = TypeError("integer type")
- with open('nsd_content.txt', 'wb') as fp:
+ with open('nsd_content.txt', 'wt') as fp:
fp.write('test')
- with open('nsd_content.txt', 'rb') as fp:
+ with open('nsd_content.txt', 'rt') as fp:
response = self.client.put("/api/nsd/v1/ns_descriptors/22/nsd_content", {'file': fp})
self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
os.remove('nsd_content.txt')
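Note on the "b'test1test2'" expectations above: on Python 3, Django's streaming_content yields bytes chunks, and formatting bytes with '%s' produces their repr, which is why the expected strings in this diff grew b'...' wrappers. A minimal illustration without Django, using a plain iterator as a stand-in for streaming_content:

    chunks = iter([b"test1", b"test2"])       # stand-in for response.streaming_content
    assert "%s" % b"test1" == "b'test1'"      # %s on bytes yields the repr
    file_content = b"".join(chunks).decode()  # join, then decode once
    assert file_content == "test1test2"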
diff --git a/catalog/packages/tests/test_nsdm_subscription.py b/catalog/packages/tests/test_nsdm_subscription.py
index 1eb4c0dd..f73c4167 100644
--- a/catalog/packages/tests/test_nsdm_subscription.py
+++ b/catalog/packages/tests/test_nsdm_subscription.py
@@ -37,6 +37,9 @@ class TestNsdmSubscription(TestCase):
"userName": "username",
"password": "password"
}
+ },
+ "filter": {
+ "nsdId": ["b632bddc-abcd-4180-bd8d-4e8a9578eff7"],
}
}
self.links = {
diff --git a/catalog/packages/tests/test_pnf_descriptor.py b/catalog/packages/tests/test_pnf_descriptor.py
index 97515187..68fad6cf 100644
--- a/catalog/packages/tests/test_pnf_descriptor.py
+++ b/catalog/packages/tests/test_pnf_descriptor.py
@@ -149,10 +149,10 @@ class TestPnfDescriptor(TestCase):
userDefinedData=user_defined_data_json,
).save()
mock_parse_pnfd.return_value = json.JSONEncoder().encode(pnfd_data)
- with open('pnfd_content.txt', 'wb') as fp:
+ with open('pnfd_content.txt', 'wt') as fp:
fp.write('test')
- with open('pnfd_content.txt', 'rb') as fp:
+ with open('pnfd_content.txt', 'rt') as fp:
resp = self.client.put(
"/api/nsd/v1/pnf_descriptors/22/pnfd_content",
{'file': fp},
@@ -165,10 +165,10 @@ class TestPnfDescriptor(TestCase):
os.remove('pnfd_content.txt')
def test_pnfd_content_upload_when_pnf_not_exist(self):
- with open('pnfd_content.txt', 'wb') as fp:
+ with open('pnfd_content.txt', 'wt') as fp:
fp.write('test')
- with open('pnfd_content.txt', 'rb') as fp:
+ with open('pnfd_content.txt', 'rt') as fp:
resp = self.client.put(
"/api/nsd/v1/pnf_descriptors/22/pnfd_content",
{'file': fp},
@@ -177,7 +177,7 @@ class TestPnfDescriptor(TestCase):
@mock.patch.object(toscaparser, "parse_pnfd")
def test_pnfd_content_upload_when_pnfd_exist(self, mock_parse_pnfd):
- with open('pnfd_content.txt', 'wb') as fp:
+ with open('pnfd_content.txt', 'wt') as fp:
fp.write('test')
PnfPackageModel(
pnfPackageId='22',
@@ -190,7 +190,7 @@ class TestPnfDescriptor(TestCase):
pnfdId="zte-1.0"
).save()
mock_parse_pnfd.return_value = json.JSONEncoder().encode(pnfd_data)
- with open('pnfd_content.txt', 'rb') as fp:
+ with open('pnfd_content.txt', 'rt') as fp:
resp = self.client.put(
"/api/nsd/v1/pnf_descriptors/22/pnfd_content",
{'file': fp},
@@ -198,7 +198,7 @@ class TestPnfDescriptor(TestCase):
self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
def test_pnfd_download_normal(self):
- with open('pnfd_content.txt', 'wb') as fp:
+ with open('pnfd_content.txt', 'wt') as fp:
fp.writelines('test1')
fp.writelines('test2')
user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
@@ -215,7 +215,7 @@ class TestPnfDescriptor(TestCase):
for data in resp.streaming_content:
file_content = '%s%s' % (file_content, data)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertEqual('test1test2', file_content)
+ self.assertEqual("b'test1test2'", file_content)
os.remove('pnfd_content.txt')
def test_pnfd_download_failed(self):
@@ -223,7 +223,7 @@ class TestPnfDescriptor(TestCase):
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_pnfd_download_when_not_on_boarded(self):
- with open('pnfd_content.txt', 'wb') as fp:
+ with open('pnfd_content.txt', 'wt') as fp:
fp.writelines('test1')
fp.writelines('test2')
user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
diff --git a/catalog/packages/tests/test_service_descriptor.py b/catalog/packages/tests/test_service_descriptor.py
index 362e1ccf..08a6f033 100644
--- a/catalog/packages/tests/test_service_descriptor.py
+++ b/catalog/packages/tests/test_service_descriptor.py
@@ -92,4 +92,4 @@ class TestServiceDescription(TestCase):
ServiceDescriptor().delete_single(csar_id)
except Exception as e:
self.assertTrue(isinstance(e, PackageNotFoundException))
- self.assertEqual("Service package[8000] not Found.", e.message)
+ self.assertEqual("Service package[8000] not Found.", e.args[0])
diff --git a/catalog/packages/tests/test_servicepackage.py b/catalog/packages/tests/test_servicepackage.py
index 8030d924..bfa7c25b 100644
--- a/catalog/packages/tests/test_servicepackage.py
+++ b/catalog/packages/tests/test_servicepackage.py
@@ -261,7 +261,7 @@ class TestServicePackage(TestCase):
try:
ServicePackage().on_distribute(csar_id)
except PackageHasExistsException as e:
- self.assertEqual("Service CSAR(1) already exists.", e.message)
+ self.assertEqual("Service CSAR(1) already exists.", e.args[0])
@mock.patch.object(sdc, 'get_artifact')
def test_service_pkg_distribute_when_fail_get_artifacts(self, mock_get_artifact):
@@ -271,7 +271,7 @@ class TestServicePackage(TestCase):
ServicePackage().on_distribute(csar_id)
except Exception as e:
self.assertTrue(isinstance(e, CatalogException))
- self.assertEqual("Failed to query artifact(services,1) from sdc.", e.message)
+ self.assertEqual("Failed to query artifact(services,1) from sdc.", e.args[0])
@mock.patch.object(sdc, 'get_artifact')
@mock.patch.object(sdc, 'download_artifacts')
@@ -295,7 +295,7 @@ class TestServicePackage(TestCase):
ServicePackage().on_distribute(csar_id)
except Exception as e:
self.assertTrue(isinstance(e, CatalogException))
- self.assertEqual("Failed to download 1 from sdc.", e.message)
+ self.assertEqual("Failed to download 1 from sdc.", e.args[0])
@mock.patch.object(sdc, 'get_artifact')
@mock.patch.object(sdc, 'download_artifacts')
@@ -389,7 +389,7 @@ class TestServicePackage(TestCase):
try:
ServicePackage().get_csar(1000)
except PackageNotFoundException as e:
- self.assertEqual("Service package[1000] not Found.", e.message)
+ self.assertEqual("Service package[1000] not Found.", e.args[0])
def test_api_service_pkg_get_one(self):
ServicePackageModel(
@@ -423,7 +423,7 @@ class TestServicePackage(TestCase):
try:
ServicePackage().delete_csar("8000")
except PackageNotFoundException as e:
- self.assertEqual("Service package[8000] not Found.", e.message)
+ self.assertEqual("Service package[8000] not Found.", e.args[0])
def test_api_service_pkg_normal_delete(self):
ServicePackageModel(servicePackageId="8", servicedId="2").save()
@@ -447,7 +447,7 @@ class TestServicePackage(TestCase):
inputs = []
ServicePackage().parse_serviced(csar_id, inputs)
except PackageNotFoundException as e:
- self.assertEqual("Service CSAR(8000) does not exist.", e.message)
+ self.assertEqual("Service CSAR(8000) does not exist.", e.args[0])
def test_api_service_pkg_parser_not_found(self):
query_data = {
diff --git a/catalog/packages/tests/test_vnf_package.py b/catalog/packages/tests/test_vnf_package.py
index 8def3ba9..5ff3ff45 100644
--- a/catalog/packages/tests/test_vnf_package.py
+++ b/catalog/packages/tests/test_vnf_package.py
@@ -14,7 +14,7 @@
import json
import os
-import urllib2
+import urllib
import mock
import shutil
@@ -49,7 +49,7 @@ class TestVnfPackage(TestCase):
@mock.patch.object(toscaparser, 'parse_vnfd')
def test_upload_vnf_pkg(self, mock_parse_vnfd):
- data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "empty.txt"), "rb")}
+ data = {'file': open(os.path.join(CATALOG_ROOT_PATH, "empty.txt"), "rt")}
VnfPackageModel.objects.create(
vnfPackageId="222",
onboardingState="CREATED"
@@ -70,7 +70,7 @@ class TestVnfPackage(TestCase):
self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
@mock.patch.object(toscaparser, 'parse_vnfd')
- @mock.patch.object(urllib2, 'urlopen')
+ @mock.patch.object(urllib.request, 'urlopen')
def test_upload_nf_pkg_from_uri(self, mock_urlopen, mock_parse_vnfd):
vnf_pkg = VnfPackageModel.objects.create(
vnfPackageId="222",
@@ -230,7 +230,7 @@ class TestVnfPackage(TestCase):
self.assertEqual(response.data, None)
def test_fetch_vnf_pkg(self):
- with open("vnfPackage.csar", "wb") as fp:
+ with open("vnfPackage.csar", "wt") as fp:
fp.writelines("AAAABBBBCCCCDDDD")
VnfPackageModel.objects.create(
vnfPackageId="222",
@@ -240,13 +240,13 @@ class TestVnfPackage(TestCase):
response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content")
file_content = ''
for data in response.streaming_content:
- file_content = file_content + data
+ file_content = file_content + data.decode()
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual('AAAABBBBCCCCDDDD', file_content)
os.remove("vnfPackage.csar")
def test_fetch_partical_vnf_pkg(self):
- with open("vnfPackage.csar", "wb") as fp:
+ with open("vnfPackage.csar", "wt") as fp:
fp.writelines("AAAABBBBCCCCDDDD")
VnfPackageModel.objects.create(
vnfPackageId="222",
@@ -256,13 +256,13 @@ class TestVnfPackage(TestCase):
response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content", HTTP_RANGE="4-7")
partial_file_content = ''
for data in response.streaming_content:
- partial_file_content = partial_file_content + data
+ partial_file_content = partial_file_content.encode() + data
self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual('BBB', partial_file_content)
+ self.assertEqual(b'BBB', partial_file_content)
os.remove("vnfPackage.csar")
def test_fetch_last_partical_vnf_pkg(self):
- with open("vnfPackage.csar", "wb") as fp:
+ with open("vnfPackage.csar", "wt") as fp:
fp.writelines("AAAABBBBCCCCDDDD")
VnfPackageModel.objects.create(
vnfPackageId="222",
@@ -272,9 +272,9 @@ class TestVnfPackage(TestCase):
response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content", HTTP_RANGE=" 4-")
partial_file_content = ''
for data in response.streaming_content:
- partial_file_content = partial_file_content + data
+ partial_file_content = partial_file_content.encode() + data
self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual('BBBBCCCCDDDD', partial_file_content)
+ self.assertEqual(b'BBBBCCCCDDDD', partial_file_content)
os.remove("vnfPackage.csar")
def test_fetch_vnf_pkg_when_pkg_not_exist(self):
@@ -353,7 +353,7 @@ class TestVnfPackage(TestCase):
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/artifacts/image")
self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response.getvalue(), "ubuntu_16.04\n")
+ self.assertEqual(response.getvalue(), b"ubuntu_16.04\n")
@mock.patch.object(toscaparser, 'parse_vnfd')
def test_fetch_vnf_artifact_not_exists(self, mock_parse_vnfd):
diff --git a/catalog/packages/views/catalog_views.py b/catalog/packages/views/catalog_views.py
index f9cc4803..6ed9fb9c 100644
--- a/catalog/packages/views/catalog_views.py
+++ b/catalog/packages/views/catalog_views.py
@@ -243,7 +243,7 @@ def servicepackages_rc(request, *args, **kwargs):
return Response(data=csar_list, status=status.HTTP_200_OK)
except Exception as e:
error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
- return Response(data=fmt_error_rsp(e.message, error_status), status=error_status)
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
elif request.method == 'POST':
# Distributes the package according to the given csarId
request_serializer = ServicePackageDistributeRequestSerializer(data=request.data)
@@ -258,10 +258,10 @@ def servicepackages_rc(request, *args, **kwargs):
return Response(status=status.HTTP_202_ACCEPTED)
except PackageHasExistsException as e:
error_status = status.HTTP_400_BAD_REQUEST
- return Response(data=fmt_error_rsp(e.message, error_status), status=error_status)
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
except Exception as e:
error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
- return Response(data=fmt_error_rsp(e.message, error_status), status=error_status)
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
@swagger_auto_schema(
@@ -307,10 +307,10 @@ def service_rd_csar(request, *args, **kwargs):
return Response(data=ret, status=status.HTTP_200_OK)
except PackageNotFoundException as e:
error_status = status.HTTP_404_NOT_FOUND
- return Response(data=fmt_error_rsp(e.message, error_status), status=error_status)
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
except Exception as e:
error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
- return Response(data=fmt_error_rsp(e.message, error_status), status=error_status)
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
elif request.method == 'DELETE':
try:
@@ -318,10 +318,10 @@ def service_rd_csar(request, *args, **kwargs):
return Response(status=status.HTTP_204_NO_CONTENT)
except PackageNotFoundException as e:
error_status = status.HTTP_404_NOT_FOUND
- return Response(data=fmt_error_rsp(e.message, error_status), status=error_status)
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
except Exception as e:
error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
- return Response(data=fmt_error_rsp(e.message, error_status), status=error_status)
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
@swagger_auto_schema(
@@ -420,10 +420,10 @@ def model_parser(request, *args, **kwargs):
return Response(data=response_serializer.data, status=status.HTTP_202_ACCEPTED)
except PackageNotFoundException as e:
error_status = status.HTTP_404_NOT_FOUND
- return Response(data=fmt_error_rsp(e.message, error_status), status=error_status)
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
except Exception as e:
error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
- return Response(data=fmt_error_rsp(e.message, error_status), status=error_status)
+ return Response(data=fmt_error_rsp(e.args[0], error_status), status=error_status)
elif package_type.lower().__eq__("ns"):
ret = sdc_ns_package.parse_nsd(csar_id, inputs)
elif package_type.lower().__eq__("vnf"):
diff --git a/catalog/packages/views/common.py b/catalog/packages/views/common.py
index d336ec1a..6285cb95 100644
--- a/catalog/packages/views/common.py
+++ b/catalog/packages/views/common.py
@@ -59,61 +59,61 @@ def view_safe_call_with_log(logger):
try:
return func(*args, **kwargs)
except NsdmDuplicateSubscriptionException as e:
- logger.error(e.message)
+ logger.error(e.args[0])
return make_error_resp(
- detail=e.message,
+ detail=e.args[0],
status=status.HTTP_303_SEE_OTHER
)
except VnfPkgDuplicateSubscriptionException as e:
- logger.error(e.message)
+ logger.error(e.args[0])
return make_error_resp(
- detail=e.message,
+ detail=e.args[0],
status=status.HTTP_303_SEE_OTHER
)
except PackageNotFoundException as e:
- logger.error(e.message)
+ logger.error(e.args[0])
return make_error_resp(
- detail=e.message,
+ detail=e.args[0],
status=status.HTTP_404_NOT_FOUND
)
except ResourceNotFoundException as e:
- logger.error(e.message)
+ logger.error(e.args[0])
return make_error_resp(
- detail=e.message,
+ detail=e.args[0],
status=status.HTTP_404_NOT_FOUND
)
except ArtifactNotFoundException as e:
- logger.error(e.message)
+ logger.error(e.args[0])
return make_error_resp(
- detail=e.message,
+ detail=e.args[0],
status=status.HTTP_404_NOT_FOUND
)
except BadRequestException as e:
- logger.error(e.message)
+ logger.error(e.args[0])
return make_error_resp(
- detail=e.message,
+ detail=e.args[0],
status=status.HTTP_400_BAD_REQUEST
)
except NsdmBadRequestException as e:
- logger.error(e.message)
+ logger.error(e.args[0])
return make_error_resp(
- detail=e.message,
+ detail=e.args[0],
status=status.HTTP_400_BAD_REQUEST
)
except VnfPkgSubscriptionException as e:
- logger.error(e.message)
+ logger.error(e.args[0])
return make_error_resp(
- detail=e.message,
+ detail=e.args[0],
status=status.HTTP_500_INTERNAL_SERVER_ERROR
)
except CatalogException as e:
- logger.error(e.message)
+ logger.error(e.args[0])
return make_error_resp(
- detail=e.message,
+ detail=e.args[0],
status=status.HTTP_500_INTERNAL_SERVER_ERROR
)
except Exception as e:
- logger.error(e.message)
+ logger.error(e.args[0])
logger.error(traceback.format_exc())
return make_error_resp(
detail='Unexpected exception',
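For context, a minimal sketch of the decorator shape these hunks edit (names taken from the diff; the body is simplified, make_error_resp is stubbed, and an empty-args guard is added purely for illustration):

    import functools
    import logging
    import traceback

    def make_error_resp(detail, status):  # stub standing in for the module's helper
        return {"detail": detail, "status": status}

    def view_safe_call_with_log(logger):
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                try:
                    return func(*args, **kwargs)
                except Exception as e:
                    msg = e.args[0] if e.args else str(e)  # guard against bare raises
                    logger.error(msg)
                    logger.error(traceback.format_exc())
                    return make_error_resp(detail='Unexpected exception', status=500)
            return wrapper
        return decorator

    @view_safe_call_with_log(logging.getLogger(__name__))
    def view(request):
        raise RuntimeError("boom")

    print(view(None))  # {'detail': 'Unexpected exception', 'status': 500}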
diff --git a/catalog/packages/views/nsdm_subscription_views.py b/catalog/packages/views/nsdm_subscription_views.py
index a3ceeae2..5e6394e4 100644
--- a/catalog/packages/views/nsdm_subscription_views.py
+++ b/catalog/packages/views/nsdm_subscription_views.py
@@ -73,10 +73,8 @@ def nsd_subscription_rc(request):
NsdmSubscriptionRequestSerializer)
subscription = NsdmSubscription().create(
nsdm_subscription_request.data)
- subscription_resp = validate_data(subscription,
- NsdmSubscriptionSerializer)
- return Response(data=subscription_resp.data,
- status=status.HTTP_201_CREATED)
+ validate_data(subscription, NsdmSubscriptionSerializer)
+ return Response(data=subscription, status=status.HTTP_201_CREATED)
if request.method == 'GET':
logger.debug("Subscription Notification GET %s" % request.query_params)
diff --git a/catalog/pub/database/migrations/0001_initial.py b/catalog/pub/database/migrations/0001_initial.py
index 8446b6e3..98ca84c3 100644
--- a/catalog/pub/database/migrations/0001_initial.py
+++ b/catalog/pub/database/migrations/0001_initial.py
@@ -28,17 +28,17 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='JobModel',
fields=[
- ('jobid', models.CharField(db_column=b'JOBID', max_length=255, primary_key=True, serialize=False)),
- ('jobtype', models.CharField(db_column=b'JOBTYPE', max_length=255)),
- ('jobaction', models.CharField(db_column=b'JOBACTION', max_length=255)),
- ('resid', models.CharField(db_column=b'RESID', max_length=255)),
- ('status', models.IntegerField(blank=True, db_column=b'STATUS', null=True)),
- ('starttime', models.CharField(blank=True, db_column=b'STARTTIME', max_length=255, null=True)),
- ('endtime', models.CharField(blank=True, db_column=b'ENDTIME', max_length=255, null=True)),
- ('progress', models.IntegerField(blank=True, db_column=b'PROGRESS', null=True)),
- ('user', models.CharField(blank=True, db_column=b'USER', max_length=255, null=True)),
- ('parentjobid', models.CharField(blank=True, db_column=b'PARENTJOBID', max_length=255, null=True)),
- ('resname', models.CharField(blank=True, db_column=b'RESNAME', max_length=255, null=True)),
+ ('jobid', models.CharField(db_column='JOBID', max_length=255, primary_key=True, serialize=False)),
+ ('jobtype', models.CharField(db_column='JOBTYPE', max_length=255)),
+ ('jobaction', models.CharField(db_column='JOBACTION', max_length=255)),
+ ('resid', models.CharField(db_column='RESID', max_length=255)),
+ ('status', models.IntegerField(blank=True, db_column='STATUS', null=True)),
+ ('starttime', models.CharField(blank=True, db_column='STARTTIME', max_length=255, null=True)),
+ ('endtime', models.CharField(blank=True, db_column='ENDTIME', max_length=255, null=True)),
+ ('progress', models.IntegerField(blank=True, db_column='PROGRESS', null=True)),
+ ('user', models.CharField(blank=True, db_column='USER', max_length=255, null=True)),
+ ('parentjobid', models.CharField(blank=True, db_column='PARENTJOBID', max_length=255, null=True)),
+ ('resname', models.CharField(blank=True, db_column='RESNAME', max_length=255, null=True)),
],
options={
'db_table': 'CATALOG_JOB',
@@ -48,13 +48,13 @@ class Migration(migrations.Migration):
name='JobStatusModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
- ('indexid', models.IntegerField(db_column=b'INDEXID')),
- ('jobid', models.CharField(db_column=b'JOBID', max_length=255)),
- ('status', models.CharField(db_column=b'STATUS', max_length=255)),
- ('progress', models.IntegerField(blank=True, db_column=b'PROGRESS', null=True)),
- ('descp', models.TextField(db_column=b'DESCP', max_length=65535)),
- ('errcode', models.CharField(blank=True, db_column=b'ERRCODE', max_length=255, null=True)),
- ('addtime', models.CharField(blank=True, db_column=b'ADDTIME', max_length=255, null=True)),
+ ('indexid', models.IntegerField(db_column='INDEXID')),
+ ('jobid', models.CharField(db_column='JOBID', max_length=255)),
+ ('status', models.CharField(db_column='STATUS', max_length=255)),
+ ('progress', models.IntegerField(blank=True, db_column='PROGRESS', null=True)),
+ ('descp', models.TextField(db_column='DESCP', max_length=65535)),
+ ('errcode', models.CharField(blank=True, db_column='ERRCODE', max_length=255, null=True)),
+ ('addtime', models.CharField(blank=True, db_column='ADDTIME', max_length=255, null=True)),
],
options={
'db_table': 'CATALOG_JOB_STATUS',
@@ -63,30 +63,30 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='NsdmSubscriptionModel',
fields=[
- ('subscriptionid', models.CharField(db_column=b'SUBSCRIPTIONID', max_length=255, primary_key=True, serialize=False)),
- ('notificationTypes', models.TextField(db_column=b'NOTIFICATIONTYPES', null=True)),
- ('auth_info', models.TextField(db_column=b'AUTHINFO', null=True)),
- ('callback_uri', models.CharField(db_column=b'CALLBACKURI', max_length=255)),
- ('nsdInfoId', models.TextField(db_column=b'NSDINFOID', null=True)),
- ('nsdId', models.TextField(db_column=b'NSDID', null=True)),
- ('nsdName', models.TextField(db_column=b'NSDNAME', null=True)),
- ('nsdVersion', models.TextField(db_column=b'NSDVERSION', null=True)),
- ('nsdDesigner', models.TextField(db_column=b'NSDDESIGNER', null=True)),
- ('nsdInvariantId', models.TextField(db_column=b'NSDINVARIANTID', null=True)),
- ('vnfPkgIds', models.TextField(db_column=b'VNFPKGIDS', null=True)),
- ('pnfdInfoIds', models.TextField(db_column=b'PNFDINFOIDS', null=True)),
- ('nestedNsdInfoIds', models.TextField(db_column=b'NESTEDNSDINFOIDS', null=True)),
- ('nsdOnboardingState', models.TextField(db_column=b'NSDONBOARDINGSTATE', null=True)),
- ('nsdOperationalState', models.TextField(db_column=b'NSDOPERATIONALSTATE', null=True)),
- ('nsdUsageState', models.TextField(db_column=b'NSDUSAGESTATE', null=True)),
- ('pnfdId', models.TextField(db_column=b'PNFDID', null=True)),
- ('pnfdName', models.TextField(db_column=b'PNFDNAME', null=True)),
- ('pnfdVersion', models.TextField(db_column=b'PNFDVERSION', null=True)),
- ('pnfdProvider', models.TextField(db_column=b'PNFDPROVIDER', null=True)),
- ('pnfdInvariantId', models.TextField(db_column=b'PNFDINVARIANTID', null=True)),
- ('pnfdOnboardingState', models.TextField(db_column=b'PNFDONBOARDINGSTATE', null=True)),
- ('pnfdUsageState', models.TextField(db_column=b'PNFDUSAGESTATE', null=True)),
- ('links', models.TextField(db_column=b'LINKS')),
+ ('subscriptionid', models.CharField(db_column='SUBSCRIPTIONID', max_length=255, primary_key=True, serialize=False)),
+ ('notificationTypes', models.TextField(db_column='NOTIFICATIONTYPES', null=True)),
+ ('auth_info', models.TextField(db_column='AUTHINFO', null=True)),
+ ('callback_uri', models.CharField(db_column='CALLBACKURI', max_length=255)),
+ ('nsdInfoId', models.TextField(db_column='NSDINFOID', null=True)),
+ ('nsdId', models.TextField(db_column='NSDID', null=True)),
+ ('nsdName', models.TextField(db_column='NSDNAME', null=True)),
+ ('nsdVersion', models.TextField(db_column='NSDVERSION', null=True)),
+ ('nsdDesigner', models.TextField(db_column='NSDDESIGNER', null=True)),
+ ('nsdInvariantId', models.TextField(db_column='NSDINVARIANTID', null=True)),
+ ('vnfPkgIds', models.TextField(db_column='VNFPKGIDS', null=True)),
+ ('pnfdInfoIds', models.TextField(db_column='PNFDINFOIDS', null=True)),
+ ('nestedNsdInfoIds', models.TextField(db_column='NESTEDNSDINFOIDS', null=True)),
+ ('nsdOnboardingState', models.TextField(db_column='NSDONBOARDINGSTATE', null=True)),
+ ('nsdOperationalState', models.TextField(db_column='NSDOPERATIONALSTATE', null=True)),
+ ('nsdUsageState', models.TextField(db_column='NSDUSAGESTATE', null=True)),
+ ('pnfdId', models.TextField(db_column='PNFDID', null=True)),
+ ('pnfdName', models.TextField(db_column='PNFDNAME', null=True)),
+ ('pnfdVersion', models.TextField(db_column='PNFDVERSION', null=True)),
+ ('pnfdProvider', models.TextField(db_column='PNFDPROVIDER', null=True)),
+ ('pnfdInvariantId', models.TextField(db_column='PNFDINVARIANTID', null=True)),
+ ('pnfdOnboardingState', models.TextField(db_column='PNFDONBOARDINGSTATE', null=True)),
+ ('pnfdUsageState', models.TextField(db_column='PNFDUSAGESTATE', null=True)),
+ ('links', models.TextField(db_column='LINKS')),
],
options={
'db_table': 'CATALOG_NSDM_SUBSCRIPTION',
@@ -95,23 +95,23 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='NSPackageModel',
fields=[
- ('nsPackageId', models.CharField(db_column=b'NSPACKAGEID', max_length=50, primary_key=True, serialize=False)),
- ('nsPackageUri', models.CharField(blank=True, db_column=b'NSPACKAGEURI', max_length=300, null=True)),
- ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
- ('sdcCsarId', models.CharField(blank=True, db_column=b'SDCCSARID', max_length=50, null=True)),
- ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
- ('operationalState', models.CharField(blank=True, db_column=b'OPERATIONALSTATE', max_length=20, null=True)),
- ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
- ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
- ('nsdId', models.CharField(blank=True, db_column=b'NSDID', max_length=50, null=True)),
- ('invariantId', models.CharField(blank=True, db_column=b'INVARIANTID', max_length=50, null=True)),
- ('nsdName', models.CharField(blank=True, db_column=b'NSDNAME', max_length=50, null=True)),
- ('nsdDesginer', models.CharField(blank=True, db_column=b'NSDDESIGNER', max_length=50, null=True)),
- ('nsdDescription', models.CharField(blank=True, db_column=b'NSDDESCRIPTION', max_length=100, null=True)),
- ('nsdVersion', models.CharField(blank=True, db_column=b'NSDVERSION', max_length=20, null=True)),
- ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
- ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
- ('nsdModel', models.TextField(blank=True, db_column=b'NSDMODEL', max_length=65535, null=True)),
+ ('nsPackageId', models.CharField(db_column='NSPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+ ('nsPackageUri', models.CharField(blank=True, db_column='NSPACKAGEURI', max_length=300, null=True)),
+ ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
+ ('sdcCsarId', models.CharField(blank=True, db_column='SDCCSARID', max_length=50, null=True)),
+ ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
+ ('operationalState', models.CharField(blank=True, db_column='OPERATIONALSTATE', max_length=20, null=True)),
+ ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
+ ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
+ ('nsdId', models.CharField(blank=True, db_column='NSDID', max_length=50, null=True)),
+ ('invariantId', models.CharField(blank=True, db_column='INVARIANTID', max_length=50, null=True)),
+ ('nsdName', models.CharField(blank=True, db_column='NSDNAME', max_length=50, null=True)),
+ ('nsdDesginer', models.CharField(blank=True, db_column='NSDDESIGNER', max_length=50, null=True)),
+ ('nsdDescription', models.CharField(blank=True, db_column='NSDDESCRIPTION', max_length=100, null=True)),
+ ('nsdVersion', models.CharField(blank=True, db_column='NSDVERSION', max_length=20, null=True)),
+ ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
+ ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
+ ('nsdModel', models.TextField(blank=True, db_column='NSDMODEL', max_length=65535, null=True)),
],
options={
'db_table': 'CATALOG_NSPACKAGE',
@@ -120,22 +120,22 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='PnfPackageModel',
fields=[
- ('pnfPackageId', models.CharField(db_column=b'PNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
- ('pnfPackageUri', models.CharField(blank=True, db_column=b'PNFPACKAGEURI', max_length=300, null=True)),
- ('sdcCSARUri', models.CharField(blank=True, db_column=b'SDCCSARURI', max_length=300, null=True)),
- ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
- ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
- ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
- ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
- ('pnfdId', models.CharField(blank=True, db_column=b'PNFDID', max_length=50, null=True)),
- ('pnfVendor', models.CharField(blank=True, db_column=b'VENDOR', max_length=50, null=True)),
- ('pnfdProductName', models.CharField(blank=True, db_column=b'PNFDPRODUCTNAME', max_length=50, null=True)),
- ('pnfdVersion', models.CharField(blank=True, db_column=b'PNFDVERSION', max_length=20, null=True)),
- ('pnfSoftwareVersion', models.CharField(blank=True, db_column=b'PNFSOFTWAREVERSION', max_length=20, null=True)),
- ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
- ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
- ('pnfdModel', models.TextField(blank=True, db_column=b'PNFDMODEL', max_length=65535, null=True)),
- ('pnfdName', models.TextField(blank=True, db_column=b'PNFDNAME', max_length=65535, null=True)),
+ ('pnfPackageId', models.CharField(db_column='PNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+ ('pnfPackageUri', models.CharField(blank=True, db_column='PNFPACKAGEURI', max_length=300, null=True)),
+ ('sdcCSARUri', models.CharField(blank=True, db_column='SDCCSARURI', max_length=300, null=True)),
+ ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
+ ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
+ ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
+ ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
+ ('pnfdId', models.CharField(blank=True, db_column='PNFDID', max_length=50, null=True)),
+ ('pnfVendor', models.CharField(blank=True, db_column='VENDOR', max_length=50, null=True)),
+ ('pnfdProductName', models.CharField(blank=True, db_column='PNFDPRODUCTNAME', max_length=50, null=True)),
+ ('pnfdVersion', models.CharField(blank=True, db_column='PNFDVERSION', max_length=20, null=True)),
+ ('pnfSoftwareVersion', models.CharField(blank=True, db_column='PNFSOFTWAREVERSION', max_length=20, null=True)),
+ ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
+ ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
+ ('pnfdModel', models.TextField(blank=True, db_column='PNFDMODEL', max_length=65535, null=True)),
+ ('pnfdName', models.TextField(blank=True, db_column='PNFDNAME', max_length=65535, null=True)),
],
options={
'db_table': 'CATALOG_PNFPACKAGE',
@@ -144,23 +144,23 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='ServicePackageModel',
fields=[
- ('servicePackageId', models.CharField(db_column=b'SERVICEPACKAGEID', max_length=50, primary_key=True, serialize=False)),
- ('servicePackageUri', models.CharField(blank=True, db_column=b'SERVICEPACKAGEURI', max_length=300, null=True)),
- ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
- ('sdcCsarId', models.CharField(blank=True, db_column=b'SDCCSARID', max_length=50, null=True)),
- ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
- ('operationalState', models.CharField(blank=True, db_column=b'OPERATIONALSTATE', max_length=20, null=True)),
- ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
- ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
- ('servicedId', models.CharField(blank=True, db_column=b'SERVICEDID', max_length=50, null=True)),
- ('invariantId', models.CharField(blank=True, db_column=b'INVARIANTID', max_length=50, null=True)),
- ('servicedName', models.CharField(blank=True, db_column=b'SERVICEDNAME', max_length=50, null=True)),
- ('servicedDesigner', models.CharField(blank=True, db_column=b'SERVICEDDESIGNER', max_length=50, null=True)),
- ('servicedDescription', models.CharField(blank=True, db_column=b'SERVICEDDESCRIPTION', max_length=100, null=True)),
- ('servicedVersion', models.CharField(blank=True, db_column=b'SERVICEDVERSION', max_length=20, null=True)),
- ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
- ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
- ('servicedModel', models.TextField(blank=True, db_column=b'SERVICEDMODEL', max_length=65535, null=True)),
+ ('servicePackageId', models.CharField(db_column='SERVICEPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+ ('servicePackageUri', models.CharField(blank=True, db_column='SERVICEPACKAGEURI', max_length=300, null=True)),
+ ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
+ ('sdcCsarId', models.CharField(blank=True, db_column='SDCCSARID', max_length=50, null=True)),
+ ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
+ ('operationalState', models.CharField(blank=True, db_column='OPERATIONALSTATE', max_length=20, null=True)),
+ ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
+ ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
+ ('servicedId', models.CharField(blank=True, db_column='SERVICEDID', max_length=50, null=True)),
+ ('invariantId', models.CharField(blank=True, db_column='INVARIANTID', max_length=50, null=True)),
+ ('servicedName', models.CharField(blank=True, db_column='SERVICEDNAME', max_length=50, null=True)),
+ ('servicedDesigner', models.CharField(blank=True, db_column='SERVICEDDESIGNER', max_length=50, null=True)),
+ ('servicedDescription', models.CharField(blank=True, db_column='SERVICEDDESCRIPTION', max_length=100, null=True)),
+ ('servicedVersion', models.CharField(blank=True, db_column='SERVICEDVERSION', max_length=20, null=True)),
+ ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
+ ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
+ ('servicedModel', models.TextField(blank=True, db_column='SERVICEDMODEL', max_length=65535, null=True)),
],
options={
'db_table': 'CATALOG_SERVICEPACKAGE',
@@ -169,16 +169,16 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='SoftwareImageModel',
fields=[
- ('imageid', models.CharField(db_column=b'IMAGEID', max_length=50, primary_key=True, serialize=False)),
- ('containerFormat', models.CharField(db_column=b'CONTAINERFORMAT', max_length=20)),
- ('diskFormat', models.CharField(db_column=b'DISKFORMAT', max_length=20)),
- ('mindisk', models.CharField(db_column=b'MINDISK', max_length=20)),
- ('minram', models.CharField(db_column=b'MINRAM', max_length=20)),
- ('usermetadata', models.CharField(db_column=b'USAERMETADATA', max_length=1024)),
- ('vnfPackageId', models.CharField(db_column=b'VNFPACKAGEID', max_length=50)),
- ('filePath', models.CharField(db_column=b'FILEPATH', max_length=300)),
- ('status', models.CharField(db_column=b'STATUS', max_length=10)),
- ('vimid', models.CharField(db_column=b'VIMID', max_length=50)),
+ ('imageid', models.CharField(db_column='IMAGEID', max_length=50, primary_key=True, serialize=False)),
+ ('containerFormat', models.CharField(db_column='CONTAINERFORMAT', max_length=20)),
+ ('diskFormat', models.CharField(db_column='DISKFORMAT', max_length=20)),
+ ('mindisk', models.CharField(db_column='MINDISK', max_length=20)),
+ ('minram', models.CharField(db_column='MINRAM', max_length=20)),
+ ('usermetadata', models.CharField(db_column='USAERMETADATA', max_length=1024)),
+ ('vnfPackageId', models.CharField(db_column='VNFPACKAGEID', max_length=50)),
+ ('filePath', models.CharField(db_column='FILEPATH', max_length=300)),
+ ('status', models.CharField(db_column='STATUS', max_length=10)),
+ ('vimid', models.CharField(db_column='VIMID', max_length=50)),
],
options={
'db_table': 'CATALOG_SOFTWAREIMAGEMODEL',
@@ -187,22 +187,22 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='VnfPackageModel',
fields=[
- ('vnfPackageId', models.CharField(db_column=b'VNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
- ('vnfPackageUri', models.CharField(blank=True, db_column=b'VNFPACKAGEURI', max_length=300, null=True)),
- ('SdcCSARUri', models.CharField(blank=True, db_column=b'SDCCSARURI', max_length=300, null=True)),
- ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
- ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
- ('operationalState', models.CharField(blank=True, db_column=b'OPERATIONALSTATE', max_length=20, null=True)),
- ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
- ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
- ('vnfdId', models.CharField(blank=True, db_column=b'VNFDID', max_length=50, null=True)),
- ('vnfVendor', models.CharField(blank=True, db_column=b'VENDOR', max_length=50, null=True)),
- ('vnfdProductName', models.CharField(blank=True, db_column=b'VNFDPRODUCTNAME', max_length=50, null=True)),
- ('vnfdVersion', models.CharField(blank=True, db_column=b'VNFDVERSION', max_length=20, null=True)),
- ('vnfSoftwareVersion', models.CharField(blank=True, db_column=b'VNFSOFTWAREVERSION', max_length=20, null=True)),
- ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
- ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
- ('vnfdModel', models.TextField(blank=True, db_column=b'VNFDMODEL', max_length=65535, null=True)),
+ ('vnfPackageId', models.CharField(db_column='VNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+ ('vnfPackageUri', models.CharField(blank=True, db_column='VNFPACKAGEURI', max_length=300, null=True)),
+ ('SdcCSARUri', models.CharField(blank=True, db_column='SDCCSARURI', max_length=300, null=True)),
+ ('checksum', models.CharField(blank=True, db_column='CHECKSUM', max_length=50, null=True)),
+ ('onboardingState', models.CharField(blank=True, db_column='ONBOARDINGSTATE', max_length=20, null=True)),
+ ('operationalState', models.CharField(blank=True, db_column='OPERATIONALSTATE', max_length=20, null=True)),
+ ('usageState', models.CharField(blank=True, db_column='USAGESTATE', max_length=20, null=True)),
+ ('deletionPending', models.CharField(blank=True, db_column='DELETIONPENDING', max_length=20, null=True)),
+ ('vnfdId', models.CharField(blank=True, db_column='VNFDID', max_length=50, null=True)),
+ ('vnfVendor', models.CharField(blank=True, db_column='VENDOR', max_length=50, null=True)),
+ ('vnfdProductName', models.CharField(blank=True, db_column='VNFDPRODUCTNAME', max_length=50, null=True)),
+ ('vnfdVersion', models.CharField(blank=True, db_column='VNFDVERSION', max_length=20, null=True)),
+ ('vnfSoftwareVersion', models.CharField(blank=True, db_column='VNFSOFTWAREVERSION', max_length=20, null=True)),
+ ('userDefinedData', models.TextField(blank=True, db_column='USERDEFINEDDATA', max_length=1024, null=True)),
+ ('localFilePath', models.CharField(blank=True, db_column='LOCALFILEPATH', max_length=300, null=True)),
+ ('vnfdModel', models.TextField(blank=True, db_column='VNFDMODEL', max_length=65535, null=True)),
],
options={
'db_table': 'CATALOG_VNFPACKAGE',
@@ -211,16 +211,16 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='VnfPkgSubscriptionModel',
fields=[
- ('subscription_id', models.CharField(db_column=b'SUBSCRIPTION_ID', max_length=255, primary_key=True, serialize=False)),
- ('callback_uri', models.URLField(db_column=b'CALLBACK_URI', max_length=255)),
- ('auth_info', models.TextField(db_column=b'AUTH_INFO')),
- ('usage_states', models.TextField(db_column=b'USAGE_STATES')),
- ('notification_types', models.TextField(db_column=b'NOTIFICATION_TYPES')),
- ('vnfd_id', models.TextField(db_column=b'VNFD_ID')),
- ('vnf_pkg_id', models.TextField(db_column=b'VNF_PKG_ID')),
- ('operation_states', models.TextField(db_column=b'OPERATION_STATES')),
- ('vnf_products_from_provider', models.TextField(db_column=b'VNF_PRODUCTS_FROM_PROVIDER')),
- ('links', models.TextField(db_column=b'LINKS')),
+ ('subscription_id', models.CharField(db_column='SUBSCRIPTION_ID', max_length=255, primary_key=True, serialize=False)),
+ ('callback_uri', models.URLField(db_column='CALLBACK_URI', max_length=255)),
+ ('auth_info', models.TextField(db_column='AUTH_INFO')),
+ ('usage_states', models.TextField(db_column='USAGE_STATES')),
+ ('notification_types', models.TextField(db_column='NOTIFICATION_TYPES')),
+ ('vnfd_id', models.TextField(db_column='VNFD_ID')),
+ ('vnf_pkg_id', models.TextField(db_column='VNF_PKG_ID')),
+ ('operation_states', models.TextField(db_column='OPERATION_STATES')),
+ ('vnf_products_from_provider', models.TextField(db_column='VNF_PRODUCTS_FROM_PROVIDER')),
+ ('links', models.TextField(db_column='LINKS')),
],
options={
'db_table': 'VNF_PKG_SUBSCRIPTION',
diff --git a/catalog/pub/redisco/__init__.py b/catalog/pub/redisco/__init__.py
new file mode 100644
index 00000000..217a2327
--- /dev/null
+++ b/catalog/pub/redisco/__init__.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2010 Tim Medina
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+# The original code link is https://github.com/iamteem/redisco/tree/master/redisco/__init__.py
+
+
+import redis
+
+
+class Client(object):
+ def __init__(self, **kwargs):
+ self.connection_settings = kwargs or {'host': 'localhost', 'port': 6379, 'db': 0}
+
+ def redis(self):
+ return redis.Redis(**self.connection_settings)
+
+ def update(self, d):
+ self.connection_settings.update(d)
+
+
+def connection_setup(**kwargs):
+ global connection, client
+ if client:
+ client.update(kwargs)
+ else:
+ client = Client(**kwargs)
+ connection = client.redis()
+
+
+def get_client():
+ global connection
+ return connection
+
+
+client = Client()
+connection = client.redis()
+
+__all__ = ['connection_setup', 'get_client']
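Note: the vendored module keeps the original redisco entry points, so existing call sites only need the new import path. A minimal usage sketch (assumes a reachable Redis server; the key name is illustrative, not from the patch):

    from catalog.pub import redisco

    redisco.connection_setup(host='localhost', port=6379, db=0)
    r = redisco.get_client()              # a plain redis.Redis instance
    r.set('demo:key', 'value')
    assert r.get('demo:key') == b'value'  # redis-py returns bytes under Python 3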
diff --git a/catalog/pub/redisco/containers.py b/catalog/pub/redisco/containers.py
new file mode 100644
index 00000000..89572940
--- /dev/null
+++ b/catalog/pub/redisco/containers.py
@@ -0,0 +1,116 @@
+# Copyright (c) 2010 Tim Medina
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+# The original code link is https://github.com/iamteem/redisco/tree/master/redisco/containers.py
+
+"""
+This module contains the container classes to create objects
+that persist directly in a Redis server.
+"""
+
+import collections.abc
+from functools import partial
+
+
+class Container(object):
+ """Create a container object saved in Redis.
+
+ Arguments:
+ key -- the Redis key this container is stored at
+ db -- the Redis client object. Default: None
+
+    When ``db`` is not set, it gets the default connection from
+    the ``catalog.pub.redisco`` module.
+ """
+
+ def __init__(self, key, db=None, pipeline=None):
+ self._db = db
+ self.key = key
+ self.pipeline = pipeline
+
+ def clear(self):
+ """Remove container from Redis database."""
+ del self.db[self.key]
+
+ def __getattribute__(self, att):
+ if att in object.__getattribute__(self, 'DELEGATEABLE_METHODS'):
+ return partial(getattr(object.__getattribute__(self, 'db'), att), self.key)
+ else:
+ return object.__getattribute__(self, att)
+
+ @property
+ def db(self):
+ if self.pipeline:
+ return self.pipeline
+ if self._db:
+ return self._db
+ if hasattr(self, 'db_cache') and self.db_cache:
+ return self.db_cache
+ else:
+            from catalog.pub.redisco import connection
+ self.db_cache = connection
+ return self.db_cache
+
+ DELEGATEABLE_METHODS = ()
+
+
+class Hash(Container, collections.abc.MutableMapping):
+
+ def __getitem__(self, att):
+ return self.hget(att)
+
+ def __setitem__(self, att, val):
+ self.hset(att, val)
+
+ def __delitem__(self, att):
+ self.hdel(att)
+
+ def __len__(self):
+ return self.hlen()
+
+ def __iter__(self):
+ return self.hgetall().__iter__()
+
+ def __contains__(self, att):
+ return self.hexists(att)
+
+ def __repr__(self):
+ return "<%s '%s' %s>" % (self.__class__.__name__, self.key, self.hgetall())
+
+ def keys(self):
+ return self.hkeys()
+
+ def values(self):
+ return self.hvals()
+
+ def _get_dict(self):
+ return self.hgetall()
+
+ def _set_dict(self, new_dict):
+ self.clear()
+ self.update(new_dict)
+
+ dict = property(_get_dict, _set_dict)
+
+ DELEGATEABLE_METHODS = ('hlen', 'hset', 'hdel', 'hkeys', 'hgetall', 'hvals',
+ 'hget', 'hexists', 'hincrby', 'hmget', 'hmset')
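Note: Hash proxies every method in DELEGATEABLE_METHODS to the underlying redis client with its key pre-bound, so it behaves like a dict backed by a Redis hash. A small sketch (assumes a reachable Redis; the key name is illustrative):

    from catalog.pub.redisco.containers import Hash

    h = Hash('demo:hash')
    h['color'] = 'red'     # __setitem__ -> hset('demo:hash', 'color', 'red')
    print(len(h))          # __len__ -> hlen, prints 1
    print(h['color'])      # __getitem__ -> hget, prints b'red'
    h.hincrby('count', 1)  # delegated methods are also callable directly
    h.clear()              # removes the whole hash key from Redis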
diff --git a/catalog/pub/utils/fileutil.py b/catalog/pub/utils/fileutil.py
index d7811b8f..6ddfc72c 100644
--- a/catalog/pub/utils/fileutil.py
+++ b/catalog/pub/utils/fileutil.py
@@ -16,7 +16,7 @@ import shutil
import logging
import tempfile
import traceback
-import urllib2
+import urllib.request
import zipfile
@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
def make_dirs(path):
if not os.path.exists(path):
- os.makedirs(path, 0777)
+ os.makedirs(path, 0o777)
def delete_dirs(path):
@@ -34,7 +34,7 @@ def delete_dirs(path):
shutil.rmtree(path)
except Exception as e:
logger.error(traceback.format_exc())
- logger.error("Failed to delete %s:%s", path, e.message)
+ logger.error("Failed to delete %s:%s", path, e.args[0])
def download_file_from_http(url, local_dir, file_name):
@@ -42,9 +42,8 @@ def download_file_from_http(url, local_dir, file_name):
is_download_ok = False
try:
make_dirs(local_dir)
- r = urllib2.Request(url)
- req = urllib2.urlopen(r)
- save_file = open(local_file_name, 'wb')
+ req = urllib.request.urlopen(url)
+        save_file = open(local_file_name, 'wb')
save_file.write(req.read())
save_file.close()
req.close()
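Note: urllib2 was split into urllib.request and urllib.error in Python 3, urlopen().read() now returns bytes, and the octal literal became 0o777; the local file therefore has to stay in binary mode (hence the 'wb' above). A standalone sketch of the same download pattern (URL and paths are illustrative, not the patched function verbatim):

    import os
    import urllib.request

    def download(url, local_dir, file_name):
        os.makedirs(local_dir, exist_ok=True)          # py3 spelling of makedirs + mode
        local_path = os.path.join(local_dir, file_name)
        with urllib.request.urlopen(url) as resp, open(local_path, 'wb') as f:
            f.write(resp.read())                       # bytes in, bytes out
        return local_path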
diff --git a/catalog/pub/utils/idutil.py b/catalog/pub/utils/idutil.py
index 85bebb83..c2347c16 100644
--- a/catalog/pub/utils/idutil.py
+++ b/catalog/pub/utils/idutil.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from redisco import containers as cont
+from catalog.pub.redisco import containers as cont
def get_auto_id(id_type, id_group="auto_id_hash"):
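Note: get_auto_id's body is unchanged here; only the import moves to the vendored package. For illustration, a hypothetical redis-backed auto-increment built from the same Hash container (not the actual function body):

    from catalog.pub.redisco import containers as cont

    def next_id(id_type, id_group="auto_id_hash"):
        # HINCRBY is atomic in Redis, so concurrent callers get distinct ids
        return cont.Hash(id_group).hincrby(id_type, 1)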
diff --git a/catalog/pub/utils/jobutil.py b/catalog/pub/utils/jobutil.py
index 78983aba..3d79c7ae 100644
--- a/catalog/pub/utils/jobutil.py
+++ b/catalog/pub/utils/jobutil.py
@@ -15,6 +15,7 @@ import datetime
import logging
import uuid
import traceback
+from functools import reduce
from catalog.pub.database.models import JobStatusModel, JobModel
from catalog.pub.utils import idutil
diff --git a/catalog/pub/utils/restcall.py b/catalog/pub/utils/restcall.py
index 6cdc14bc..6a07e897 100644
--- a/catalog/pub/utils/restcall.py
+++ b/catalog/pub/utils/restcall.py
@@ -15,7 +15,7 @@
import sys
import traceback
import logging
-import urllib2
+import urllib.error
import uuid
import httplib2
@@ -65,7 +65,7 @@ def call_req(base_url, user, passwd, auth_type, resource, method, content='', ad
ret = [1, "Unable to connect to %s" % full_url, resp_status]
continue
raise ex
- except urllib2.URLError as err:
+ except urllib.error.URLError as err:
ret = [2, str(err), resp_status]
except Exception as ex:
logger.error(traceback.format_exc())
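Note: URLError now lives in urllib.error, and urllib.error.HTTPError is a subclass of it, so an HTTPError handler must come first when both are caught. A minimal illustration (the URL is deliberately unreachable):

    import urllib.error
    import urllib.request

    try:
        urllib.request.urlopen('http://127.0.0.1:9/nowhere', timeout=1)
    except urllib.error.HTTPError as err:   # subclass of URLError, check first
        print('HTTP status:', err.code)
    except urllib.error.URLError as err:
        print('connection failed:', err.reason)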
diff --git a/catalog/pub/utils/tests.py b/catalog/pub/utils/tests.py
index 73029fa6..0f024679 100644
--- a/catalog/pub/utils/tests.py
+++ b/catalog/pub/utils/tests.py
@@ -15,11 +15,11 @@
import platform
import unittest
import mock
-import fileutil
-import urllib2
-import syscomm
-import timeutil
-import values
+from . import fileutil
+import urllib.request
+from . import syscomm
+from . import timeutil
+from . import values
from catalog.pub.database.models import JobStatusModel, JobModel
from catalog.pub.utils.jobutil import JobUtil
@@ -46,7 +46,7 @@ class UtilsTest(unittest.TestCase):
fileutil.make_dirs(dirs)
fileutil.delete_dirs(dirs)
- @mock.patch.object(urllib2, 'urlopen')
+ @mock.patch.object(urllib.request, 'urlopen')
def test_download_file_from_http(self, mock_urlopen):
mock_urlopen.return_value = MockReq()
fileutil.delete_dirs("abc")
diff --git a/catalog/pub/utils/toscaparser/basemodel.py b/catalog/pub/utils/toscaparser/basemodel.py
index 1d8b261d..9ca9f12b 100644
--- a/catalog/pub/utils/toscaparser/basemodel.py
+++ b/catalog/pub/utils/toscaparser/basemodel.py
@@ -77,7 +77,7 @@ class BaseInfoModel(object):
try:
os.remove(file_name)
except Exception as e:
- logger.error("Failed to parse package, error: %s", e.message)
+ logger.error("Failed to parse package, error: %s", e.args[0])
def _validate_input_params(self, path, params):
valid_params = {}
@@ -92,7 +92,7 @@ class BaseInfoModel(object):
if params:
tmp = self._create_tosca_template(path, None)
if isinstance(params, dict):
- for key, value in params.items():
+ for key, value in list(params.items()):
if hasattr(tmp, 'inputs') and len(tmp.inputs) > 0:
for input_def in tmp.inputs:
if (input_def.name == key):
@@ -107,16 +107,16 @@ class BaseInfoModel(object):
no_required_paras_check=True,
debug_mode=True)
except Exception as e:
- print e.message
+ print(e.args[0])
finally:
if tosca_tpl is not None and hasattr(tosca_tpl, "temp_dir") and os.path.exists(tosca_tpl.temp_dir):
try:
shutil.rmtree(tosca_tpl.temp_dir)
except Exception as e:
- logger.error("Failed to create tosca template, error: %s", e.message)
- print "-----------------------------"
- print '\n'.join(['%s:%s' % item for item in tosca_tpl.__dict__.items()])
- print "-----------------------------"
+ logger.error("Failed to create tosca template, error: %s", e.args[0])
+ print("-----------------------------")
+ print('\n'.join(['%s:%s' % item for item in list(tosca_tpl.__dict__.items())]))
+ print("-----------------------------")
return tosca_tpl
def _check_download_file(self, path):
@@ -130,7 +130,7 @@ class BaseInfoModel(object):
path = path.encode("utf-8")
tmps = str.split(path, '/')
localFileName = tmps[len(tmps) - 1]
- urllib.urlretrieve(path, localFileName)
+ urllib.request.urlretrieve(path, localFileName)
return localFileName
def downloadFileFromFtpServer(self, path):
@@ -222,7 +222,7 @@ class BaseInfoModel(object):
def buildProperties(self, nodeTemplate, parsed_params):
properties = {}
isMappingParams = parsed_params and len(parsed_params) > 0
- for k, item in nodeTemplate.get_properties().items():
+ for k, item in list(nodeTemplate.get_properties().items()):
properties[k] = item.value
if isinstance(item.value, GetInput):
if item.value.result() and isMappingParams:
@@ -232,7 +232,7 @@ class BaseInfoModel(object):
tmp[item.value.name] = item.value.input_name
properties[k] = tmp
if ATTRIBUTES in nodeTemplate.entity_tpl:
- for k, item in nodeTemplate.entity_tpl[ATTRIBUTES].items():
+ for k, item in list(nodeTemplate.entity_tpl[ATTRIBUTES].items()):
properties[k] = str(item)
return properties
@@ -241,7 +241,7 @@ class BaseInfoModel(object):
properties = nodeTemplate.get_properties()
_properties = {}
if isinstance(properties, dict):
- for name, prop in properties.items():
+ for name, prop in list(properties.items()):
if isinstance(prop, Property):
if isinstance(prop.value, Function):
if isinstance(prop.value, Concat): # support one layer inner function.
@@ -251,7 +251,7 @@ class BaseInfoModel(object):
value_str += arg
elif isinstance(arg, dict):
raw_func = {}
- for k, v in arg.items():
+ for k, v in list(arg.items()):
func_args = []
func_args.append(v)
raw_func[k] = func_args
@@ -282,7 +282,7 @@ class BaseInfoModel(object):
value_str += arg
elif isinstance(arg, dict):
raw_func = {}
- for k, v in arg.items():
+ for k, v in list(arg.items()):
func_args = []
func_args.append(v)
raw_func[k] = func_args
@@ -306,7 +306,7 @@ class BaseInfoModel(object):
def verify_properties(self, props, inputs, parsed_params):
ret_props = {}
if (props and len(props) > 0):
- for key, value in props.items():
+ for key, value in list(props.items()):
ret_props[key] = self._verify_value(value, inputs, parsed_params)
# if isinstance(value, str):
# ret_props[key] = self._verify_string(inputs, parsed_params, value);
@@ -323,7 +323,7 @@ class BaseInfoModel(object):
def build_requirements(self, node_template):
rets = []
for req in node_template.requirements:
- for req_name, req_value in req.items():
+ for req_name, req_value in list(req.items()):
if (isinstance(req_value, dict)):
if ('node' in req_value and req_value['node'] not in node_template.templates):
continue # No target requirement for aria parser, not add to result.
@@ -385,7 +385,7 @@ class BaseInfoModel(object):
requirements = []
if REQUIREMENTS in node:
for item in node[REQUIREMENTS]:
- for key, value in item.items():
+ for key, value in list(item.items()):
if key == requirementName:
requirements.append(value)
return requirements
@@ -439,7 +439,7 @@ class BaseInfoModel(object):
rets = []
if ARTIFACTS in node and len(node[ARTIFACTS]) > 0:
artifacts = node[ARTIFACTS]
- for name, value in artifacts.items():
+ for name, value in list(artifacts.items()):
ret = {}
ret['artifact_name'] = name
ret['file'] = value
@@ -491,7 +491,7 @@ class BaseInfoModel(object):
for type_require in type_requires:
type_require_set.update(type_require)
for requirement in node.requirements:
- for k in requirement.keys():
+ for k in list(requirement.keys()):
if type_require_set[k].get('relationship', None) in relations[0] or type_require_set[k].get('capability', None) in relations[0]:
if isinstance(requirement[k], dict):
next_node = requirement[k].get('node', None)
diff --git a/catalog/pub/utils/toscaparser/graph.py b/catalog/pub/utils/toscaparser/graph.py
index 6d38d12f..0af2a143 100644
--- a/catalog/pub/utils/toscaparser/graph.py
+++ b/catalog/pub/utils/toscaparser/graph.py
@@ -21,7 +21,7 @@ class Graph(object):
def __init__(self, graph_dict=None):
self.graph = OrderedDict()
if graph_dict:
- for node, dep_nodes in graph_dict.iteritems():
+ for node, dep_nodes in list(graph_dict.items()):
self.add_node(node, dep_nodes)
def add_node(self, node, dep_nodes):
@@ -67,7 +67,7 @@ class Graph(object):
def to_dict(self):
dict = {}
- for node, dependents in self.graph.iteritems():
+ for node, dependents in self.graph.items():
dict[node] = []
for dep in dependents:
dict[node].append(dep)
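Note: in Python 3, dict.items() returns a live view, so the plain iteration in to_dict above needs no copy; the list() wrapper used at most other call sites in this patch is the mechanical 2to3 translation and is only strictly required when the dict is mutated during iteration. For example:

    graph = {'a': ['b'], 'b': []}
    for node, deps in graph.items():   # read-only iteration: no copy needed
        print(node, deps)

    for node in list(graph.keys()):    # copy needed: the dict shrinks mid-loop
        if not graph[node]:
            del graph[node]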
diff --git a/catalog/pub/utils/toscaparser/nsdmodel.py b/catalog/pub/utils/toscaparser/nsdmodel.py
index 58712fc0..f742640f 100644
--- a/catalog/pub/utils/toscaparser/nsdmodel.py
+++ b/catalog/pub/utils/toscaparser/nsdmodel.py
@@ -60,7 +60,7 @@ class EtsiNsdInfoModel(BaseInfoModel):
self.metadata = self.buildMetadata(tosca)
self.ns = self._build_ns(tosca)
self.inputs = self.buildInputs(tosca)
- nodeTemplates = map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates)
+ nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
types = tosca.topology_template.custom_defs
self.basepath = self.get_base_path(tosca)
self.vnfs = self._get_all_vnf(nodeTemplates, types)
@@ -126,7 +126,7 @@ class EtsiNsdInfoModel(BaseInfoModel):
forwarderList = []
if 'requirements' in node:
for item in node['requirements']:
- for key, value in item.items():
+ for key, value in list(item.items()):
if key == 'forwarder':
tmpnode = self.get_node_by_req(node_templates, value)
type = 'pnf' if self.isNodeTypeX(tmpnode, node_types, NS_PNF_TYPE) else 'vnf'
@@ -161,7 +161,7 @@ class EtsiNsdInfoModel(BaseInfoModel):
def _get_external_cps(self, subs_mappings):
external_cps = []
if 'requirements' in subs_mappings:
- for key, value in subs_mappings['requirements'].items():
+ for key, value in list(subs_mappings['requirements'].items()):
if isinstance(value, list) and len(value) > 0:
external_cps.append({"key_name": key, "cpd_id": value[0]})
else:
@@ -171,7 +171,7 @@ class EtsiNsdInfoModel(BaseInfoModel):
def _get_forward_cps(self, subs_mappings):
forward_cps = []
if 'capabilities' in subs_mappings:
- for key, value in subs_mappings['capabilities'].items():
+ for key, value in list(subs_mappings['capabilities'].items()):
if isinstance(value, list) and len(value) > 0:
forward_cps.append({"key_name": key, "cpd_id": value[0]})
else:
@@ -194,7 +194,7 @@ class EtsiNsdInfoModel(BaseInfoModel):
rets = []
if 'requirements' in node and (self.isNodeTypeX(node, node_types, NS_TYPE) or self.isNodeTypeX(node, node_types, NS_VNF_TYPE)):
for item in node['requirements']:
- for key, value in item.items():
+ for key, value in list(item.items()):
rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)})
return rets
diff --git a/catalog/pub/utils/toscaparser/sdmodel.py b/catalog/pub/utils/toscaparser/sdmodel.py
index d7e1c3ad..05e0caf9 100644
--- a/catalog/pub/utils/toscaparser/sdmodel.py
+++ b/catalog/pub/utils/toscaparser/sdmodel.py
@@ -64,14 +64,14 @@ class SdInfoModel(BaseInfoModel):
def get_child_input_repeat(self, complex_input, entry_schema, input):
custom_defs = input.custom_defs
properties = custom_defs[entry_schema]['properties']
- for key, value in properties.iteritems():
+ for key, value in properties.items():
if value['type'].__eq__('list'):
child_complex_input = []
child_entry_schema = self.get_entry_schema(value['entry_schema'])
self.get_child_input_repeat(child_complex_input, child_entry_schema, input)
complex_input.append({key: child_complex_input})
else:
- if 'description' in value.keys():
+ if 'description' in list(value.keys()):
simple_input = {
key: "",
"type": value['type'],
@@ -88,6 +88,6 @@ class SdInfoModel(BaseInfoModel):
def get_entry_schema(self, entry_schema):
if isinstance(entry_schema, dict):
- if 'type' in entry_schema.keys():
+ if 'type' in list(entry_schema.keys()):
entry_schema = entry_schema['type']
return entry_schema
diff --git a/catalog/pub/utils/toscaparser/servicemodel.py b/catalog/pub/utils/toscaparser/servicemodel.py
index d8fb5bab..47d66308 100644
--- a/catalog/pub/utils/toscaparser/servicemodel.py
+++ b/catalog/pub/utils/toscaparser/servicemodel.py
@@ -54,7 +54,7 @@ class SdcServiceModel(BaseInfoModel):
self.ns = self._build_ns(tosca)
self.inputs = self.buildInputs(tosca)
if hasattr(tosca, 'nodetemplates'):
- nodeTemplates = map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates)
+ nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
types = tosca.topology_template.custom_defs
self.basepath = self.get_base_path(tosca)
self.vnfs = self._get_all_vnf(nodeTemplates, types)
@@ -166,7 +166,7 @@ class SdcServiceModel(BaseInfoModel):
rets = []
if 'requirements' in node and self.isNodeTypeX(node, node_types, VF_TYPE):
for item in node['requirements']:
- for key, value in item.items():
+ for key, value in list(item.items()):
rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)})
return rets
diff --git a/catalog/pub/utils/toscaparser/vnfdmodel.py b/catalog/pub/utils/toscaparser/vnfdmodel.py
index eb6732f4..1ed06597 100644
--- a/catalog/pub/utils/toscaparser/vnfdmodel.py
+++ b/catalog/pub/utils/toscaparser/vnfdmodel.py
@@ -34,7 +34,7 @@ class EtsiVnfdInfoModel(BaseInfoModel):
def parseModel(self, tosca):
self.metadata = self.buildMetadata(tosca)
self.inputs = self.buildInputs(tosca)
- nodeTemplates = map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates)
+ nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
self.basepath = self.get_base_path(tosca)
node_types = tosca.topology_template.custom_defs
sol_version = self.metadata.get("VNFD_SCHEMA_VERSION", VNFD_SCHEMA_VERSION_DEFAULT) if isinstance(self.metadata, dict) else VNFD_SCHEMA_VERSION_DEFAULT
diff --git a/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_251.py b/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_251.py
index 98e0940a..48ad2c82 100644
--- a/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_251.py
+++ b/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_251.py
@@ -32,7 +32,7 @@ class VnfdSOL251():
properties = vnf.get("properties", {})
metadata = vnf.get("metadata", {})
- for key, value in properties.items():
+ for key, value in list(properties.items()):
if isinstance(value, dict):
if value["type"] == "string":
properties[key] = value.get("default", "")
@@ -173,17 +173,15 @@ class VnfdSOL251():
if isinstance(inject_files, list):
for inject_file in inject_files:
source_path = os.path.join(self.model.basepath, inject_file['source_path'])
- with open(source_path, "rb") as f:
+ with open(source_path, "rt") as f:
source_data = f.read()
- source_data_base64 = source_data.encode("base64")
- inject_file["source_data_base64"] = source_data_base64
+ inject_file["source_data_base64"] = source_data
if isinstance(inject_files, dict):
source_path = os.path.join(self.model.basepath, inject_files['source_path'])
- with open(source_path, "rb") as f:
+ with open(source_path, "rt") as f:
source_data = f.read()
- source_data_base64 = source_data.encode("base64")
- inject_files["source_data_base64"] = source_data_base64
- ret['dependencies'] = map(lambda x: self.model.get_requirement_node_name(x), self.model.getNodeDependencys(node))
+ inject_files["source_data_base64"] = source_data
+ ret['dependencies'] = [self.model.get_requirement_node_name(x) for x in self.model.getNodeDependencys(node)]
virtual_compute = self.model.getCapabilityByName(node, 'virtual_compute')
if virtual_compute is not None and 'properties' in virtual_compute:
vc = {}
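Note: str.encode("base64") no longer exists in Python 3, and the replacement above stores the raw file text in source_data_base64 (the same change is applied in vnfd_sol_base.py below). If consumers still expect base64 content, which this patch does not confirm, the Python 3 equivalent of the old behaviour would be:

    import base64

    with open(source_path, "rb") as f:   # keep binary mode for arbitrary files
        inject_file["source_data_base64"] = base64.b64encode(f.read()).decode()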
@@ -230,14 +228,14 @@ class VnfdSOL251():
return vl_ids
def _get_virtal_binding_cp_ids(self, node, nodeTemplates):
- return map(lambda x: x['name'], self._get_virtal_binding_cps(node, nodeTemplates))
+ return [x['name'] for x in self._get_virtal_binding_cps(node, nodeTemplates)]
def _get_virtal_binding_cps(self, node, nodeTemplates):
cps = []
for tmpnode in nodeTemplates:
if 'requirements' in tmpnode:
for item in tmpnode['requirements']:
- for key, value in item.items():
+ for key, value in list(item.items()):
if key.upper().startswith('VIRTUAL_BINDING'):
req_node_name = self.model.get_requirement_node_name(value)
if req_node_name is not None and req_node_name == node['name']:
@@ -245,19 +243,19 @@ class VnfdSOL251():
return cps
def _get_node_vdu_id(self, node):
- vdu_ids = map(lambda x: self.model.get_requirement_node_name(x), self.model.getRequirementByName(node, 'virtual_binding'))
+ vdu_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_binding')]
if len(vdu_ids) > 0:
return vdu_ids[0]
return ""
def _get_node_vl_id(self, node):
- vl_ids = map(lambda x: self.model.get_requirement_node_name(x), self.model.getRequirementByName(node, 'virtual_link'))
+ vl_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
if len(vl_ids) > 0:
return vl_ids[0]
return ""
def _buil_cp_vls(self, node):
- return map(lambda x: self._build_cp_vl(x), self.model.getRequirementByName(node, 'virtual_link'))
+ return [self._build_cp_vl(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
def _build_cp_vl(self, req):
cp_vl = {}
@@ -266,7 +264,7 @@ class VnfdSOL251():
if relationship is not None:
properties = self.model.get_prop_from_obj(relationship, 'properties')
if properties is not None and isinstance(properties, dict):
- for key, value in properties.items():
+ for key, value in list(properties.items()):
cp_vl[key] = value
return cp_vl
@@ -274,14 +272,14 @@ class VnfdSOL251():
external_cps = []
if vnf_requirements:
if isinstance(vnf_requirements, dict):
- for key, value in vnf_requirements.items():
+ for key, value in list(vnf_requirements.items()):
if isinstance(value, list) and len(value) > 0:
external_cps.append({"key_name": key, "cpd_id": value[0]})
else:
external_cps.append({"key_name": key, "cpd_id": value})
elif isinstance(vnf_requirements, list):
for vnf_requirement in vnf_requirements:
- for key, value in vnf_requirement.items():
+ for key, value in list(vnf_requirement.items()):
if isinstance(value, list) and len(value) > 0:
external_cps.append({"key_name": key, "cpd_id": value[0]})
else:
@@ -291,7 +289,7 @@ class VnfdSOL251():
def _get_forward_cps(self, vnf_capabilities):
forward_cps = []
if vnf_capabilities:
- for key, value in vnf_capabilities.items():
+ for key, value in list(vnf_capabilities.items()):
if isinstance(value, list) and len(value) > 0:
forward_cps.append({"key_name": key, "cpd_id": value[0]})
else:
diff --git a/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_base.py b/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_base.py
index 89ddc84a..cc941b26 100644
--- a/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_base.py
+++ b/catalog/pub/utils/toscaparser/vnfdparser/vnfd_sol_base.py
@@ -118,19 +118,17 @@ class VnfdSOLBase():
if isinstance(inject_files, list):
for inject_file in inject_files:
source_path = os.path.join(self.model.basepath, inject_file['source_path'])
- with open(source_path, "rb") as f:
+ with open(source_path, "rt") as f:
source_data = f.read()
- source_data_base64 = source_data.encode("base64")
- inject_file["source_data_base64"] = source_data_base64
+ inject_file["source_data_base64"] = source_data
if isinstance(inject_files, dict):
source_path = os.path.join(self.model.basepath, inject_files['source_path'])
- with open(source_path, "rb") as f:
+ with open(source_path, "rt") as f:
source_data = f.read()
- source_data_base64 = source_data.encode("base64")
- inject_files["source_data_base64"] = source_data_base64
+ inject_files["source_data_base64"] = source_data
virtual_storages = self.model.getRequirementByName(node, 'virtual_storage')
- ret['virtual_storages'] = map(functools.partial(self._trans_virtual_storage), virtual_storages)
- ret['dependencies'] = map(lambda x: self.model.get_requirement_node_name(x), self.model.getNodeDependencys(node))
+ ret['virtual_storages'] = list(map(functools.partial(self._trans_virtual_storage), virtual_storages))
+ ret['dependencies'] = [self.model.get_requirement_node_name(x) for x in self.model.getNodeDependencys(node)]
virtual_compute = self.model.getCapabilityByName(node, 'virtual_compute')
if virtual_compute is not None and 'properties' in virtual_compute:
ret['virtual_compute'] = virtual_compute['properties']
@@ -166,14 +164,14 @@ class VnfdSOLBase():
return vl_ids
def _get_virtal_binding_cp_ids(self, node, nodeTemplates):
- return map(lambda x: x['name'], self._get_virtal_binding_cps(node, nodeTemplates))
+ return [x['name'] for x in self._get_virtal_binding_cps(node, nodeTemplates)]
def _get_virtal_binding_cps(self, node, nodeTemplates):
cps = []
for tmpnode in nodeTemplates:
if 'requirements' in tmpnode:
for item in tmpnode['requirements']:
- for key, value in item.items():
+ for key, value in list(item.items()):
if key.upper().startswith('VIRTUAL_BINDING'):
req_node_name = self.model.get_requirement_node_name(value)
if req_node_name is not None and req_node_name == node['name']:
@@ -181,19 +179,19 @@ class VnfdSOLBase():
return cps
def _get_node_vdu_id(self, node):
- vdu_ids = map(lambda x: self.model.get_requirement_node_name(x), self.model.getRequirementByName(node, 'virtual_binding'))
+ vdu_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_binding')]
if len(vdu_ids) > 0:
return vdu_ids[0]
return ""
def _get_node_vl_id(self, node):
- vl_ids = map(lambda x: self.model.get_requirement_node_name(x), self.model.getRequirementByName(node, 'virtual_link'))
+ vl_ids = [self.model.get_requirement_node_name(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
if len(vl_ids) > 0:
return vl_ids[0]
return ""
def _buil_cp_vls(self, node):
- return map(lambda x: self._build_cp_vl(x), self.model.getRequirementByName(node, 'virtual_link'))
+ return [self._build_cp_vl(x) for x in self.model.getRequirementByName(node, 'virtual_link')]
def _build_cp_vl(self, req):
cp_vl = {}
@@ -202,7 +200,7 @@ class VnfdSOLBase():
if relationship is not None:
properties = self.model.get_prop_from_obj(relationship, 'properties')
if properties is not None and isinstance(properties, dict):
- for key, value in properties.items():
+ for key, value in list(properties.items()):
cp_vl[key] = value
return cp_vl
@@ -210,14 +208,14 @@ class VnfdSOLBase():
external_cps = []
if vnf_requirements:
if isinstance(vnf_requirements, dict):
- for key, value in vnf_requirements.items():
+ for key, value in list(vnf_requirements.items()):
if isinstance(value, list) and len(value) > 0:
external_cps.append({"key_name": key, "cpd_id": value[0]})
else:
external_cps.append({"key_name": key, "cpd_id": value})
elif isinstance(vnf_requirements, list):
for vnf_requirement in vnf_requirements:
- for key, value in vnf_requirement.items():
+ for key, value in list(vnf_requirement.items()):
if isinstance(value, list) and len(value) > 0:
external_cps.append({"key_name": key, "cpd_id": value[0]})
else:
@@ -227,7 +225,7 @@ class VnfdSOLBase():
def _get_forward_cps(self, vnf_capabilities):
forward_cps = []
if vnf_capabilities:
- for key, value in vnf_capabilities.items():
+ for key, value in list(vnf_capabilities.items()):
if isinstance(value, list) and len(value) > 0:
forward_cps.append({"key_name": key, "cpd_id": value[0]})
else:
diff --git a/catalog/pub/utils/values.py b/catalog/pub/utils/values.py
index 0fd2d1ac..d02d544c 100644
--- a/catalog/pub/utils/values.py
+++ b/catalog/pub/utils/values.py
@@ -22,3 +22,12 @@ def ignore_case_get(args, key, def_val=""):
if old_key.upper() == key.upper():
return args[old_key]
return def_val
+
+
+def remove_none_key(data, none_list=None):
+ none_list = none_list if none_list else [None, '', 'NULL', 'None', [], {}]
+ if isinstance(data, dict):
+ data = dict([(k, remove_none_key(v, none_list)) for k, v in list(data.items()) if v not in none_list])
+ if isinstance(data, list):
+ data = [remove_none_key(s, none_list) for s in data if s not in none_list]
+ return data
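Note: remove_none_key recursively drops dict entries and list items whose values are in none_list; because the emptiness test runs before recursion, a dict that only becomes empty after cleaning is kept. A quick illustration:

    from catalog.pub.utils.values import remove_none_key

    data = {'a': 1, 'b': '', 'c': {'d': None, 'e': 'x'}, 'f': []}
    print(remove_none_key(data))   # {'a': 1, 'c': {'e': 'x'}}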
diff --git a/catalog/settings.py b/catalog/settings.py
index 32e2f7c4..51c9a889 100644
--- a/catalog/settings.py
+++ b/catalog/settings.py
@@ -16,7 +16,7 @@ import os
import sys
import platform
-import redisco
+import catalog.pub.redisco
from catalog.pub.config.config import REDIS_HOST, REDIS_PORT, REDIS_PASSWD
from catalog.pub.config.config import DB_NAME, DB_IP, DB_USER, DB_PASSWD, DB_PORT
@@ -116,7 +116,7 @@ DATABASES = {
},
}
-redisco.connection_setup(host=REDIS_HOST, port=REDIS_PORT, password=REDIS_PASSWD, db=0)
+catalog.pub.redisco.connection_setup(host=REDIS_HOST, port=REDIS_PORT, password=REDIS_PASSWD, db=0)
# CACHE_BACKEND = 'redis_cache.cache://%s@%s:%s' % (REDIS_PASSWD, REDIS_HOST, REDIS_PORT)
TIME_ZONE = 'UTC'
diff --git a/catalog/swagger/management/commands/export_swagger.py b/catalog/swagger/management/commands/export_swagger.py
index 70f6143f..bc5fd1a5 100644
--- a/catalog/swagger/management/commands/export_swagger.py
+++ b/catalog/swagger/management/commands/export_swagger.py
@@ -33,4 +33,4 @@ class Command(BaseCommand):
response = self.client.get("/api/catalog/v1/swagger.json")
with open(options['name'], 'w') as swagger_file:
swagger_file.write(json.dumps(response.data))
- print "swagger api is written to %s" % options['name']
+ print("swagger api is written to %s" % options['name'])
diff --git a/docker/Dockerfile b/docker/Dockerfile
index f75f9091..37dd74dd 100755
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:2-alpine
+FROM python:3.6-alpine
ARG HTTP_PROXY=${HTTP_PROXY}
ARG HTTPS_PROXY=${HTTPS_PROXY}
diff --git a/docker/docker-env-conf.sh b/docker/docker-env-conf.sh
index a77ba520..75a9d3f0 100755
--- a/docker/docker-env-conf.sh
+++ b/docker/docker-env-conf.sh
@@ -4,7 +4,7 @@ install_sf(){
apk --no-cache update
apk --no-cache add bash curl gcc wget mysql-client openssl-dev
- apk --no-cache add python-dev libffi-dev musl-dev py2-virtualenv
+ apk --no-cache add python36-dev libffi-dev musl-dev py3-virtualenv
# get binary zip from nexus - vfc-nfvo-catalog
diff --git a/requirements.txt b/requirements.txt
index 3a0470ff..221e87f9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,6 +1,6 @@
# rest framework
-Django==1.11.9
-djangorestframework==3.7.7
+Django==2.1.4
+djangorestframework==3.9.4
# for access MySQL
PyMySQL==0.9.3
@@ -9,7 +9,7 @@ PyMySQL==0.9.3
redis==2.10.5
# for access redis cache
-redisco==0.1.4
+# redisco==0.1.4
django-redis-cache==0.13.1
# for call rest api
@@ -17,13 +17,13 @@ httplib2==0.12.3
# for unit test
coverage==4.2
-mock==2.0.0
+mock==3.0.5
unittest_xml_reporting==1.12.0
# for parser
# cryptography==2.0.3
# paramiko==2.0.2
-nfv-toscaparser==1.1.2.dev1
+nfv-toscaparser==1.1.2.dev2
# for auto swagger
drf-yasg>=1.2.2
@@ -31,7 +31,7 @@ flex>=6.11.1
swagger-spec-validator>=2.1.0
# for onap logging
-onappylog>=1.0.6
+onappylog==1.0.9
# uwsgi for parallel processing
-uwsgi \ No newline at end of file
+# uwsgi \ No newline at end of file
diff --git a/tox.ini b/tox.ini
index fcc6330e..3745940f 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py27,pep8,cov
+envlist = py36,pep8,cov
skipsdist = true
[tox:jenkins]
@@ -17,9 +17,10 @@ commands = coverage run --branch manage.py test catalog
deps = flake8
commands = flake8
-[testenv:py27]
+[testenv:py36]
commands =
{[testenv]commands}
[testenv:cov]
+deps = coverage
commands = coverage xml --omit="*test*,*__init__.py,*site-packages*"