Diffstat (limited to 'share/common')
-rw-r--r--  share/common/msapi/helper.py     10
-rw-r--r--  share/common/utils/aai_cache.py  11
-rw-r--r--  share/common/utils/restcall.py   11
3 files changed, 18 insertions, 14 deletions
diff --git a/share/common/msapi/helper.py b/share/common/msapi/helper.py
index 947966c9..69b91953 100644
--- a/share/common/msapi/helper.py
+++ b/share/common/msapi/helper.py
@@ -13,12 +13,10 @@ import json
import logging
# import re
import uuid
-
import threading
import datetime
import time
-
-import traceback
+#import traceback
# from common.exceptions import VimDriverNewtonException
from common.utils import restcall
@@ -75,7 +73,7 @@ class MultiCloudAAIHelper(object):
'''
def __init__(self, multicloud_prefix, aai_base_url):
- logger.debug("MultiCloudAAIHelper __init__ traceback: %s" % traceback.format_exc())
+ # logger.debug("MultiCloudAAIHelper __init__ traceback: %s" % traceback.format_exc())
self.proxy_prefix = multicloud_prefix
self.aai_base_url = aai_base_url
self._logger = logger
@@ -203,7 +201,7 @@ class MultiCloudThreadHelper(object):
self.expired_backlog = {}
self.lock = threading.Lock()
self.state_ = 0 # 0: stopped, 1: started
- self.cache_prefix = "bi_"+name+"_"
+ self.cache_prefix = "bi_"+self.name+"_"
self.cache_expired_prefix = "biex_"+self.name+"_"
self.thread = MultiCloudThreadHelper.HelperThread(self)
@@ -317,7 +315,7 @@ class MultiCloudThreadHelper(object):
# sleep in case of interval > 1 second
time.sleep(nexttimer // 1000000)
nexttimer = 30*1000000 # initial interval in us to be updated:30 seconds
- # logger.debug("self.owner.backlog len: %s" % len(self.owner.backlog))
+ # logger.debug("self.owner.backlog: %s, len: %s" % (self.owner.name, len(self.owner.backlog)))
for backlog_id, item in self.owner.backlog.items():
# logger.debug("evaluate backlog item: %s" % item)
# check interval for repeatable backlog item
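Note on the cache_prefix change above: the hunk does not show the full __init__ signature, so the sketch below only illustrates the pattern the fix enforces, deriving both prefixes from the stored attribute self.name so they always agree. The argument list and default value are assumptions, not the project's actual signature.

    # Minimal sketch, assuming a constructor that stores its name argument first
    import threading

    class MultiCloudThreadHelper(object):
        def __init__(self, name=""):
            self.name = name                 # stored once on the instance
            self.backlog = {}
            self.expired_backlog = {}
            self.lock = threading.Lock()
            self.state_ = 0                  # 0: stopped, 1: started
            # both keys derive from the same attribute, so they stay consistent
            self.cache_prefix = "bi_" + self.name + "_"
            self.cache_expired_prefix = "biex_" + self.name + "_"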
diff --git a/share/common/utils/aai_cache.py b/share/common/utils/aai_cache.py
index 53298bb8..41506aca 100644
--- a/share/common/utils/aai_cache.py
+++ b/share/common/utils/aai_cache.py
@@ -26,12 +26,14 @@ def flush_cache_by_url(resource_url):
def get_cache_by_url(resource_url):
try:
- if (filter_cache_by_url(resource_url)):
+ if filter_cache_by_url(resource_url):
value = cache.get("AAI_" + resource_url)
+ # logger.debug("Find cache the resource: %s, %s" %( resource_url, value))
return json.loads(value) if value else None
else:
return None
- except:
+ except Exception as e:
+ logger.error("get_cache_by_url exception: %s" % e.message)
return None
@@ -40,9 +42,10 @@ def set_cache_by_url(resource_url, resource_in_json):
# filter out unmanaged AAI resource
if filter_cache_by_url(resource_url):
# cache the resource for 24 hours
- logger.debug("Cache the resource: "+ resource_url)
+ # logger.debug("Cache the resource: "+ resource_url)
cache.set("AAI_" + resource_url, json.dumps(resource_in_json), 3600 * 24)
- except:
+ except Exception as e:
+ logger.error("get_cache_by_url exception: %s" % e.message)
pass
def filter_cache_by_url(resource_url):
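The aai_cache.py change narrows the bare except clauses and logs the exception. For context, a minimal round-trip of the same get/set pattern is sketched below; the Django low-level cache backend and the helper names are assumptions for illustration, while the "AAI_" key prefix and 24-hour TTL come from the diff itself.

    # Minimal sketch of the caching pattern, assuming Django's cache framework
    import json
    import logging

    from django.core.cache import cache

    logger = logging.getLogger(__name__)

    def set_cached_resource(resource_url, resource_in_json):
        try:
            # store the JSON-serialized resource for 24 hours
            cache.set("AAI_" + resource_url, json.dumps(resource_in_json), 3600 * 24)
        except Exception as e:
            # formatting the exception object directly works on Python 3,
            # which has no e.message attribute
            logger.error("set_cached_resource exception: %s" % e)

    def get_cached_resource(resource_url):
        try:
            value = cache.get("AAI_" + resource_url)
            return json.loads(value) if value else None
        except Exception as e:
            logger.error("get_cached_resource exception: %s" % e)
            return None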
diff --git a/share/common/utils/restcall.py b/share/common/utils/restcall.py
index eb4cb008..464dd65f 100644
--- a/share/common/utils/restcall.py
+++ b/share/common/utils/restcall.py
@@ -65,9 +65,10 @@ def _call_req(base_url, user, passwd, auth_type,
headers['Authorization'] = 'Basic ' + \
base64.b64encode(tmpauthsource).decode('utf-8')
- logger.info("Making rest call with uri,method, header = %s, %s, %s" % (full_url, method.upper(), headers))
+ logger.info("Making rest call with method, uri, header = %s, %s, %s" %
+ (method.upper(), full_url, headers))
if content:
- logger.debug("with content = %s" % (content))
+ logger.debug("with content = %s" % content)
ca_certs = None
for retry_times in range(MAX_RETRY_TIME):
@@ -138,8 +139,9 @@ def req_to_aai(resource, method, content='', appid=settings.MULTICLOUD_APP_ID, n
# hook to flush cache
if method.upper() in ["PUT", "POST", "PATCH", "DELETE"]:
aai_cache.flush_cache_by_url(resource)
- elif method.upper in ["GET"] and not nocache:
+ elif method.upper() in ["GET"] and not nocache:
content = aai_cache.get_cache_by_url(resource)
+ # logger.debug("cached resource: %s, %s" % (resource, content))
if content:
return content
@@ -148,7 +150,8 @@ def req_to_aai(resource, method, content='', appid=settings.MULTICLOUD_APP_ID, n
resource, method, content=json.dumps(content), extra_headers=headers)
if method.upper() in ["GET"] and ret == 0 and not nocache:
- aai_cache.set_cache_by_url(resource, [ret, resp_body, resp_status])
+ # aai_cache.set_cache_by_url(resource, [ret, resp_body, resp_status])
+ aai_cache.set_cache_by_url(resource, (ret, resp_body, resp_status))
return [ret, resp_body, resp_status]
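The method.upper fix in req_to_aai is the functional core of this change: without the call parentheses the expression compares a bound-method object against "GET", which is never true, so GET requests were never served from the AAI cache. A standalone illustration:

    # The bug fixed above, shown in isolation
    method = "get"

    print(method.upper in ["GET"])    # False: a bound method never equals "GET"
    print(method.upper() in ["GET"])  # True: the call returns the string "GET"

The companion switch from a list to a tuple when caching (ret, resp_body, resp_status) does not change what callers read back: json.dumps serializes both as a JSON array, so get_cache_by_url returns a list either way.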