-rw-r--r--   .gitignore                                                       |    1
-rw-r--r--   config/log.yml                                                   |   95
-rwxr-xr-x   config/osdf_config.yaml                                          |    8
-rwxr-xr-x   config/preload_secrets.yaml                                      |    8
-rw-r--r--   docs/sections/release-notes.rst                                  |   47
-rw-r--r--   osdf/adapters/policy/interface.py                                |    5
-rw-r--r--   osdf/config/loader.py                                            |    2
-rwxr-xr-x   osdf/logging/onap_common_v1/CommonLogger.py                      |  900
-rwxr-xr-x   osdf/logging/onap_common_v1/CommonLogger_test.config             |   58
-rwxr-xr-x   osdf/logging/onap_common_v1/CommonLogger_testing.py              |  143
-rwxr-xr-x   osdf/logging/onap_common_v1/README.md                            |  214
-rwxr-xr-x   osdf/logging/onap_common_v1/__init__.py                          |    0
-rwxr-xr-x   osdf/logging/onap_common_v1/makefile                             |   40
-rwxr-xr-x   osdf/logging/osdf_logging.py                                     |   59
-rw-r--r--   osdf/optimizers/licenseopt/simple_license_allocation.py          |    2
-rw-r--r--   osdf/optimizers/pciopt/pci_opt_processor.py                      |    8
-rw-r--r--   osdf/optimizers/placementopt/conductor/remote_opt_processor.py   |    2
-rw-r--r--   osdf/optimizers/routeopt/simple_route_opt.py                     |    3
-rw-r--r--   osdf/utils/mdc_utils.py                                          |   55
-rwxr-xr-x   osdfapp.py                                                       |   19
-rw-r--r--   pom.xml                                                          |    2
-rw-r--r--   requirements.txt                                                 |    1
-rwxr-xr-x   test/logging/test_osdf_logging.py                                |   12
-rw-r--r--   tox.ini                                                          |    6
24 files changed, 264 insertions, 1426 deletions
diff --git a/.gitignore b/.gitignore
index 8d04c8e..b77a34d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -117,3 +117,4 @@ xunit*.xml
simulator-logs
test/functest/simulators/config
test/functest/simulators/osdf
+/pylint.out
diff --git a/config/log.yml b/config/log.yml
new file mode 100644
index 0000000..0b8815f
--- /dev/null
+++ b/config/log.yml
@@ -0,0 +1,95 @@
+version: 1
+disable_existing_loggers: True
+
+loggers:
+  error:
+    handlers: [error_handler]
+    level: "WARN"
+    propagate: True
+  debug:
+    handlers: [debug_handler]
+    level: "DEBUG"
+    propagate: True
+  metrics:
+    handlers: [metrics_handler]
+    level: "INFO"
+    propagate: True
+  audit:
+    handlers: [audit_handler]
+    level: "INFO"
+    propagate: True
+handlers:
+  debug_handler:
+    level: "DEBUG"
+    class: "logging.handlers.TimedRotatingFileHandler"
+    filename: "logs/debug.log"
+    formatter: "debugFormat"
+    when: midnight
+    interval: 1
+    utc: True
+    delay: False
+    backupCount: 10
+  error_handler:
+    level: "WARN"
+    class: "logging.handlers.TimedRotatingFileHandler"
+    filename: "logs/error.log"
+    formatter: "errorFormat"
+    when: midnight
+    interval: 1
+    utc: True
+    delay: False
+    backupCount: 10
+  metrics_handler:
+    level: "INFO"
+    class: "logging.handlers.TimedRotatingFileHandler"
+    filename: "logs/metrics.log"
+    formatter: "metricsFormat"
+    when: midnight
+    interval: 1
+    utc: True
+    delay: False
+    backupCount: 10
+  audit_handler:
+    level: "INFO"
+    class: "logging.handlers.TimedRotatingFileHandler"
+    filename: "logs/audit.log"
+    formatter: "auditFormat"
+    when: midnight
+    interval: 1
+    utc: True
+    delay: False
+    backupCount: 10
+formatters:
+  standard:
+ format: "%(asctime)s|||||%(name)s||%(thread)||%(funcName)s||%(levelname)s||%(message)s"
+  debugFormat:
+    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
+    mdcfmt: "{requestID} {threadID} {serverName} {serviceName} {instanceUUID} {upperLogLevel} {severity} {serverIPAddress} {server} {IPAddress} {className} {timer} {detailMessage}"
+    datefmt: "%Y-%m-%d %H:%M:%S"
+    (): onaplogging.mdcformatter.MDCFormatter
+  errorFormat:
+    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
+    mdcfmt: "{requestID} {threadID} {serviceName} {partnerName} {targetEntity} {targetServiceName} {errorCode} {errorDescription} {detailMessage}"
+    datefmt: "%Y-%m-%d %H:%M:%S"
+    (): onaplogging.mdcformatter.MDCFormatter
+  auditFormat:
+    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
+    mdcfmt: "{requestID} {serviceInstanceID} {threadID} {serverName} {serviceName} {partnerName} {statusCode} {responseCode} {responseDescription} {instanceUUID} {upperLogLevel} {severity} \
+      {serverIPAddress} {timer} {server} {IPAddress} {className} {unused} {processKey} {customField1} {customField2} {customField3} {customField4} {detailMessage}"
+    datefmt: "%Y-%m-%d %H:%M:%S"
+    (): onaplogging.mdcformatter.MDCFormatter
+  metricsFormat:
+    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
+    mdcfmt: "{requestID} {serviceInstanceID} {threadID} {serverName} {serviceName} {partnerName} \
+      {targetEntity} {targetServiceName} {statusCode} {responseCode} {responseDescription} \
+      {instanceUUID} {upperLogLevel} {severity} {serverIPAddress} {timer} {server} {IPAddress} \
+      {className} {unused} {processKey} {targetVirtualEntity} {customField1} {customField2} \
+      {customField3} {customField4} {detailMessage}"
+    datefmt: "%Y-%m-%d %H:%M:%S"
+    (): onaplogging.mdcformatter.MDCFormatter
+
+  mdcFormat:
+    format: "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s"
+    mdcfmt: "{requestID} {invocationID} {serviceName} {serviceIP}"
+    datefmt: "%Y-%m-%d %H:%M:%S"
+    (): onaplogging.mdcformatter.MDCFormatter
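For reference, a dictConfig-style YAML such as the new config/log.yml above is normally parsed with PyYAML and handed to the standard library. A minimal sketch, assuming the onaplogging package (which provides onaplogging.mdcformatter.MDCFormatter and populates the %(mdc)s field) is installed and the relative logs/ directory can be created:

```python
# Minimal sketch of loading a dictConfig-style file like config/log.yml.
# Assumes PyYAML and the onaplogging package are installed.
import logging.config
import os

import yaml

os.makedirs("logs", exist_ok=True)  # handlers write to logs/*.log

with open("config/log.yml") as fd:
    logging.config.dictConfig(yaml.safe_load(fd))

audit_log = logging.getLogger("audit")
audit_log.info("request handled")  # routed to logs/audit.log via audit_handler
```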
diff --git a/config/osdf_config.yaml b/config/osdf_config.yaml
index cf5426b..6cf8cec 100755
--- a/config/osdf_config.yaml
+++ b/config/osdf_config.yaml
@@ -17,9 +17,8 @@ conductorMaxRetries: 30 # if we don't get something in 30 minutes, give up
# versions to be set in HTTP header
conductorMinorVersion: 0
-# Policy Platform -- requires ClientAuth, Authorization, and Environment
-policyPlatformUrl: http://policy.api.simpledemo.onap.org:8081/pdp/api/getConfig # Policy Dev platform URL
-policyPlatformEnv: TEST # Environment for policy platform
+# Policy Platform -- requires Authorization
+policyPlatformUrl: https://policy-xacml-pdp:6969/policy/pdpx/decision/v1 # Policy Dev platform URL
# URL for policy model uploading
policyPlatformUrlModelUpload: https://policy.api.simpledemo.onap.org:8081/policy/api/v1/policytypes
pathPolicyModelUpload: ../../models/policy/placement/tosca_upload/
@@ -50,3 +49,6 @@ aaf_ca_certs: ssl_certs/aaf_root_ca.cer
configDbUrl: http://config.db.url:8080
configDbGetCellListUrl: 'SDNCConfigDBAPI/getCellList'
configDbGetNbrListUrl: 'SDNCConfigDBAPI/getNbrList'
+
+pciHMSUsername: test
+pciHMSPassword: passwd
\ No newline at end of file
diff --git a/config/preload_secrets.yaml b/config/preload_secrets.yaml
index 1d2ea01..3050d87 100755
--- a/config/preload_secrets.yaml
+++ b/config/preload_secrets.yaml
@@ -11,12 +11,8 @@ secrets:
     Password: plan.15
 - name: policyPlatform
   values:
-    UserName: testpdp
-    Password: alpha123
-- name: policyClient
-  values:
-    UserName: python
-    Password: test
+    UserName: healthcheck
+    Password: zb!XztG34
 - name: dmaap
   values:
     UserName: NA
diff --git a/docs/sections/release-notes.rst b/docs/sections/release-notes.rst
index 596db52..b931cd0 100644
--- a/docs/sections/release-notes.rst
+++ b/docs/sections/release-notes.rst
@@ -5,30 +5,67 @@
=============
Release Notes
=============
-Version: 5.0.0
+
+Version: 5.0.1
--------------
-:Release Date: 2019-09 (El Alto Release)
+:Release Date: 2019-09-30 (El Alto Release)
+
+The El Alto release is the fourth release for ONAP Optimization Framework (OOF).
+
+Artifacts released:
+
+optf-has:1.3.3
+optf-osdf:1.3.4
+optf-cmso:2.1.1
**New Features**
-The El Alto release is the fourth release for ONAP Optimization Framework (OOF).
+While no new features were added in the release, the following Stories were delivered as enhancements.
+
+ * [OPTFRA-415] Automation on policy model uploading
+ * [OPTFRA-427] CMSO - Schedule a workflow in SO and track status to completion
+
+* Platform Maturity Level 1
+ * ~65.1+ unit test coverage
-A summary of features includes
**Bug Fixes**
+The El Alto release for OOF fixed the following Bugs.
-**Known Issues**
+ * [OPTFRA-579] Json error in homing solution
+ * [OPTFRA-521] oof-has-api exposes plain text HTTP endpoint using port 30275
+ * [OPTFRA-522] oof-osdf exposes plain text HTTP endpoint using port 30248
+ * [OPTFRA-577] Need for "ReadWriteMany" access on storage when deploying on Kubernetes?
+ * [OPTFRA-517] Clean up optf/cmso in integration/csit for Dublin
+ * [OPTFRA-486] Support "identifiers" field as a list of values
+ * [OPTFRA-403] OOF CMSO Service kubernetes resources allocation is not done
+ * [OPTFRA-526] OOF pods not running
+ * [OPTFRA-409] Template example : purpose to be explained
+ * [OPTFRA-593] OOF-CSMO healthcheck is failing in Master
+**Known Issues**
+
+ * [OPTFRA-576] optf-has-master-csit-has is testing Dublin image
+ * [OPTFRA-596] CMSO - Sonar and CSIT jobs failing
+ * [OPTFRA-608] Error in Homing with multiple policies
**Security Notes**
*Fixed Security Issues*
+ * [OJSI-122] In default deployment OPTFRA (oof-osdf) exposes HTTP port 30248 outside of cluster.
+ * [OPTFRA-521] oof-has-api exposes plain text HTTP endpoint using port 30275
+ * [OPTFRA-522] oof-osdf exposes plain text HTTP endpoint using port 30248
+ * [OPTFRA-455] CMSO - Mitigate License Threat tomcat-embed-core
+
*Known Security Issues*
+ * [OPTFRA-481] Fix Vulnerability with spring-data-jpa package
+ * [OPTFRA-431] Fix Vulnerability with spring-security-web package
+
*Known Vulnerabilities in Used Modules*
**Upgrade Notes**
diff --git a/osdf/adapters/policy/interface.py b/osdf/adapters/policy/interface.py
index 7c3118c..61861de 100644
--- a/osdf/adapters/policy/interface.py
+++ b/osdf/adapters/policy/interface.py
@@ -128,11 +128,8 @@ def remote_api(req_json, osdf_config, service_type="placement"):
"""
config = osdf_config.deployment
uid, passwd = config['policyPlatformUsername'], config['policyPlatformPassword']
- pcuid, pcpasswd = config['policyClientUsername'], config['policyClientPassword']
- headers = {"ClientAuth": base64.b64encode(bytes("{}:{}".format(pcuid, pcpasswd), "ascii"))}
- headers.update({'Environment': config['policyPlatformEnv']})
url = config['policyPlatformUrl']
- rc = RestClient(userid=uid, passwd=passwd, headers=headers, url=url, log_func=debug_log.debug)
+ rc = RestClient(userid=uid, passwd=passwd, url=url, log_func=debug_log.debug)
if osdf_config.core['policy_info'][service_type]['policy_fetch'] == "by_name":
policies = get_by_name(rc, req_json[service_type + "Info"]['policyId'], wildcards=True)
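The hunk above drops the ClientAuth and Environment headers, so the XACML PDP decision endpoint is now called with HTTP basic auth alone through the project's RestClient wrapper. A rough, hypothetical equivalent using plain requests is sketched below; the URL and credentials mirror config/osdf_config.yaml and config/preload_secrets.yaml, while the payload fields are placeholders, not the actual OSDF request body:

```python
# Illustrative only: basic-auth call with no ClientAuth/Environment headers.
# Payload fields are placeholders; the real request body is built elsewhere.
import requests

url = "https://policy-xacml-pdp:6969/policy/pdpx/decision/v1"
auth = ("healthcheck", "zb!XztG34")  # policyPlatformUsername / Password

payload = {"ONAPName": "OOF", "action": "optimize", "resource": {}}

resp = requests.post(url, json=payload, auth=auth, verify=False, timeout=30)
resp.raise_for_status()
print(resp.json())
```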
diff --git a/osdf/config/loader.py b/osdf/config/loader.py
index 7cb363a..dca0033 100644
--- a/osdf/config/loader.py
+++ b/osdf/config/loader.py
@@ -31,7 +31,7 @@ def load_config_file(config_file: str, child_name="dockerConfiguration") -> dict
     with open(config_file, 'r') as fid:
         res = {}
         if config_file.endswith(".yaml"):
-            res = yaml.load(fid)
+            res = yaml.safe_load(fid)
         elif config_file.endswith(".json") or config_file.endswith("json"):
             res = json.load(fid)
     return res.get(child_name, res) if child_name else res
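The yaml.load to yaml.safe_load switch closes a well-known code-execution hole: without a restricted loader, PyYAML will construct arbitrary Python objects from tagged nodes. A small demonstration, assuming PyYAML 5.x where UnsafeLoader exposes the old permissive behaviour:

```python
# Why safe_load matters in load_config_file: a tagged node can trigger
# arbitrary object construction under the permissive loader, while
# safe_load only builds plain scalars, lists, and mappings.
import yaml

doc = "value: !!python/object/apply:os.getcwd []"

print(yaml.load(doc, Loader=yaml.UnsafeLoader))  # constructs the object -> runs os.getcwd()
print(yaml.safe_load("value: 42"))               # plain data only

try:
    yaml.safe_load(doc)                          # python/* tags are rejected
except yaml.constructor.ConstructorError as err:
    print("rejected:", err.problem)
```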
diff --git a/osdf/logging/onap_common_v1/CommonLogger.py b/osdf/logging/onap_common_v1/CommonLogger.py
deleted file mode 100755
index 6572d6f..0000000
--- a/osdf/logging/onap_common_v1/CommonLogger.py
+++ /dev/null
@@ -1,900 +0,0 @@
-# -------------------------------------------------------------------------
-# Copyright (c) 2015-2017 AT&T Intellectual Property
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# -------------------------------------------------------------------------
-#
-
-"""ONAP Common Logging library in Python."""
-
-#!/usr/bin/python
-# -*- indent-tabs-mode: nil -*- vi: set expandtab:
-
-
-from __future__ import print_function
-import os, sys, getopt, logging, logging.handlers, time, re, uuid, socket, threading
-
-class CommonLogger:
- """ONAP Common Logging object.
-
- Public methods:
- __init__
- setFields
- debug
- info
- warn
- error
- fatal
- """
-
- UnknownFile = -1
- ErrorFile = 0
- DebugFile = 1
- AuditFile = 2
- MetricsFile = 3
- DateFmt = '%Y-%m-%dT%H:%M:%S'
- verbose = False
-
- def __init__(self, configFile, logKey, **kwargs):
- """Construct a Common Logger for one Log File.
-
- Arguments:
- configFile -- configuration filename.
- logKey -- the keyword in configFile that identifies the log filename.
-
- Keyword arguments: Annotations are d:debug, a=audit, m=metrics, e=error
- style -- the log file format (style) to use when writing log messages,
- one of CommonLogger.ErrorFile, CommonLogger.DebugFile,
- CommonLogger.AuditFile and CommonLogger.MetricsFile, or
- one of the strings "error", "debug", "audit" or "metrics".
- May also be set in the config file using a field named
- <logKey>Style (where <logKey> is the value of the logKey
- parameter). The keyword value overrides the value in the
- config file.
- requestID (dame) -- optional default value for this log record field.
- serviceInstanceID (am) -- optional default value for this log record field.
- threadID (am) -- optional default value for this log record field.
- serverName (am) -- optional default value for this log record field.
- serviceName (am) -- optional default value for this log record field.
- instanceUUID (am) -- optional default value for this log record field.
- severity (am) -- optional default value for this log record field.
- serverIPAddress (am) -- optional default value for this log record field.
- server (am) -- optional default value for this log record field.
- IPAddress (am) -- optional default value for this log record field.
- className (am) -- optional default value for this log record field.
- timer (am) -- (ElapsedTime) optional default value for this log record field.
- partnerName (ame) -- optional default value for this log record field.
- targetEntity (me) -- optional default value for this log record field.
- targetServiceName (me) -- optional default value for this log record field.
- statusCode (am) -- optional default value for this log record field.
- responseCode (am) -- optional default value for this log record field.
- responseDescription (am) -- optional default value for this log record field.
- processKey (am) -- optional default value for this log record field.
- targetVirtualEntity (m) -- optional default value for this log record field.
- customField1 (am) -- optional default value for this log record field.
- customField2 (am) -- optional default value for this log record field.
- customField3 (am) -- optional default value for this log record field.
- customField4 (am) -- optional default value for this log record field.
- errorCategory (e) -- optional default value for this log record field.
- errorCode (e) -- optional default value for this log record field.
- errorDescription (e) -- optional default value for this log record field.
-
- Note: the pipe '|' character is not allowed in any log record field.
- """
-
- self._monitorFlag = False
-
- # Get configuration parameters
- self._logKey = str(logKey)
- self._configFile = str(configFile)
- self._rotateMethod = 'time'
- self._timeRotateIntervalType = 'midnight'
- self._timeRotateInterval = 1
- self._sizeMaxBytes = 0
- self._sizeRotateMode = 'a'
- self._socketHost = None
- self._socketPort = 0
- self._typeLogger = 'filelogger'
- self._backupCount = 6
- self._logLevelThreshold = self._intLogLevel('')
- self._logFile = None
- self._begTime = None
- self._begMsec = 0
- self._fields = {}
- self._fields["style"] = CommonLogger.UnknownFile
- try:
- self._configFileModified = os.path.getmtime(self._configFile)
- for line in open(self._configFile):
- line = line.split('#',1)[0] # remove comments
- if '=' in line:
- key, value = [x.strip() for x in line.split('=',1)]
- if key == 'rotateMethod' and value.lower() in ['time', 'size', 'none']:
- self._rotateMethod = value.lower()
- elif key == 'timeRotateIntervalType' and value in ['S', 'M', 'H', 'D', 'W0', 'W1', 'W2', 'W3', 'W4', 'W5', 'W6', 'midnight']:
- self._timeRotateIntervalType = value
- elif key == 'timeRotateInterval' and int( value ) > 0:
- self._timeRotateInterval = int( value )
- elif key == 'sizeMaxBytes' and int( value ) >= 0:
- self._sizeMaxBytes = int( value )
- elif key == 'sizeRotateMode' and value in ['a']:
- self._sizeRotateMode = value
- elif key == 'backupCount' and int( value ) >= 0:
- self._backupCount = int( value )
- elif key == self._logKey + 'SocketHost':
- self._socketHost = value
- elif key == self._logKey + 'SocketPort' and int( value ) == 0:
- self._socketPort = int( value )
- elif key == self._logKey + 'LogType' and value.lower() in ['filelogger', 'stdoutlogger', 'stderrlogger', 'socketlogger', 'nulllogger']:
- self._typeLogger = value.lower()
- elif key == self._logKey + 'LogLevel':
- self._logLevelThreshold = self._intLogLevel(value.upper())
- elif key == self._logKey + 'Style':
- self._fields["style"] = value
- elif key == self._logKey:
- self._logFile = value
- except Exception as x:
- print("exception reading '%s' configuration file: %s" %(self._configFile, str(x)), file=sys.stderr)
- sys.exit(2)
- except:
- print("exception reading '%s' configuration file" %(self._configFile), file=sys.stderr)
- sys.exit(2)
-
- if self._logFile is None:
- print('configuration file %s is missing definition %s for log file' %(self._configFile, self._logKey), file=sys.stderr)
- sys.exit(2)
-
-
- # initialize default log fields
- # timestamp will automatically be generated
- for key in ['style', 'requestID', 'serviceInstanceID', 'threadID', 'serverName', 'serviceName', 'instanceUUID', \
- 'severity', 'serverIPAddress', 'server', 'IPAddress', 'className', 'timer', \
- 'partnerName', 'targetEntity', 'targetServiceName', 'statusCode', 'responseCode', \
- 'responseDescription', 'processKey', 'targetVirtualEntity', 'customField1', \
- 'customField2', 'customField3', 'customField4', 'errorCategory', 'errorCode', \
- 'errorDescription' ]:
- if key in kwargs and kwargs[key] != None:
- self._fields[key] = kwargs[key]
-
- self._resetStyleField()
-
- # Set up logger
- self._logLock = threading.Lock()
- with self._logLock:
- self._logger = logging.getLogger(self._logKey)
- self._logger.propagate = False
- self._createLogger()
-
- self._defaultServerInfo()
-
- # spawn a thread to monitor configFile for logLevel and logFile changes
- self._monitorFlag = True
- self._monitorThread = threading.Thread(target=self._monitorConfigFile, args=())
- self._monitorThread.daemon = True
- self._monitorThread.start()
-
-
- def _createLogger(self):
- if self._typeLogger == 'filelogger':
- self._mkdir_p(self._logFile)
- if self._rotateMethod == 'time':
- self._logHandler = logging.handlers.TimedRotatingFileHandler(self._logFile, \
- when=self._timeRotateIntervalType, interval=self._timeRotateInterval, \
- backupCount=self._backupCount, encoding=None, delay=False, utc=True)
- elif self._rotateMethod == 'size':
- self._logHandler = logging.handlers.RotatingFileHandler(self._logFile, \
- mode=self._sizeRotateMode, maxBytes=self._sizeMaxBytes, \
- backupCount=self._backupCount, encoding=None, delay=False)
-
- else:
- self._logHandler = logging.handlers.WatchedFileHandler(self._logFile, \
- mode=self._sizeRotateMode, \
- encoding=None, delay=False)
- elif self._typeLogger == 'stderrlogger':
- self._logHandler = logging.handlers.StreamHandler(sys.stderr)
- elif self._typeLogger == 'stdoutlogger':
- self._logHandler = logging.handlers.StreamHandler(sys.stdout)
- elif self._typeLogger == 'socketlogger':
- self._logHandler = logging.handlers.SocketHandler(self._socketHost, self._socketPort)
- elif self._typeLogger == 'nulllogger':
- self._logHandler = logging.handlers.NullHandler()
-
- if self._fields["style"] == CommonLogger.AuditFile or self._fields["style"] == CommonLogger.MetricsFile:
- self._logFormatter = logging.Formatter(fmt='%(begtime)s,%(begmsecs)03d+00:00|%(endtime)s,%(endmsecs)03d+00:00|%(message)s', datefmt=CommonLogger.DateFmt)
- else:
- self._logFormatter = logging.Formatter(fmt='%(asctime)s,%(msecs)03d+00:00|%(message)s', datefmt='%Y-%m-%dT%H:%M:%S')
- self._logFormatter.converter = time.gmtime
- self._logHandler.setFormatter(self._logFormatter)
- self._logger.addHandler(self._logHandler)
-
- def _resetStyleField(self):
- styleFields = ["error", "debug", "audit", "metrics"]
- if self._fields['style'] in styleFields:
- self._fields['style'] = styleFields.index(self._fields['style'])
-
- def __del__(self):
- if self._monitorFlag == False:
- return
-
- self._monitorFlag = False
-
- if self._monitorThread is not None and self._monitorThread.is_alive():
- self._monitorThread.join()
-
- self._monitorThread = None
-
-
- def _defaultServerInfo(self):
-
- # If not set or purposely set = None, then set default
- if self._fields.get('server') is None:
- try:
- self._fields['server'] = socket.getfqdn()
- except Exception as err:
- try:
- self._fields['server'] = socket.gethostname()
- except Exception as err:
- self._fields['server'] = ""
-
- # If not set or purposely set = None, then set default
- if self._fields.get('serverIPAddress') is None:
- try:
- self._fields['serverIPAddress'] = socket.gethostbyname(self._fields['server'])
- except Exception as err:
- self._fields['serverIPAddress'] = ""
-
-
- def _monitorConfigFile(self):
- while self._monitorFlag:
- try:
- fileTime = os.path.getmtime(self._configFile)
- if fileTime > self._configFileModified:
- self._configFileModified = fileTime
- ReopenLogFile = False
- logFile = self._logFile
- with open(self._configFile) as fp:
- for line in fp:
- line = line.split('#',1)[0] # remove comments
- if '=' in line:
- key, value = [x.strip() for x in line.split('=',1)]
- if key == 'rotateMethod' and value.lower() in ['time', 'size', 'none'] and self._rotateMethod != value:
- self._rotateMethod = value.lower()
- ReopenLogFile = True
- elif key == 'timeRotateIntervalType' and value in ['S', 'M', 'H', 'D', 'W0', 'W1', 'W2', 'W3', 'W4', 'W5', 'W6', 'midnight']:
- self._timeRotateIntervalType = value
- ReopenLogFile = True
- elif key == 'timeRotateInterval' and int( value ) > 0:
- self._timeRotateInterval = int( value )
- ReopenLogFile = True
- elif key == 'sizeMaxBytes' and int( value ) >= 0:
- self._sizeMaxBytes = int( value )
- ReopenLogFile = True
- elif key == 'sizeRotateMode' and value in ['a']:
- self._sizeRotateMode = value
- ReopenLogFile = True
- elif key == 'backupCount' and int( value ) >= 0:
- self._backupCount = int( value )
- ReopenLogFile = True
- elif key == self._logKey + 'SocketHost' and self._socketHost != value:
- self._socketHost = value
- ReopenLogFile = True
- elif key == self._logKey + 'SocketPort' and self._socketPort > 0 and self._socketPort != int( value ):
- self._socketPort = int( value )
- ReopenLogFile = True
- elif key == self._logKey + 'LogLevel' and self._logLevelThreshold != self._intLogLevel( value.upper() ):
- self._logLevelThreshold = self._intLogLevel(value.upper())
- elif key == self._logKey + 'LogType' and self._typeLogger != value and value.lower() in ['filelogger', 'stdoutlogger', 'stderrlogger', 'socketlogger', 'nulllogger']:
- self._typeLogger = value.lower()
- ReopenLogFile = True
- elif key == self._logKey + 'Style':
- self._fields["style"] = value
- self._resetStyleField()
- elif key == self._logKey and self._logFile != value:
- logFile = value
- ReopenLogFile = True
- if ReopenLogFile:
- with self._logLock:
- self._logger.removeHandler(self._logHandler)
- self._logFile = logFile
- self._createLogger()
- except Exception as err:
- pass
-
- time.sleep(5)
-
-
- def setFields(self, **kwargs):
- """Set default values for log fields.
-
- Keyword arguments: Annotations are d:debug, a=audit, m=metrics, e=error
- style -- the log file format (style) to use when writing log messages
- requestID (dame) -- optional default value for this log record field.
- serviceInstanceID (am) -- optional default value for this log record field.
- threadID (am) -- optional default value for this log record field.
- serverName (am) -- optional default value for this log record field.
- serviceName (am) -- optional default value for this log record field.
- instanceUUID (am) -- optional default value for this log record field.
- severity (am) -- optional default value for this log record field.
- serverIPAddress (am) -- optional default value for this log record field.
- server (am) -- optional default value for this log record field.
- IPAddress (am) -- optional default value for this log record field.
- className (am) -- optional default value for this log record field.
- timer (am) -- (ElapsedTime) optional default value for this log record field.
- partnerName (ame) -- optional default value for this log record field.
- targetEntity (me) -- optional default value for this log record field.
- targetServiceName (me) -- optional default value for this log record field.
- statusCode (am) -- optional default value for this log record field.
- responseCode (am) -- optional default value for this log record field.
- responseDescription (am) -- optional default value for this log record field.
- processKey (am) -- optional default value for this log record field.
- targetVirtualEntity (m) -- optional default value for this log record field.
- customField1 (am) -- optional default value for this log record field.
- customField2 (am) -- optional default value for this log record field.
- customField3 (am) -- optional default value for this log record field.
- customField4 (am) -- optional default value for this log record field.
- errorCategory (e) -- optional default value for this log record field.
- errorCode (e) -- optional default value for this log record field.
- errorDescription (e) -- optional default value for this log record field.
-
- Note: the pipe '|' character is not allowed in any log record field.
- """
-
- for key in ['style', 'requestID', 'serviceInstanceID', 'threadID', 'serverName', 'serviceName', 'instanceUUID', \
- 'severity', 'serverIPAddress', 'server', 'IPAddress', 'className', 'timer', \
- 'partnerName', 'targetEntity', 'targetServiceName', 'statusCode', 'responseCode', \
- 'responseDescription', 'processKey', 'targetVirtualEntity', 'customField1', \
- 'customField2', 'customField3', 'customField4', 'errorCategory', 'errorCode', \
- 'errorDescription' ]:
- if key in kwargs:
- if kwargs[key] != None:
- self._fields[key] = kwargs[key]
- elif key in self._fields:
- del self._fields[key]
-
- self._defaultServerInfo()
-
-
- def debug(self, message, **kwargs):
- """Write a DEBUG level message to the log file.
-
- Arguments:
- message -- value for the last log record field.
-
- Keyword arguments: Annotations are d:debug, a=audit, m=metrics, e=error
- style -- the log file format (style) to use when writing log messages
- requestID (dame) -- optional default value for this log record field.
- serviceInstanceID (am) -- optional default value for this log record field.
- threadID (am) -- optional default value for this log record field.
- serverName (am) -- optional default value for this log record field.
- serviceName (am) -- optional default value for this log record field.
- instanceUUID (am) -- optional default value for this log record field.
- severity (am) -- optional default value for this log record field.
- serverIPAddress (am) -- optional default value for this log record field.
- server (am) -- optional default value for this log record field.
- IPAddress (am) -- optional default value for this log record field.
- className (am) -- optional default value for this log record field.
- timer (am) -- (ElapsedTime) optional default value for this log record field.
- partnerName (ame) -- optional default value for this log record field.
- targetEntity (me) -- optional default value for this log record field.
- targetServiceName (me) -- optional default value for this log record field.
- statusCode (am) -- optional default value for this log record field.
- responseCode (am) -- optional default value for this log record field.
- responseDescription (am) -- optional default value for this log record field.
- processKey (am) -- optional default value for this log record field.
- targetVirtualEntity (m) -- optional default value for this log record field.
- customField1 (am) -- optional default value for this log record field.
- customField2 (am) -- optional default value for this log record field.
- customField3 (am) -- optional default value for this log record field.
- customField4 (am) -- optional default value for this log record field.
- errorCategory (e) -- optional default value for this log record field.
- errorCode (e) -- optional default value for this log record field.
- errorDescription (e) -- optional default value for this log record field.
-
- Note: the pipe '|' character is not allowed in any log record field.
- """
-
- self._log('DEBUG', message, errorCategory = 'DEBUG', **kwargs)
-
- def info(self, message, **kwargs):
- """Write an INFO level message to the log file.
-
- Arguments:
- message -- value for the last log record field.
-
- Keyword arguments: Annotations are d:debug, a=audit, m=metrics, e=error
- style -- the log file format (style) to use when writing log messages
- requestID (dame) -- optional default value for this log record field.
- serviceInstanceID (am) -- optional default value for this log record field.
- threadID (am) -- optional default value for this log record field.
- serverName (am) -- optional default value for this log record field.
- serviceName (am) -- optional default value for this log record field.
- instanceUUID (am) -- optional default value for this log record field.
- severity (am) -- optional default value for this log record field.
- serverIPAddress (am) -- optional default value for this log record field.
- server (am) -- optional default value for this log record field.
- IPAddress (am) -- optional default value for this log record field.
- className (am) -- optional default value for this log record field.
- timer (am) -- (ElapsedTime) optional default value for this log record field.
- partnerName (ame) -- optional default value for this log record field.
- targetEntity (me) -- optional default value for this log record field.
- targetServiceName (me) -- optional default value for this log record field.
- statusCode (am) -- optional default value for this log record field.
- responseCode (am) -- optional default value for this log record field.
- responseDescription (am) -- optional default value for this log record field.
- processKey (am) -- optional default value for this log record field.
- targetVirtualEntity (m) -- optional default value for this log record field.
- customField1 (am) -- optional default value for this log record field.
- customField2 (am) -- optional default value for this log record field.
- customField3 (am) -- optional default value for this log record field.
- customField4 (am) -- optional default value for this log record field.
- errorCategory (e) -- optional default value for this log record field.
- errorCode (e) -- optional default value for this log record field.
- errorDescription (e) -- optional default value for this log record field.
-
- Note: the pipe '|' character is not allowed in any log record field.
- """
-
- self._log('INFO', message, errorCategory = 'INFO', **kwargs)
-
- def warn(self, message, **kwargs):
- """Write a WARN level message to the log file.
-
- Arguments:
- message -- value for the last log record field.
-
- Keyword arguments: Annotations are d:debug, a=audit, m=metrics, e=error
- style -- the log file format (style) to use when writing log messages
- requestID (dame) -- optional default value for this log record field.
- serviceInstanceID (am) -- optional default value for this log record field.
- threadID (am) -- optional default value for this log record field.
- serverName (am) -- optional default value for this log record field.
- serviceName (am) -- optional default value for this log record field.
- instanceUUID (am) -- optional default value for this log record field.
- severity (am) -- optional default value for this log record field.
- serverIPAddress (am) -- optional default value for this log record field.
- server (am) -- optional default value for this log record field.
- IPAddress (am) -- optional default value for this log record field.
- className (am) -- optional default value for this log record field.
- timer (am) -- (ElapsedTime) optional default value for this log record field.
- partnerName (ame) -- optional default value for this log record field.
- targetEntity (me) -- optional default value for this log record field.
- targetServiceName (me) -- optional default value for this log record field.
- statusCode (am) -- optional default value for this log record field.
- responseCode (am) -- optional default value for this log record field.
- responseDescription (am) -- optional default value for this log record field.
- processKey (am) -- optional default value for this log record field.
- targetVirtualEntity (m) -- optional default value for this log record field.
- customField1 (am) -- optional default value for this log record field.
- customField2 (am) -- optional default value for this log record field.
- customField3 (am) -- optional default value for this log record field.
- customField4 (am) -- optional default value for this log record field.
- errorCategory (e) -- optional default value for this log record field.
- errorCode (e) -- optional default value for this log record field.
- errorDescription (e) -- optional default value for this log record field.
-
- Note: the pipe '|' character is not allowed in any log record field.
- """
-
- self._log('WARN', message, errorCategory = 'WARN', **kwargs)
-
- def error(self, message, **kwargs):
- """Write an ERROR level message to the log file.
-
- Arguments:
- message -- value for the last log record field.
-
- Keyword arguments: Annotations are d:debug, a=audit, m=metrics, e=error
- style -- the log file format (style) to use when writing log messages
- requestID (dame) -- optional default value for this log record field.
- serviceInstanceID (am) -- optional default value for this log record field.
- threadID (am) -- optional default value for this log record field.
- serverName (am) -- optional default value for this log record field.
- serviceName (am) -- optional default value for this log record field.
- instanceUUID (am) -- optional default value for this log record field.
- severity (am) -- optional default value for this log record field.
- serverIPAddress (am) -- optional default value for this log record field.
- server (am) -- optional default value for this log record field.
- IPAddress (am) -- optional default value for this log record field.
- className (am) -- optional default value for this log record field.
- timer (am) -- (ElapsedTime) optional default value for this log record field.
- partnerName (ame) -- optional default value for this log record field.
- targetEntity (me) -- optional default value for this log record field.
- targetServiceName (me) -- optional default value for this log record field.
- statusCode (am) -- optional default value for this log record field.
- responseCode (am) -- optional default value for this log record field.
- responseDescription (am) -- optional default value for this log record field.
- processKey (am) -- optional default value for this log record field.
- targetVirtualEntity (m) -- optional default value for this log record field.
- customField1 (am) -- optional default value for this log record field.
- customField2 (am) -- optional default value for this log record field.
- customField3 (am) -- optional default value for this log record field.
- customField4 (am) -- optional default value for this log record field.
- errorCategory (e) -- optional default value for this log record field.
- errorCode (e) -- optional default value for this log record field.
- errorDescription (e) -- optional default value for this log record field.
-
- Note: the pipe '|' character is not allowed in any log record field.
- """
-
- self._log('ERROR', message, errorCategory = 'ERROR', **kwargs)
-
- def fatal(self, message, **kwargs):
- """Write a FATAL level message to the log file.
-
- Arguments:
- message -- value for the last log record field.
-
- Keyword arguments: Annotations are d:debug, a=audit, m=metrics, e=error
- style -- the log file format (style) to use when writing log messages
- requestID (dame) -- optional default value for this log record field.
- serviceInstanceID (am) -- optional default value for this log record field.
- threadID (am) -- optional default value for this log record field.
- serverName (am) -- optional default value for this log record field.
- serviceName (am) -- optional default value for this log record field.
- instanceUUID (am) -- optional default value for this log record field.
- severity (am) -- optional default value for this log record field.
- serverIPAddress (am) -- optional default value for this log record field.
- server (am) -- optional default value for this log record field.
- IPAddress (am) -- optional default value for this log record field.
- className (am) -- optional default value for this log record field.
- timer (am) -- (ElapsedTime) optional default value for this log record field.
- partnerName (ame) -- optional default value for this log record field.
- targetEntity (me) -- optional default value for this log record field.
- targetServiceName (me) -- optional default value for this log record field.
- statusCode (am) -- optional default value for this log record field.
- responseCode (am) -- optional default value for this log record field.
- responseDescription (am) -- optional default value for this log record field.
- processKey (am) -- optional default value for this log record field.
- targetVirtualEntity (m) -- optional default value for this log record field.
- customField1 (am) -- optional default value for this log record field.
- customField2 (am) -- optional default value for this log record field.
- customField3 (am) -- optional default value for this log record field.
- customField4 (am) -- optional default value for this log record field.
- errorCategory (e) -- optional default value for this log record field.
- errorCode (e) -- optional default value for this log record field.
- errorDescription (e) -- optional default value for this log record field.
-
- Note: the pipe '|' character is not allowed in any log record field.
- """
-
- self._log('FATAL', message, errorCategory = 'FATAL', **kwargs)
-
- def _log(self, logLevel, message, **kwargs):
- """Write a message to the log file.
-
- Arguments:
- logLevel -- value ('DEBUG', 'INFO', 'WARN', 'ERROR', 'FATAL', ...) for the log record.
- message -- value for the last log record field.
-
- Keyword arguments: Annotations are d:debug, a=audit, m=metrics, e=error
- style -- the log file format (style) to use when writing log messages
- requestID (dame) -- optional default value for this log record field.
- serviceInstanceID (am) -- optional default value for this log record field.
- threadID (am) -- optional default value for this log record field.
- serverName (am) -- optional default value for this log record field.
- serviceName (am) -- optional default value for this log record field.
- instanceUUID (am) -- optional default value for this log record field.
- severity (am) -- optional default value for this log record field.
- serverIPAddress (am) -- optional default value for this log record field.
- server (am) -- optional default value for this log record field.
- IPAddress (am) -- optional default value for this log record field.
- className (am) -- optional default value for this log record field.
- timer (am) -- (ElapsedTime) optional default value for this log record field.
- partnerName (ame) -- optional default value for this log record field.
- targetEntity (me) -- optional default value for this log record field.
- targetServiceName (me) -- optional default value for this log record field.
- statusCode (am) -- optional default value for this log record field.
- responseCode (am) -- optional default value for this log record field.
- responseDescription (am) -- optional default value for this log record field.
- processKey (am) -- optional default value for this log record field.
- targetVirtualEntity (m) -- optional default value for this log record field.
- customField1 (am) -- optional default value for this log record field.
- customField2 (am) -- optional default value for this log record field.
- customField3 (am) -- optional default value for this log record field.
- customField4 (am) -- optional default value for this log record field.
- errorCategory (e) -- optional default value for this log record field.
- errorCode (e) -- optional default value for this log record field.
- errorDescription (e) -- optional default value for this log record field.
-
- Note: the pipe '|' character is not allowed in any log record field.
- """
-
- # timestamp will automatically be inserted
- style = int(self._getVal('style', '', **kwargs))
- requestID = self._getVal('requestID', '', **kwargs)
- serviceInstanceID = self._getVal('serviceInstanceID', '', **kwargs)
- threadID = self._getVal('threadID', threading.currentThread().getName(), **kwargs)
- serverName = self._getVal('serverName', '', **kwargs)
- serviceName = self._getVal('serviceName', '', **kwargs)
- instanceUUID = self._getVal('instanceUUID', '', **kwargs)
- upperLogLevel = self._noSep(logLevel.upper())
- severity = self._getVal('severity', '', **kwargs)
- serverIPAddress = self._getVal('serverIPAddress', '', **kwargs)
- server = self._getVal('server', '', **kwargs)
- IPAddress = self._getVal('IPAddress', '', **kwargs)
- className = self._getVal('className', '', **kwargs)
- timer = self._getVal('timer', '', **kwargs)
- partnerName = self._getVal('partnerName', '', **kwargs)
- targetEntity = self._getVal('targetEntity', '', **kwargs)
- targetServiceName = self._getVal('targetServiceName', '', **kwargs)
- statusCode = self._getVal('statusCode', '', **kwargs)
- responseCode = self._getVal('responseCode', '', **kwargs)
- responseDescription = self._noSep(self._getVal('responseDescription', '', **kwargs))
- processKey = self._getVal('processKey', '', **kwargs)
- targetVirtualEntity = self._getVal('targetVirtualEntity', '', **kwargs)
- customField1 = self._getVal('customField1', '', **kwargs)
- customField2 = self._getVal('customField2', '', **kwargs)
- customField3 = self._getVal('customField3', '', **kwargs)
- customField4 = self._getVal('customField4', '', **kwargs)
- errorCategory = self._getVal('errorCategory', '', **kwargs)
- errorCode = self._getVal('errorCode', '', **kwargs)
- errorDescription = self._noSep(self._getVal('errorDescription', '', **kwargs))
-
- detailMessage = self._noSep(message)
- if bool(re.match(r" *$", detailMessage)):
- return # don't log empty messages
-
- useLevel = self._intLogLevel(upperLogLevel)
- if CommonLogger.verbose: print("logger STYLE=%s" % style)
- if useLevel < self._logLevelThreshold:
- if CommonLogger.verbose: print("skipping because of level")
- pass
- else:
- with self._logLock:
- if style == CommonLogger.ErrorFile:
- if CommonLogger.verbose: print("using CommonLogger.ErrorFile")
- self._logger.log(50, '%s|%s|%s|%s|%s|%s|%s|%s|%s|%s' \
- %(requestID, threadID, serviceName, partnerName, targetEntity, targetServiceName,
- errorCategory, errorCode, errorDescription, detailMessage))
- elif style == CommonLogger.DebugFile:
- if CommonLogger.verbose: print("using CommonLogger.DebugFile")
- self._logger.log(50, '%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s' \
- %(requestID, threadID, serverName, serviceName, instanceUUID, upperLogLevel,
- severity, serverIPAddress, server, IPAddress, className, timer, detailMessage))
- elif style == CommonLogger.AuditFile:
- if CommonLogger.verbose: print("using CommonLogger.AuditFile")
- endAuditTime, endAuditMsec = self._getTime()
- if self._begTime is not None:
- d = { 'begtime': self._begTime, 'begmsecs': self._begMsec, 'endtime': endAuditTime, 'endmsecs': endAuditMsec }
- else:
- d = { 'begtime': endAuditTime, 'begmsecs': endAuditMsec, 'endtime': endAuditTime, 'endmsecs': endAuditMsec }
- self._begTime = None
- unused = ""
- self._logger.log(50, '%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s' \
- %(requestID, serviceInstanceID, threadID, serverName, serviceName, partnerName,
- statusCode, responseCode, responseDescription, instanceUUID, upperLogLevel,
- severity, serverIPAddress, timer, server, IPAddress, className, unused,
- processKey, customField1, customField2, customField3, customField4, detailMessage), extra=d)
- elif style == CommonLogger.MetricsFile:
- if CommonLogger.verbose: print("using CommonLogger.MetricsFile")
- endMetricsTime, endMetricsMsec = self._getTime()
- if self._begTime is not None:
- d = { 'begtime': self._begTime, 'begmsecs': self._begMsec, 'endtime': endMetricsTime, 'endmsecs': endMetricsMsec }
- else:
- d = { 'begtime': endMetricsTime, 'begmsecs': endMetricsMsec, 'endtime': endMetricsTime, 'endmsecs': endMetricsMsec }
- self._begTime = None
- unused = ""
- self._logger.log(50, '%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s' \
- %(requestID, serviceInstanceID, threadID, serverName, serviceName, partnerName,
- targetEntity, targetServiceName, statusCode, responseCode, responseDescription,
- instanceUUID, upperLogLevel, severity, serverIPAddress, timer, server, IPAddress,
- className, unused, processKey, targetVirtualEntity, customField1, customField2,
- customField3, customField4, detailMessage), extra=d)
- else:
- print("!!!!!!!!!!!!!!!! style not set: %s" % self._fields["style"])
-
- def _getTime(self):
- ct = time.time()
- lt = time.localtime(ct)
- return (time.strftime(CommonLogger.DateFmt, lt), (ct - int(ct)) * 1000)
-
- def setStartRecordEvent(self):
- """
- Set the start time to be saved for both audit and metrics records
- """
- self._begTime, self._begMsec = self._getTime()
-
- def _getVal(self, key, default, **kwargs):
- val = self._fields.get(key)
- if key in kwargs: val = kwargs[key]
- if val is None: val = default
- return self._noSep(val)
-
- def _noSep(self, message):
- if message is None: return ''
- return re.sub(r'[\|\n]', ' ', str(message))
-
- def _intLogLevel(self, logLevel):
- if logLevel == 'FATAL': useLevel = 50
- elif logLevel == 'ERROR': useLevel = 40
- elif logLevel == 'WARN': useLevel = 30
- elif logLevel == 'INFO': useLevel = 20
- elif logLevel == 'DEBUG': useLevel = 10
- else: useLevel = 0
- return useLevel
-
- def _mkdir_p(self, filename):
- """Create missing directories from a full filename path like mkdir -p"""
-
- if filename is None:
- return
-
- folder=os.path.dirname(filename)
-
- if folder == "":
- return
-
- if not os.path.exists(folder):
- try:
- os.makedirs(folder)
- except OSError as err:
- print("error number %d creating %s directory to hold %s logfile: %s" %(err.errno, err.filename, filename, err.strerror), file=sys.stderr)
- sys.exit(2)
- except Exception as err:
- print("error creating %s directory to hold %s logfile: %s" %(folder, filename, str(err)), file=sys.stderr)
- sys.exit(2)
-
-def __checkTime1(line):
- format = r'[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]T[0-9][0-9]:[0-9][0-9]:[0-9][0-9],[0-9][0-9][0-9][+]00:00[|]'
- format = r'[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2},[0-9]{3}[+]00:00[|]'
- m = re.match(format, line)
- if not m:
- print("ERROR: time string did not match proper time format, %s" %line)
- print("\t: format=%s" % format)
- return 1
- return 0
-
-def __checkTime2(line, different):
- format = '[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]T[0-9][0-9]:[0-9][0-9]:([0-9][0-9]),([0-9][0-9][0-9])[+]00:00[|][0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]T[0-9][0-9]:[0-9][0-9]:([0-9][0-9]),([0-9][0-9][0-9])[+]00:00[|]'
- format = r'[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:([0-9]{2}),([0-9]{3})[+]00:00[|][0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:([0-9]{2}),([0-9]{3})[+]00:00[|]'
- m = re.match(format, line)
- if not m:
- print("ERROR: time strings did not match proper time format, %s" %line)
- print("\t: format=%s" % format)
- return 1
- second1 = int(m.group(1))
- msec1 = int(m.group(2))
- second2 = int(m.group(3))
- msec2 = int(m.group(4))
- if second1 > second2: second2 += 60
- t1 = second1 * 1000 + msec1
- t2 = second2 * 1000 + msec2
- diff = t2 - t1
- # print("t1=%d (%d,%d) t2=%d (%d,%d), diff = %d" % (t1, second1, msec1, t2, second2, msec2, diff))
- if different:
- if diff < 500:
- print("ERROR: times did not differ enough: %s" % line)
- return 1
- else:
- if diff > 10:
- print("ERROR: times were too far apart: %s" % line)
- return 1
- return 0
-
-def __checkLog(logfile, numLines, numFields):
- lineCount = 0
- errorCount = 0
- with open(logfile, "r") as fp:
- for line in fp:
- # print("saw line %s" % line)
- lineCount += 1
- c = line.count('|')
- if c != numFields:
- print("ERROR: wrong number of fields. Expected %d, got %d: %s" % (numFields, c, line))
- errorCount += 1
- if re.search("should not appear", line):
- print("ERROR: a line appeared that should not have appeared, %s" % line)
- errorCount += 1
- elif re.search("single time", line):
- errorCount += __checkTime1(line)
- elif re.search("time should be the same", line):
- errorCount += __checkTime2(line, different=False)
- elif re.search("time should be different", line):
- errorCount += __checkTime2(line, different=True)
- else:
- print("ERROR: an unknown message appeared, %s" % line)
- errorCount += 1
-
- if lineCount != numLines:
- print("ERROR: expected %d lines, but got %d lines" % (numLines, lineCount))
- errorCount += 1
- return errorCount
-
-if __name__ == "__main__":
- import os, argparse
- parser = argparse.ArgumentParser(description="test the CommonLogger functions")
- parser.add_argument("-k", "--keeplogs", help="Keep the log files after finishing the tests", action="store_true")
- parser.add_argument("-v", "--verbose", help="Print debugging messages", action="store_true")
- args = parser.parse_args()
-
- spid = str(os.getpid())
- if args.keeplogs:
- spid = ""
- logcfg = "/tmp/cl.log" + spid + ".cfg"
- errorLog = "/tmp/cl.error" + spid + ".log"
- metricsLog = "/tmp/cl.metrics" + spid + ".log"
- auditLog = "/tmp/cl.audit" + spid + ".log"
- debugLog = "/tmp/cl.debug" + spid + ".log"
- if args.verbose: CommonLogger.verbose = True
-
- import atexit
- def cleanupTmps():
- for f in [ logcfg, errorLog, metricsLog, auditLog, debugLog ]:
- try:
- os.remove(f)
- except:
- pass
- if not args.keeplogs:
- atexit.register(cleanupTmps)
-
- with open(logcfg, "w") as o:
- o.write("error = " + errorLog + "\n" +
- "errorLogLevel = WARN\n" +
- "metrics = " + metricsLog + "\n" +
- "metricsLogLevel = INFO\n" +
- "audit = " + auditLog + "\n" +
- "auditLogLevel = INFO\n" +
- "debug = " + debugLog + "\n" +
- "debugLogLevel = DEBUG\n")
-
- import uuid
- instanceUUID = uuid.uuid1()
- serviceName = "testharness"
- errorLogger = CommonLogger(logcfg, "error", style=CommonLogger.ErrorFile, instanceUUID=instanceUUID, serviceName=serviceName)
- debugLogger = CommonLogger(logcfg, "debug", style=CommonLogger.DebugFile, instanceUUID=instanceUUID, serviceName=serviceName)
- auditLogger = CommonLogger(logcfg, "audit", style=CommonLogger.AuditFile, instanceUUID=instanceUUID, serviceName=serviceName)
- metricsLogger = CommonLogger(logcfg, "metrics", style=CommonLogger.MetricsFile, instanceUUID=instanceUUID, serviceName=serviceName)
-
- testsRun = 0
- errorCount = 0
- errorLogger.debug("error calling debug (should not appear)")
- errorLogger.info("error calling info (should not appear)")
- errorLogger.warn("error calling warn (single time)")
- errorLogger.error("error calling error (single time)")
- errorLogger.setStartRecordEvent()
- time.sleep(1)
- errorLogger.fatal("error calling fatal, after setStartRecordEvent and sleep (start should be ignored, single time)")
- testsRun += 6
- errorCount += __checkLog(errorLog, 3, 10)
-
- auditLogger.debug("audit calling debug (should not appear)")
- auditLogger.info("audit calling info (time should be the same)")
- auditLogger.warn("audit calling warn (time should be the same)")
- auditLogger.error("audit calling error (time should be the same)")
- auditLogger.setStartRecordEvent()
- time.sleep(1)
- auditLogger.fatal("audit calling fatal, after setStartRecordEvent and sleep, time should be different)")
- testsRun += 6
- errorCount += __checkLog(auditLog, 4, 25)
-
- debugLogger.debug("debug calling debug (single time)")
- debugLogger.info("debug calling info (single time)")
- debugLogger.warn("debug calling warn (single time)")
- debugLogger.setStartRecordEvent()
- time.sleep(1)
- debugLogger.error("debug calling error, after SetStartRecordEvent and sleep (start should be ignored, single time)")
- debugLogger.fatal("debug calling fatal (single time)")
- errorCount += __checkLog(debugLog, 5, 13)
- testsRun += 6
-
- metricsLogger.debug("metrics calling debug (should not appear)")
- metricsLogger.info("metrics calling info (time should be the same)")
- metricsLogger.warn("metrics calling warn (time should be the same)")
- metricsLogger.setStartRecordEvent()
- time.sleep(1)
- metricsLogger.error("metrics calling error, after SetStartRecordEvent and sleep, time should be different")
- metricsLogger.fatal("metrics calling fatal (time should be the same)")
- testsRun += 6
- errorCount += __checkLog(metricsLog, 4, 28)
-
- print("%d tests run, %d errors found" % (testsRun, errorCount))
diff --git a/osdf/logging/onap_common_v1/CommonLogger_test.config b/osdf/logging/onap_common_v1/CommonLogger_test.config
deleted file mode 100755
index 584fb5e..0000000
--- a/osdf/logging/onap_common_v1/CommonLogger_test.config
+++ /dev/null
@@ -1,58 +0,0 @@
-# -------------------------------------------------------------------------
-# Copyright (c) 2015-2017 AT&T Intellectual Property
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# -------------------------------------------------------------------------
-#
-
-# You may change this file while your program is running and CommonLogger will automatically reconfigure accordingly.
-# Changing these parameters may leave old log files lying around.
-
-
-#--- Parameters that apply to all logs
-#
-# rotateMethod: time, size, stdout, stderr, none
-#... Note: the following two parameters apply only when rotateMethod=time
-# timeRotateIntervalType: S, M, H, D, W0 - W6, or midnight (seconds, minutes, hours, days, weekday (0=Monday), or midnight UTC)
-# timeRotateInterval: >= 1 (1 means every timeRotateIntervalType, 2 every other, 3 every third, etc.)
-#... Note: the following parameter applies only when rotateMethod=size
-# sizeMaxBytes: >= 0 (0 means no limit, else maximum filesize in Bytes)
-# backupCount: >= 0 (Number of rotated backup files to retain. If rotateMethod=time, 0 retains *all* backups. If rotateMethod=size, 0 retains *no* backups.)
-#
-rotateMethod = time
-timeRotateIntervalType = midnight
-timeRotateInterval = 1
-sizeMaxBytes = 0
-backupCount = 6
-
-
-#--- Parameters that define log filenames and their initial LogLevel threshold
-#... Note: CommonLogger will exit if your process does not have permission to write to the file.
-#
-
-error = /opt/logs/oof/error.log
-errorLogLevel = WARN
-errorStyle = error
-
-metrics = /opt/logs/oof/metrics.log
-metricsLogLevel = INFO
-metricsStyle = metrics
-
-audit = /opt/logs/oof/audit.log
-auditLogLevel = INFO
-auditStyle = audit
-
-debug = /opt/logs/oof/debug.log
-debugLogLevel = DEBUG
-debugStyle = debug
diff --git a/osdf/logging/onap_common_v1/CommonLogger_testing.py b/osdf/logging/onap_common_v1/CommonLogger_testing.py
deleted file mode 100755
index 43e0ec3..0000000
--- a/osdf/logging/onap_common_v1/CommonLogger_testing.py
+++ /dev/null
@@ -1,143 +0,0 @@
-#!/usr/bin/python
-
-# -------------------------------------------------------------------------
-# Copyright (c) 2015-2017 AT&T Intellectual Property
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# -------------------------------------------------------------------------
-#
-"""
-Test the ONAP Common Logging library in Python.
-CommonLogger_test.py
-"""
-
-
-from __future__ import print_function # for the example code below parsing command line options
-import os, sys, getopt # for the example code below parsing command line options
-
-from osdf.logging.onap_common_v1.CommonLogger import CommonLogger # all that is needed to import the CommonLogger library
-
-import uuid # to create UUIDs for our log records
-import time # to create elapsed time for our log records
-
-
-#----- A client might want to allow specifying the configFile as a command line option
-usage="usage: %s [ -c <configFile> ]" % ( os.path.basename(__file__) )
-try:
- opts, args = getopt.getopt(sys.argv[1:], "c:")
-except getopt.GetoptError:
- print(usage, file=sys.stderr)
- sys.exit(2)
-
-configFile = "CommonLogger_test.config"
-for opt, arg in opts:
- if opt == "-c":
- configFile = arg
- else:
- print(usage, file=sys.stderr)
- sys.exit(2)
-
-
-#----- Instantiate the loggers
-
-# The client's top-level program (e.g., vPRO.py) can create a unique identifier UUID to differentiate between multiple instances of itself.
-instanceUUID = uuid.uuid1()
-
-# The client should identify its ONAP component -- and if applicable -- its ONAP sub-component
-serviceName = "DCAE/vPRO"
-
-# Instantiate using a configuration file with a key specifying the log file name and set fields' default values
-errorLog = CommonLogger.CommonLogger(configFile, "error", instanceUUID=instanceUUID, serviceName=serviceName)
-metricsLog = CommonLogger.CommonLogger(configFile, "metrics", instanceUUID=instanceUUID, serviceName=serviceName)
-auditLog = CommonLogger.CommonLogger(configFile, "audit", instanceUUID=instanceUUID, serviceName=serviceName)
-debugLog = CommonLogger.CommonLogger(configFile, "debug", instanceUUID=instanceUUID, serviceName=serviceName)
-
-
-#----- use the loggers
-
-# both metrics and audit logs can have an event starting time. This only affects the next log message.
-metricsLog.setStartRecordEvent()
-auditLog.setStartRecordEvent()
-
-# Simple log messages
-debugLog.debug("a DEBUG message for the debug log")
-metricsLog.info("an INFO message for the metrics log")
-auditLog.info("an INFO message for the audit log")
-errorLog.warn("a WARN message for the error log")
-errorLog.error("an ERROR message for the error log")
-errorLog.fatal("a FATAL message for the error log")
-
-
-# Can override any of the other fields when writing each log record
-debugLog.debug("demonstrating overriding all fields with atypical values", requestID="2", serviceInstanceID="3", threadID="4", serverName="5", serviceName="6", instanceUUID="7", severity="9", serverIPAddress="10", server="11", IPAddress="12", className="13", timer="14")
-
-
-# This is an example of an interaction between two ONAP components:
-
-# vPRO generates Closed Loop RESTful API requests to App-C, knowing this information:
-requestClient = "netman@localdcae.att.com:~/vPRO_trinity/vPRO.py:905" # uniquely identifies the requester
-requestTime = "2015-08-20 20:57:14.463426" # unique ID of the request within the requester's scope
-request = "Restart"
-
-# Form the value for Common Logging's requestID field:
-requestID = requestClient + "+" + requestTime # vPRO will use this as the unique requestID
-# requestID = uuid.uuid1() # other services might generate a UUID as their requestID
-
-# Form the value for Common Logging's serviceName field when an interaction between two ONAP components:
-ourONAP = serviceName
-peerONAP = "App-C"
-operation = request
-interaction = ourONAP + ":" + peerONAP + "." + operation
-
-# Let's calculate and report elapsed times
-start = time.time()
-
-# Log the request
-auditLog.info("Requesting %s to %s" %(peerONAP, operation), requestID=requestID, serviceName=interaction)
-
-# Wait for first response
-time.sleep(1) # simulate processing the action, e.g., waiting for response from App-C
-
-# Form the value for Common Logging's serviceName field when an interaction between two ONAP components:
-operation = 'PENDING'
-interaction = peerONAP + ":" + ourONAP + "." + operation
-
-# Log the response with elapsed time
-ms = int(round(1000 * (time.time() - start))) # Calculate elapsed time in ms
-auditLog.info("%s acknowledged receiving request for %s" %(peerONAP, operation), requestID=requestID, serviceName=interaction, timer=ms)
-
-# Wait for next response
-time.sleep(1) # simulate processing the action, e.g., waiting for response from App-C
-
-# Form the value for Common Logging's serviceName field when an interaction between two ONAP components:
-operation = 'SUCCESS'
-interaction = peerONAP + ":" + ourONAP + "." + operation
-
-# Log the response with elapsed time
-ms = int(round(1000 * (time.time() - start))) # Calculate elapsed time in ms
-auditLog.info("%s finished %s" %(peerONAP, operation), requestID=requestID, serviceName=interaction, timer=ms)
-
-
-# Can change the fields' default values for a logger after instantiation if desired
-debugLog.setFields(serviceName="DCAE", threadID='thread-2')
-
-# Then subsequent logging will have the new default field values
-debugLog.info("Something happened")
-debugLog.warn("Something happened again")
-
-
-# Unset (set=None) a field so the Common Logger will use the default value
-debugLog.info("threadID should be default", threadID=None)
-debugLog.setFields(threadID=None)
-debugLog.info("threadID should be default")
diff --git a/osdf/logging/onap_common_v1/README.md b/osdf/logging/onap_common_v1/README.md
deleted file mode 100755
index 596cd7f..0000000
--- a/osdf/logging/onap_common_v1/README.md
+++ /dev/null
@@ -1,214 +0,0 @@
-# -------------------------------------------------------------------------
-# Copyright (c) 2015-2017 AT&T Intellectual Property
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# -------------------------------------------------------------------------
-#
-
-# Common Logging Wrapper for Python
-
-* CommonLogger.py is the module (library) to import
-* CommonLogger_test.config is an example configuration file used by CommonLogger_test.py
-* CommonLogger_test.py is an example of how to import and use the CommonLogger module
-
-## Configuration File
-
-Configure common logging for a python application in a configuration file.
-In the file, put key = value assignments
-
-* defining the filename for each log file you will create, such as
-'error=/path/error.log', 'metrics=/path/metrics.log', 'audit=/path/audit.log',
-and 'debug=/path/debug.log'.
-The name used (shown here as 'error', 'metrics', etc.) is chosen in the program, allowing a single configuration file to be
-used by numerous different programs.
-(It will be referred to below as &lt;logKey&gt;.)
-* defining the style of the log messages to be produced,
-using &lt;logKey&gt; suffixed with 'Style', as in 'errorStyle=', and one of the
-words 'error', 'metrics', 'audit' and 'debug'.
-* defining the minimum level of log messages to be retained in a log file,
-using &lt;logKey&gt; suffixed with 'LogLevel', as in 'errorLogLevel=WARN'.
-The levels are DEBUG, INFO, WARN, ERROR, and FATAL.
-So specifying WARN will retain only WARN, ERROR, and FATAL level
-log messages, while specifying DEBUG will retain all levels of log messages:
-DEBUG, INFO, WARN, ERROR, and FATAL.
-
-Comments may be included on any line following a '#' character.
-
-Common logging monitors the configuration file so if the file is edited
-and any of its values change, then common logging will implement the changes
-in the running application.
-This enables operations to change log levels or even log filenames without
-interrupting the running application.
-
-By default, log files are rotated daily at midnight UTC, retaining 6 backup versions.
-
-Other strategies can be specified within the configuration file using the keywords:
-
-* rotateMethod = one of 'time', 'size', and 'none' (case insensitive)
-
-If rotateMethod is 'time', the following keywords apply:
-* backupCount = Number of rotated backup files to retain, >= 0. 0 retains *all* backups.
-* timeRotateIntervalType = one of 'S', 'M', 'H', 'D', 'W0', 'W1', 'W2', 'W3', 'W4', 'W5', 'W6', 'midnight'
-(seconds, minutes, hours, days, weekday (0=Monday), or midnight UTC)
-* timeRotateInterval = number of seconds/minutes/hours/days between rotations. (Ignored for W#.)
-
-If rotateMethod is 'size', the following keywords apply:
-* backupCount = Number of rotated backup files to retain, >= 0. 0 retains *no* backups.
-* sizeMaxBytes = maximum number of bytes allowed in the file before rotation
-* sizeRotateMode = for now, this defaults to 'a' and may only be specified as 'a'.
-It is passed to the underlying Python Logging methods.
-
-
-Besides logging to a file, it is also possible to send log messages elsewhere,
-using &lt;logKey&gt; suffixed with 'LogType'.
-You can set &lt;logKey&gt;LogType to any of 'filelogger', 'stdoutlogger', 'stderrlogger', 'socketlogger' or 'nulllogger' (case insensitive).
-
-* 'filelogger' is the default specifying logging to a file.
-* 'stdoutlogger' and 'stderrlogger' send the output to the corresponding output streams.
-* 'socketlogger' will send the output to the corresponding socket host.
-* 'nulllogger' turns off logging.
-
-If &lt;logKey&gt;LogType is 'socketlogger', the following keywords apply:
-* &lt;logKey&gt;SocketHost = FQDN or IP address of the host to send the logs to
-* &lt;logKey&gt;SocketPort = the port (> 0) to open on that host
-
-This is an example configuration file:
-
- error = /var/log/DCAE/vPRO/error.log
- errorLogLevel = WARN
- errorStyle = error
-
- metrics = /var/log/DCAE/vPRO/metrics.log
- metricsLogLevel = INFO
- metricsStyle = metrics
-
- audit = /var/log/DCAE/vPRO/audit.log
- auditLogLevel = INFO
- auditStyle = audit
-
- debug = /var/log/DCAE/vPRO/debug.log
- debugLogLevel = DEBUG
- debugStyle = debug
-
-## Coding Python Applications to Produce ONAP Common Logging
-
-A python application uses common logging by importing the CommonLogger
-module, instantiating a CommonLogger object for each log file, and then
-invoking each object's debug, info, warn, error, or fatal methods to log
-messages to the file. There are four styles of logging:
-error/info logs, debug logs, audit logs, and metrics logs.
-The difference between the types of logs is in the list of fields that
-are printed out.
-
-### Importing the CommonLogger Module
-
-Importing the CommonLogger module is typical:
-
- sys.path.append("/opt/app/dcae-commonlogging/python")
- import CommonLogger
-
-### Creating a CommonLogger object:
-
-When creating a CommonLogger object, three arguments are required:
-
-1. The configuration filename.
-2. The keyword name in the configuration file that
-defines the log filename and parameters controlling rotation of the logfiles.
-(This is the &lt;logKey&gt; referred to above.)
-3. The keyword arguments for style and to set default values for the log record fields.
-
-The style of the log (one of CommonLogger.DebugFile, CommonLogger.AuditFile,
-CommonLogger.MetricsFile and CommonLogger.ErrorFile), must be specified either
-in the configuration file (e.g., errorStyle=error or metricsStyle=metrics) or
-using a style= keyword and one of the values: CommonLogger.DebugFile,
-CommonLogger.AuditFile, CommonLogger.MetricsFile and CommonLogger.ErrorFile.
-
-Keyword arguments for log record fields are as follows.
-The annotation indicates whether the field is included in
-(d) debug logs, (a) audit logs, (m) metrics logs, and (e) error logs.
-
-* requestID (dame)
-* serviceInstanceID (am)
-* threadID (am)
-* serverName (am)
-* serviceName (am)
-* instanceUUID (am)
-* severity (am)
-* serverIPAddress (am)
-* server (am)
-* IPAddress (am)
-* className (am)
-* timer (am)
-* partnerName (ame)
-* targetEntity (me)
-* targetServiceName (me)
-* statusCode (am)
-* responseCode (am)
-* responseDescription (am)
-* processKey (am)
-* targetVirtualEntity (m)
-* customField1 (am)
-* customField2 (am)
-* customField3 (am)
-* customField4 (am)
-* errorCategory (e)
-* errorCode (e)
-* errorDescription (e)
-
-Sample code:
-
- """ The style can be specified here or in the config file using errorStyle. """
- errorLog = CommonLogger.CommonLogger("my.config", "error", style=CommonLogger.ErrorFile, serviceName="DCAE/vPRO")
- infoLog = CommonLogger.CommonLogger("my.config", "info", serviceName="DCAE/vPRO")
-
-### Setting default values for fields:
-
-The object's setFields method allows keyword arguments changing default values for the log record fields.
-
- errorLog.setFields(serviceName="DCAE/vPRO", threadID="thread-2")
-
-### Calling Methods
-
-The object's debug(), info(), warn(), error(), and fatal() methods require a detailMessage argument
-(which can be a zero-length string) and allow the keyword arguments for setting log record field
-values for just that one message.
-Any newlines or '|' characters in the message will be changed to a single space.
-
- infoLog.info("Something benign happened.")
- errorLog.fatal("Something very bad happened.", threadID="thread-4")
-
-### Output
-
-Note that no field may contain the '|' (pipe) field separation character, as that
-character is used as the separator between fields.
-Here is a possible example of a produced log record:
-
- 2015-10-12T15:56:43,182+00:00|netman@localdcae.att.com:~/vPRO_trinity/vPRO.py:905+2015-08-20 20:57:14.463426||||DCAE/vPRO:App-C.Restart|d4d5fc66-70f9-11e5-b0b1-005056866a82|INFO||135.16.76.33|mtvpro01dev1.dev.att.com|||1001|Finished Restart
- 2016-12-09T23:06:02,314+00:00||MainThread|DCAE/vPRO|||||||a FATAL message for the error log
-
-### Example Code
-
-The main program within CommonLogger.py contains a regression test of the CommonLogger methods.
-
-CommonLogger_test.py contains a complete demonstration of a python application
-using the python CommonLogging wrapper module, including creating UUIDs,
-setting default log field values, and timing operations.
-
-## Upgrading from Previous Versions of CommonLogger
-
-The current version of CommonLogger is 99% compatible with earlier versions of CommonLogger.
-The key change, due to updated ONAP logging requirements, is the choice to use different lists
-of fields in different types of log files.
-This required a mandatory "style" to be specified, which we chose to support using either a
-new keyword in the configuration file or a new parameter keyword when creating the logger.
diff --git a/osdf/logging/onap_common_v1/__init__.py b/osdf/logging/onap_common_v1/__init__.py
deleted file mode 100755
index e69de29..0000000
--- a/osdf/logging/onap_common_v1/__init__.py
+++ /dev/null
diff --git a/osdf/logging/onap_common_v1/makefile b/osdf/logging/onap_common_v1/makefile
deleted file mode 100755
index 498127e..0000000
--- a/osdf/logging/onap_common_v1/makefile
+++ /dev/null
@@ -1,40 +0,0 @@
-# -------------------------------------------------------------------------
-# Copyright (c) 2015-2017 AT&T Intellectual Property
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# -------------------------------------------------------------------------
-#
-
-test:
- rm -f /tmp/cl.*.log
- python CommonLogger.py
- rm -f /tmp/cl.*.log
- python3 CommonLogger.py -k -v
- # python CommonLogger_test.py
- # python3 CommonLogger_test.py
-
-# STAGEDIR is overridden in ../makefile
-STAGEDIR=/tmp
-
-build: CommonLogger.html
- mkdir -p $(STAGEDIR)/python
- cp -p *.py *.config *.md CommonLogger.html $(STAGEDIR)/python
- chmod a+x $(STAGEDIR)/python/*.py
-
-CommonLogger.html: CommonLogger.py
- pydoc -w ./CommonLogger.py
-
-clean:
- rm -rf __pycache__ *.pyc CommonLogger.html
- rm -rf *~
diff --git a/osdf/logging/osdf_logging.py b/osdf/logging/osdf_logging.py
index a54d426..e457f18 100755
--- a/osdf/logging/osdf_logging.py
+++ b/osdf/logging/osdf_logging.py
@@ -16,33 +16,18 @@
# -------------------------------------------------------------------------
#
+import logging
+import os
import traceback
-import uuid
+from logging import config
+
+import yaml
+from onaplogging import monkey
-from .onap_common_v1.CommonLogger import CommonLogger
from osdf.utils.programming_utils import MetaSingleton
-
-def log_handlers_pre_onap(config_file="config/onap_logging_common_v1.config",
- service_name="OOF_OSDF"):
- """
- Convenience handlers for logging to different log files
-
- :param config_file: configuration file (properties file) that specifies log location, rotation, etc.
- :param service_name: name for this service
- :return: dictionary of log objects: "error", "metrics", "audit", "debug"
-
- We can use the returned values as follows:
- X["error"].fatal("a FATAL message for the error log")
- X["error"].error("an ERROR message for the error log")
- X["error"].warn("a WARN message for the error log")
- X["audit"].info("an INFO message for the audit log")
- X["metrics"].info("an INFO message for the metrics log")
- X["debug"].debug("a DEBUG message for the debug log")
- """
- main_params = dict(instanceUUID=uuid.uuid1(), serviceName=service_name, configFile=config_file)
- return dict((x, CommonLogger(logKey=x, **main_params))
- for x in ["error", "metrics", "audit", "debug"])
+BASE_DIR = os.path.dirname(__file__)
+LOGGING_FILE = os.path.join(BASE_DIR, '..', '..', 'config', 'log.yml')
def format_exception(err, prefix=None):
@@ -56,6 +41,15 @@ def format_exception(err, prefix=None):
return exception_desc if not prefix else prefix + ": " + exception_desc
+def create_log_dirs():
+ with open(LOGGING_FILE, 'r') as fid:
+ yaml_config = yaml.full_load(fid)
+ for key in yaml_config['handlers']:
+ a = yaml_config['handlers'][key]
+ if a.get('filename'):
+ os.makedirs(os.path.dirname(a['filename']), exist_ok=True)
+
+
class OOF_OSDFLogMessageHelper(metaclass=MetaSingleton):
"""Provides loggers as a singleton (otherwise, we end up with duplicate messages).
Provides error_log, metric_log, audit_log, and debug_log (in that order)
@@ -64,17 +58,7 @@ class OOF_OSDFLogMessageHelper(metaclass=MetaSingleton):
log_handlers = None
default_levels = ["error", "metrics", "audit", "debug"]
- def _setup_handlers(self, log_version="pre_onap", config_file=None, service_name=None):
- """return error_log, metrics_log, audit_log, debug_log"""
- if self.log_handlers is None:
- params = {}
- params.update({"config_file": config_file} if config_file else {})
- params.update({"service_name": service_name} if service_name else {})
-
- if log_version == "pre_onap":
- self.log_handlers = log_handlers_pre_onap(**params)
-
- def get_handlers(self, levels=None, log_version="pre_onap", config_file=None, service_name=None):
+ def get_handlers(self, levels=None):
"""Return ONAP-compliant log handlers for different levels. Each "level" ends up in a different log file
with a prefix of that level.
@@ -87,9 +71,11 @@ class OOF_OSDFLogMessageHelper(metaclass=MetaSingleton):
if levels is None: we return handlers for self.default_levels
if levels is ["error", "audit"], we return log handlers for that.
"""
- self._setup_handlers(log_version="pre_onap", config_file=config_file, service_name=service_name)
+ create_log_dirs()
+ monkey.patch_all()
+ config.yamlConfig(filepath=LOGGING_FILE, watchDog=False)
wanted_levels = self.default_levels if levels is None else levels
- return [self.log_handlers.get(x) for x in wanted_levels]
+ return [logging.getLogger(x) for x in wanted_levels]
class OOF_OSDFLogMessageFormatter(object):
@@ -228,6 +214,7 @@ class OOF_OSDFLogMessageFormatter(object):
MH = OOF_OSDFLogMessageFormatter
+
error_log, metrics_log, audit_log, debug_log = OOF_OSDFLogMessageHelper().get_handlers()
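
With the CommonLogger wrapper removed, callers now receive plain logging.Logger objects named after the levels configured in config/log.yml. A minimal usage sketch, assuming the module-level names exported above:

    from osdf.logging.osdf_logging import audit_log, debug_log, error_log, metrics_log

    error_log.warning("a WARN message for the error log")
    error_log.error("an ERROR message for the error log")
    audit_log.info("an INFO message for the audit log")
    metrics_log.info("an INFO message for the metrics log")
    debug_log.debug("a DEBUG message for the debug log")
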
diff --git a/osdf/optimizers/licenseopt/simple_license_allocation.py b/osdf/optimizers/licenseopt/simple_license_allocation.py
index 74d220f..b2aaba4 100644
--- a/osdf/optimizers/licenseopt/simple_license_allocation.py
+++ b/osdf/optimizers/licenseopt/simple_license_allocation.py
@@ -15,6 +15,7 @@
#
# -------------------------------------------------------------------------
#
+from osdf.utils.mdc_utils import mdc_from_json
def license_optim(request_json):
@@ -24,6 +25,7 @@ def license_optim(request_json):
:param request_json: Request in a JSON format
:return: A tuple of licensekey-group-uuid-list and entitlement-group-uuid-list
"""
+ mdc_from_json(request_json)
req_id = request_json["requestInfo"]["requestId"]
model_name = request_json.get('placementInfo', {}).get('serviceInfo', {}).get('modelInfo', {}).get('modelName')
diff --git a/osdf/optimizers/pciopt/pci_opt_processor.py b/osdf/optimizers/pciopt/pci_opt_processor.py
index 03c742b..47c4288 100644
--- a/osdf/optimizers/pciopt/pci_opt_processor.py
+++ b/osdf/optimizers/pciopt/pci_opt_processor.py
@@ -26,6 +26,7 @@ from osdf.utils.interfaces import get_rest_client
from .configdb import request as config_request
from .solver.optimizer import pci_optimize as optimize
from .solver.pci_utils import get_cell_id, get_pci_value
+from osdf.utils.mdc_utils import mdc_from_json
"""
This application generates PCI Optimization API calls using the information received from PCI-Handler-MS, SDN-C
@@ -42,6 +43,7 @@ def process_pci_optimation(request_json, osdf_config, flat_policies):
:return: response from PCI Opt
"""
try:
+ mdc_from_json(request_json)
rc = get_rest_client(request_json, service="pcih")
req_id = request_json["requestInfo"]["requestId"]
cell_info_list, network_cell_info = config_request(request_json, osdf_config, flat_policies)
@@ -55,9 +57,11 @@ def process_pci_optimation(request_json, osdf_config, flat_policies):
body = build_json_error_body(err)
metrics_log.info(MH.sending_response(req_id, "ERROR"))
rc.request(json=body, noresponse=True)
- except RequestException:
+ except RequestException as err:
+            MDC.put('requestID', req_id)
error_log.error("Error sending asynchronous notification for {} {}".format(req_id, traceback.format_exc()))
- return
+ raise err
+
try:
metrics_log.info(MH.calling_back_with_body(req_id, rc.url, pci_response))
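
The reworked exception handling above no longer swallows a failed notification: the request ID is pushed into the MDC, the error is logged, and the exception is re-raised. A hedged sketch of that pattern in isolation (the helper name and arguments here are illustrative, not taken from this module):

    from onaplogging.mdcContext import MDC
    from requests import RequestException

    def notify_or_raise(send_notification, req_id, error_log):
        # send_notification stands in for rc.request(json=body, noresponse=True)
        try:
            send_notification()
        except RequestException:
            MDC.put('requestID', req_id)  # make the failure traceable by request ID
            error_log.error("Error sending asynchronous notification for %s" % req_id)
            raise  # propagate instead of silently returning
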
diff --git a/osdf/optimizers/placementopt/conductor/remote_opt_processor.py b/osdf/optimizers/placementopt/conductor/remote_opt_processor.py
index 614eca3..22a0307 100644
--- a/osdf/optimizers/placementopt/conductor/remote_opt_processor.py
+++ b/osdf/optimizers/placementopt/conductor/remote_opt_processor.py
@@ -24,6 +24,7 @@ from osdf.logging.osdf_logging import metrics_log, MH, error_log
from osdf.optimizers.placementopt.conductor import conductor
from osdf.optimizers.licenseopt.simple_license_allocation import license_optim
from osdf.utils.interfaces import get_rest_client
+from osdf.utils.mdc_utils import mdc_from_json
def process_placement_opt(request_json, policies, osdf_config):
@@ -37,6 +38,7 @@ def process_placement_opt(request_json, policies, osdf_config):
"""
try:
+ mdc_from_json(request_json)
rc = get_rest_client(request_json, service="so")
req_id = request_json["requestInfo"]["requestId"]
transaction_id = request_json['requestInfo']['transactionId']
diff --git a/osdf/optimizers/routeopt/simple_route_opt.py b/osdf/optimizers/routeopt/simple_route_opt.py
index 060e1ed..b00180d 100644
--- a/osdf/optimizers/routeopt/simple_route_opt.py
+++ b/osdf/optimizers/routeopt/simple_route_opt.py
@@ -19,6 +19,8 @@
import requests
from requests.auth import HTTPBasicAuth
+from osdf.utils.mdc_utils import mdc_from_json
+
class RouteOpt:
@@ -54,6 +56,7 @@ class RouteOpt:
:param logical_link:
:return:
"""
+ mdc_from_json(request)
src_access_node_id = request["srcPort"]["src-access-node-id"]
dst_access_node_id = request["dstPort"]["dst-access-node-id"]
diff --git a/osdf/utils/mdc_utils.py b/osdf/utils/mdc_utils.py
new file mode 100644
index 0000000..6da67bd
--- /dev/null
+++ b/osdf/utils/mdc_utils.py
@@ -0,0 +1,55 @@
+# -------------------------------------------------------------------------
+# Copyright (c) 2019 AT&T Intellectual Property
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
+import socket
+import threading
+import uuid
+
+from onaplogging.mdcContext import MDC
+
+
+def default_server_info():
+ # If not set or purposely set = None, then set default
+ if MDC.get('server') is None:
+ try:
+ server = socket.getfqdn()
+ except Exception as err:
+ try:
+ server = socket.gethostname()
+ except Exception as err:
+ server = ''
+ MDC.put('server', server)
+ if MDC.get('serverIPAddress') is None:
+ try:
+            server_ip_address = socket.gethostbyname(MDC.get('server'))  # resolve the server name stored above
+ except Exception:
+ server_ip_address = ""
+ MDC.put('serverIPAddress', server_ip_address)
+
+
+def mdc_from_json(request_json):
+ MDC.put('instanceUUID', uuid.uuid1())
+ MDC.put('serviceName', 'OOF_OSDF')
+ MDC.put('threadID', threading.currentThread().getName())
+ default_server_info()
+ MDC.put('requestID', request_json['requestInfo']['requestId'])
+ MDC.put('partnerName', request_json['requestInfo']['sourceId'])
+
+
+def clear_mdc():
+ MDC.clear()
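
A short usage sketch for the new helpers; the request body below is a minimal stand-in with illustrative values, since mdc_from_json only reads requestInfo.requestId and requestInfo.sourceId:

    from osdf.utils.mdc_utils import clear_mdc, mdc_from_json

    request_json = {"requestInfo": {"requestId": "req-123", "sourceId": "SO"}}
    mdc_from_json(request_json)  # sets instanceUUID, serviceName, threadID, server fields,
                                 # requestID and partnerName in the MDC
    # ... handle the request; log records emitted now carry these MDC fields ...
    clear_mdc()                  # drop the per-request context afterwards
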
diff --git a/osdfapp.py b/osdfapp.py
index b8afbf4..1cfb8a3 100755
--- a/osdfapp.py
+++ b/osdfapp.py
@@ -23,15 +23,19 @@ OSDF Manager Main Flask Application
import json
import ssl
import sys
+import time
import traceback
from optparse import OptionParser
from threading import Thread # for scaling up, may need celery with RabbitMQ or redis
import pydevd
+import yaml
from flask import Flask, request, Response, g
from requests import RequestException
from schematics.exceptions import DataError
+yaml.warnings({'YAMLLoadWarning': False})
+
import osdf.adapters.aaf.sms as sms
import osdf.operation.responses
from osdf.adapters.policy.interface import get_policies
@@ -47,6 +51,7 @@ from osdf.optimizers.pciopt.pci_opt_processor import process_pci_optimation
from osdf.optimizers.placementopt.conductor.remote_opt_processor import process_placement_opt
from osdf.optimizers.routeopt.simple_route_opt import RouteOpt
from osdf.utils import api_data_utils
+from osdf.utils.mdc_utils import clear_mdc, mdc_from_json
from osdf.webapp.appcontroller import auth_basic
ERROR_TEMPLATE = osdf.ERROR_TEMPLATE
@@ -96,6 +101,20 @@ def handle_data_error(e):
return response
+@app.before_request
+def log_request():
+ g.request_start = time.clock()
+ request_json = request.get_json()
+ g.request_id = request_json['requestInfo']['requestId']
+ mdc_from_json(request_json)
+
+
+@app.after_request
+def log_response(response):
+ clear_mdc()
+ return response
+
+
@app.route("/api/oof/v1/healthcheck", methods=["GET"])
def do_osdf_health_check():
"""Simple health check"""
diff --git a/pom.xml b/pom.xml
index bfeded2..4b513ea 100644
--- a/pom.xml
+++ b/pom.xml
@@ -21,7 +21,7 @@
<parent>
<groupId>org.onap.oparent</groupId>
<artifactId>oparent-python</artifactId>
- <version>2.0.0</version>
+ <version>2.1.0</version>
</parent>
<groupId>org.onap.optf.osdf</groupId>
diff --git a/requirements.txt b/requirements.txt
index 241392b..0ea452e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -13,3 +13,4 @@ docopt>=0.6.2
pydevd==1.4.0
onapsmsclient>=0.0.4
pymzn==0.17.1
+onappylog>=1.0.9
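
The new onappylog requirement provides the onaplogging package imported above (onaplogging.monkey and onaplogging.mdcContext.MDC), so it needs to be installed wherever the YAML-driven logging setup is loaded.
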
diff --git a/test/logging/test_osdf_logging.py b/test/logging/test_osdf_logging.py
index 982ef0b..50689dd 100755
--- a/test/logging/test_osdf_logging.py
+++ b/test/logging/test_osdf_logging.py
@@ -45,10 +45,6 @@ class TestOSDFLogging(unittest.TestCase):
self.json_body = mock.MagicMock()
self.F = formatter
- def test_log_handlers_pre_onap(self):
- res = L1.log_handlers_pre_onap()
- assert type(res) == dict
-
def test_format_exception(self):
res = L1.format_exception(Exception("Some error"))
@@ -168,14 +164,6 @@ class TestOSDFLogging(unittest.TestCase):
"""Log the message to error_log.warn and audit_log.warn"""
L1.warn_audit_error("Some warning message")
- def test_log_message_multi(msg):
- X = L1.log_handlers_pre_onap()
- wanted_methods = [
- X["error"].error, X["error"].warn, X["audit"].info,
- X["metrics"].info, X["debug"].debug, X["error"].fatal
- ]
- L1.log_message_multi("Some log message", *wanted_methods)
-
if __name__ == "__main__":
unittest.main()
diff --git a/tox.ini b/tox.ini
index 0272bda..4723f46 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,7 +1,7 @@
[tox]
skipsdist=True
-envlist = py3
+envlist = py3, pylint
[testenv]
distribute = False
@@ -21,5 +21,9 @@ deps = -r{toxinidir}/requirements.txt
[run]
source=./osdf/,osdfapp.py
+[testenv:pylint]
+whitelist_externals=bash
+commands = bash -c "pylint --reports=y osdf | tee pylint.out"
+
[testenv:py3]
basepython=python3.6
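
With the new environment added, tox -e pylint runs the checker on its own and tees its report into pylint.out, while a plain tox run now exercises both the py3 tests and the lint pass.
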