author    Satoshi Fujii <fujii-satoshi@jp.fujitsu.com>  2021-10-12 05:21:57 +0000
committer Satoshi Fujii <fujii-satoshi@jp.fujitsu.com>  2021-10-15 02:51:43 +0000
commit    8e86bb7817a272fa8d1c6ecc16435e1324326ac0 (patch)
tree      b0ececd4dacf7f4b2841f8fe1dd545452f66cc02
parent    b681828aacf0f0bf516d539a70be74d556206062 (diff)
Remove unused code
config_notif.py is not referenced from the main code and is not used at all.
Also, tests/HB_Array.json is not used by any test.

Signed-off-by: Satoshi Fujii <fujii-satoshi@jp.fujitsu.com>
Issue-ID: DCAEGEN2-2939
Change-Id: I6d462a406e2d36d0243f4a72b7faac2f79e353d9
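The "not referenced" claim can be sanity-checked before deleting the module. Below is a minimal sketch, not part of this commit; the helper name check_unreferenced.py and the scan root are illustrative. It walks the service sources and reports every mention of config_notif outside the module itself.

    # check_unreferenced.py -- hypothetical helper, not part of this commit
    import os
    import re

    def find_references(root, module_name, skip_dirs=(".git",)):
        """Return (path, line_no, line) for every mention of module_name under root."""
        pattern = re.compile(r"\b" + re.escape(module_name) + r"\b")
        hits = []
        for dirpath, dirnames, filenames in os.walk(root):
            # prune directories we do not want to scan
            dirnames[:] = [d for d in dirnames if d not in skip_dirs]
            for name in filenames:
                # skip non-Python files and the module's own source file
                if not name.endswith(".py") or name == module_name + ".py":
                    continue
                path = os.path.join(dirpath, name)
                with open(path, encoding="utf-8", errors="ignore") as fh:
                    for lineno, line in enumerate(fh, 1):
                        if pattern.search(line):
                            hits.append((path, lineno, line.rstrip()))
        return hits

    if __name__ == "__main__":
        for path, lineno, line in find_references("miss_htbt_service", "config_notif"):
            print(f"{path}:{lineno}: {line}")

Before this change, such a scan would report only the import in miss_htbt_service/mod/trapd_vnf_table.py, which this commit removes as well; the remaining references live in tests/test_config_notif.py, also deleted here.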
-rw-r--r--  .gitignore                                   1
-rw-r--r--  Changelog.md                                 5
-rw-r--r--  miss_htbt_service/config_notif.py          226
-rw-r--r--  miss_htbt_service/mod/trapd_vnf_table.py    16
-rw-r--r--  pom.xml                                      2
-rw-r--r--  setup.py                                     2
-rw-r--r--  tests/HB_Array.json                         82
-rw-r--r--  tests/test_config_notif.py                 436
-rw-r--r--  tests/test_trapd_vnf_table.py                6
-rw-r--r--  version.properties                           4
10 files changed, 11 insertions, 769 deletions
diff --git a/.gitignore b/.gitignore
index 464c7a2..2f126be 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,3 +10,4 @@ hb_logs.txt
xunit-results.xml
miss_htbt_service.egg-info
target/
+.idea/
diff --git a/Changelog.md b/Changelog.md
index b40916e..54a6970 100644
--- a/Changelog.md
+++ b/Changelog.md
@@ -4,6 +4,11 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
+## [2.4.0] - 2021/10/12
+### Changed
+- [DCAEGEN2-2939] Removed unused code (config\_notif.py)
+
+
## [2.3.1.] - 2021/06/19
### Security
- Fixed SQL injection vulnerability
diff --git a/miss_htbt_service/config_notif.py b/miss_htbt_service/config_notif.py
deleted file mode 100644
index 053784d..0000000
--- a/miss_htbt_service/config_notif.py
+++ /dev/null
@@ -1,226 +0,0 @@
-#!/usr/bin/env python3
-# ============LICENSE_START=======================================================
-# Copyright 2018-2020 AT&T Intellectual Property, Inc. All rights reserved.
-# Copyright (c) 2019 Pantheon.tech. All rights reserved.
-# Copyright 2020 Deutsche Telekom. All rights reserved.
-# Copyright 2021 Samsung Electronics. All rights reserved.
-# Copyright 2021 Fujitsu Ltd.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# Author Prakash Hosangady (ph553f)
-# Read the hb_common table
-# Update the state to RECONFIGURATION and save the hb_common table
-
-import os
-import os.path as path
-import socket
-import yaml
-import json
-import time
-import psycopg2
-
-# use the fully qualified name here to let monkeypatching work
-# from .mod.trapd_get_cbs_config import get_cbs_config
-import mod.trapd_get_cbs_config
-import mod.trapd_settings as tds
-
-hb_properties_file = path.abspath(path.join(__file__, "../config/hbproperties.yaml"))
-
-
-def postgres_db_open(username, password, host, port, database_name):
- envPytest = os.getenv('pytest', "")
- if envPytest == 'test':
- return True
- try:
- connection = psycopg2.connect(database=database_name, user=username, password=password, host=host, port=port)
- except Exception as e:
- print("HB_Notif::postgress connect error: %s" % e)
- connection = True
- return connection
-
-
-def db_table_creation_check(connection_db, table_name):
- envPytest = os.getenv('pytest', "")
- if envPytest == 'test':
- return True
- cur = None
- try:
- cur = connection_db.cursor()
- cur.execute("SELECT * FROM information_schema.tables WHERE table_name = %s", (table_name,))
- database_names = cur.fetchone()
- if (database_names is not None) and (table_name in database_names):
- print(f"FOUND the table {table_name}")
- print("HB_Notif::Postgres already has table - %s" % table_name)
- return True
- else:
- print(f"did NOT find the table {table_name}")
- print("HB_Notif::Postgres does not have table - %s" % table_name)
- return False
- except psycopg2.DatabaseError as e:
- print('COMMON:Error %s' % e)
- finally:
- if cur:
- cur.close()
-
-
-def commit_and_close_db(connection_db):
- envPytest = os.getenv('pytest', "")
- if envPytest == 'test':
- return True
- try:
- connection_db.commit() # <--- makes sure the change is shown in the database
- connection_db.close()
- return True
- except psycopg2.DatabaseError as e:
- return False
-
-
-def read_hb_properties_default():
- # Read the hbproperties.yaml for postgress and CBS related data
- s = open(hb_properties_file, 'r')
- a = yaml.full_load(s)
- if ((os.getenv('pg_ipAddress') is None) or (os.getenv('pg_portNum') is None) or (
- os.getenv('pg_userName') is None) or (os.getenv('pg_passwd') is None)):
- ip_address = a['pg_ipAddress']
- port_num = a['pg_portNum']
- user_name = a['pg_userName']
- password = a['pg_passwd']
- else:
- ip_address = os.getenv('pg_ipAddress')
- port_num = os.getenv('pg_portNum')
- user_name = os.getenv('pg_userName')
- password = os.getenv('pg_passwd')
-
- dbName = a['pg_dbName']
- db_name = dbName.lower()
- cbs_polling_required = a['CBS_polling_allowed']
- cbs_polling_interval = a['CBS_polling_interval']
- s.close()
- # TODO: there is a mismatch here between read_hb_properties_default and read_hb_properties.
- # read_hb_properties() forces all of the variables returned here to be strings, while the code here does not.
- return ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval
-
-
-def read_hb_properties(jsfile):
- try:
- with open(jsfile, 'r') as outfile:
- cfg = json.load(outfile)
- except Exception as err:
- print("Json file read error - %s" % err)
- return read_hb_properties_default()
- try:
- ip_address = str(cfg['pg_ipAddress'])
- port_num = str(cfg['pg_portNum'])
- user_name = str(cfg['pg_userName'])
- password = str(cfg['pg_passwd'])
- dbName = str(cfg['pg_dbName'])
- db_name = dbName.lower()
- cbs_polling_required = str(cfg['CBS_polling_allowed'])
- cbs_polling_interval = str(cfg['CBS_polling_interval'])
- if "SERVICE_NAME" in cfg:
- os.environ['SERVICE_NAME'] = str(cfg['SERVICE_NAME'])
- except Exception as err:
- print("Json file read parameter error - %s" % err)
- return read_hb_properties_default()
- return ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval
-
-
-def read_hb_common(user_name, password, ip_address, port_num, db_name):
- envPytest = os.getenv('pytest', "")
- if envPytest == 'test':
- hbc_pid = 10
- hbc_srcName = "srvc_name"
- hbc_time = 1541234567
- hbc_state = "RUNNING"
- return hbc_pid, hbc_state, hbc_srcName, hbc_time
- connection_db = postgres_db_open(user_name, password, ip_address, port_num, db_name)
- cur = connection_db.cursor()
- cur.execute("SELECT process_id, source_name, last_accessed_time, current_state FROM hb_common")
- rows = cur.fetchall()
- # TODO: What if rows returned None or empty?
- print("HB_Notif::hb_common contents - %s" % rows)
- hbc_pid = rows[0][0]
- hbc_srcName = rows[0][1]
- hbc_time = rows[0][2]
- hbc_state = rows[0][3]
- commit_and_close_db(connection_db)
- cur.close()
- return hbc_pid, hbc_state, hbc_srcName, hbc_time
-
-
-def update_hb_common(update_flg, process_id, state, user_name, password, ip_address, port_num, db_name):
- current_time = int(round(time.time()))
- source_name = socket.gethostname()
- source_name = source_name + "-" + str(os.getenv('SERVICE_NAME', ""))
- envPytest = os.getenv('pytest', "")
- if envPytest == 'test':
- return True
- connection_db = postgres_db_open(user_name, password, ip_address, port_num, db_name)
- cur = connection_db.cursor()
- cur.execute("UPDATE hb_common SET LAST_ACCESSED_TIME = %s, CURRENT_STATE = %s WHERE "
- "PROCESS_ID = %s AND SOURCE_NAME = %s", (current_time, state, process_id, source_name))
- commit_and_close_db(connection_db)
- cur.close()
- return True
-
-
-def fetch_json_file(download_json="../etc/download1.json", config_json="../etc/config.json"):
- # use the fully qualified name here to let monkeypatching work
- # if get_cbs_config():
- if mod.trapd_get_cbs_config.get_cbs_config():
- current_runtime_config_file_name = download_json
- envPytest = os.getenv('pytest', "")
- if envPytest == 'test':
- jsfile = "../etc/config.json"
- return jsfile
- print("Config_N:current config logged to : %s" % current_runtime_config_file_name)
- with open(current_runtime_config_file_name, 'w') as outfile:
- json.dump(tds.c_config, outfile)
- jsfile = current_runtime_config_file_name
- else:
- print("MSHBD:CBS Config not available, using local config")
- jsfile = config_json
- print("Config_N: The json file is - %s" % jsfile)
- return jsfile
-
-
-def config_notif_run():
- jsfile = fetch_json_file()
- ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval = read_hb_properties(
- jsfile)
- envPytest = os.getenv('pytest', "")
- if envPytest == 'test':
- return True
- connection_db = postgres_db_open(user_name, password, ip_address, port_num, db_name)
- cur = connection_db.cursor()
- if db_table_creation_check(connection_db, "hb_common") is False:
- print("HB_Notif::ERROR::hb_common table not exists - No config download")
- connection_db.close()
- else:
- hbc_pid, hbc_state, hbc_srcName, hbc_time = read_hb_common(user_name, password, ip_address, port_num, db_name)
- state = "RECONFIGURATION"
- update_flg = 1
- ret = update_hb_common(update_flg, hbc_pid, state, user_name, password, ip_address, port_num, db_name)
- # TODO: There is no way for update_hb_common() to return false
- if ret:
- print("HB_Notif::hb_common table updated with RECONFIGURATION state")
- commit_and_close_db(connection_db)
- return True
- else:
- print("HB_Notif::Failure updating hb_common table")
- commit_and_close_db(connection_db)
- return False
-
- cur.close()
diff --git a/miss_htbt_service/mod/trapd_vnf_table.py b/miss_htbt_service/mod/trapd_vnf_table.py
index 81f1f3b..ffef262 100644
--- a/miss_htbt_service/mod/trapd_vnf_table.py
+++ b/miss_htbt_service/mod/trapd_vnf_table.py
@@ -36,7 +36,6 @@ import get_logger
import db_monitoring as dbmon
import htbtworker as pm
import misshtbtd as db
-import config_notif as cf
import cbs_polling as cbs
prog_name = os.path.basename(__file__)
@@ -89,21 +88,6 @@ def verify_DB_creation_hb_common(user_name,password,ip_address,port_num,db_name)
return _db_status
-def verify_cbsPolling_required():
- _cbspolling_status = True
- os.environ['pytest']='test'
- os.environ['CONSUL_HOST']='localhost'
- os.environ['SERVICE_NAME']='mvp-dcaegen2-heartbeat-static'
- try:
- _cbspolling_status=cf.config_notif_run()
- except Exception as e:
- print("Config_notify error - %s" % e)
-
- os.unsetenv('pytest')
- os.unsetenv('CONSUL_HOST')
- os.unsetenv('SERVICE_NAME')
- return _cbspolling_status
-
def verify_cbspolling():
os.environ['pytest']='test'
os.environ['SERVICE_NAME']='mvp-dcaegen2-heartbeat-static'
diff --git a/pom.xml b/pom.xml
index 12cb29c..aeb1ab6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -37,7 +37,7 @@ limitations under the License.
<groupId>org.onap.dcaegen2.services</groupId>
<artifactId>heartbeat</artifactId>
<name>dcaegen2-services-heartbeat</name>
- <version>2.3.1</version>
+ <version>2.4.0</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<sonar.sources>.</sonar.sources>
diff --git a/setup.py b/setup.py
index a5d581a..22af9ab 100644
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,7 @@ from setuptools import setup, find_packages
setup(
name='miss_htbt_service',
description='Missing heartbeat microservice to communicate with policy-engine',
- version='2.3.1',
+ version='2.4.0',
#packages=find_packages(exclude=["tests.*", "tests"]),
packages=find_packages(),
install_requires=[
diff --git a/tests/HB_Array.json b/tests/HB_Array.json
deleted file mode 100644
index 1b44802..0000000
--- a/tests/HB_Array.json
+++ /dev/null
@@ -1,82 +0,0 @@
-[{ "event": { "commonEventHeader": { "vesEventListenerVersion": "7.0.2", "domain": "heartbeat", "eventId": "mvfs10", "eventName": "Heartbeat_vDNS1", "lastEpochMicrosec": 1548653647392, "priority": "Normal", "reportingEntityName": "ibcx0001vm002oam001", "sequence": 1000, "sourceName": "SOURCE_NAME2", "startEpochMicrosec": 1548653647392, "version": "4.0.2", "reportingEntityId": "cc305d54-75b4-431b-adb2-eb6b9e541234", "sourceId": "VNFA_SRC1", "eventType": "platform", "nfcNamingCode": "VNFA", "nfNamingCode": "VNFA", "timeZoneOffset": "UTC-05:30" }, "heartbeatFields": { "heartbeatInterval": 20, "heartbeatFieldsVersion": "3.0" } } },
-{
- "event": {
- "commonEventHeader": {
- "vesEventListenerVersion": "7.0.2",
- "domain": "heartbeat",
- "eventId": "mvfs10",
- "eventName": "Heartbeat_vFW1",
- "lastEpochMicrosec": 1548653647392,
- "priority": "Normal",
- "reportingEntityName": "ibcx0001vm002oam001",
- "sequence": 1000,
- "sourceName": "SOURCE_NAME3",
- "startEpochMicrosec": 1548653647392,
- "version": "4.0.2",
- "reportingEntityId": "cc305d54-75b4-431b-adb2-eb6b9e541234",
- "sourceId": "VNFB_SRC5",
- "eventType": "platform",
- "nfcNamingCode": "VNFB",
- "nfNamingCode": "VNFB",
- "timeZoneOffset": "UTC-05:30"
- },
- "heartbeatFields": {
- "heartbeatInterval": 20,
- "heartbeatFieldsVersion": "3.0"
- }
- }
-},
-{
- "event": {
- "commonEventHeader": {
- "vesEventListenerVersion": "7.0.2",
- "domain": "heartbeat",
- "eventId": "mvfs10",
- "eventName": "Heartbeat_vFW1",
- "lastEpochMicrosec": 1548653647392,
- "priority": "Normal",
- "reportingEntityName": "ibcx0001vm002oam001",
- "sequence": 1000,
- "sourceName": "SOURCE_NAME4",
- "startEpochMicrosec": 1548653647392,
- "version": "4.0.2",
- "reportingEntityId": "cc305d54-75b4-431b-adb2-eb6b9e541234",
- "sourceId": "VNFA_SRC3",
- "eventType": "platform",
- "nfcNamingCode": "VNFA",
- "nfNamingCode": "VNFA",
- "timeZoneOffset": "UTC-05:30"
- },
- "heartbeatFields": {
- "heartbeatInterval": 20,
- "heartbeatFieldsVersion": "3.0"
- }
- }
-},
-{
- "event": {
- "commonEventHeader": {
- "vesEventListenerVersion": "7.0.2",
- "domain": "heartbeat",
- "eventId": "mvfs10",
- "eventName": "Heartbeat_xx",
- "lastEpochMicrosec": 1548653647392,
- "priority": "Normal",
- "reportingEntityName": "ibcx0001vm002oam001",
- "sequence": 1000,
- "sourceName": "SOURCE_NAME5",
- "startEpochMicrosec": 1548653647392,
- "version": "4.0.2",
- "reportingEntityId": "cc305d54-75b4-431b-adb2-eb6b9e541234",
- "sourceId": "VNFA_SRC3",
- "eventType": "platform",
- "nfcNamingCode": "VNFA",
- "nfNamingCode": "VNFA",
- "timeZoneOffset": "UTC-05:30"
- },
- "heartbeatFields": {
- "heartbeatInterval": 20,
- "heartbeatFieldsVersion": "3.0"
- }
- }
-}
diff --git a/tests/test_config_notif.py b/tests/test_config_notif.py
deleted file mode 100644
index 01ea737..0000000
--- a/tests/test_config_notif.py
+++ /dev/null
@@ -1,436 +0,0 @@
-# ============LICENSE_START=======================================================
-# Copyright (c) 2020 AT&T Intellectual Property. All rights reserved.
-# Copyright 2020 Deutsche Telekom. All rights reserved.
-# Copyright 2021 Fujitsu Ltd.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-
-import config_notif
-import mod.trapd_get_cbs_config
-import mod.trapd_settings
-
-from . import monkey_psycopg2
-import psycopg2
-import tempfile, json, os
-
-def assert_default_values(ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval):
- """
- used in the test_read_hb_properties*() tests
- """
- assert(str(port_num) == "5432")
- assert(str(user_name) == "postgres")
- assert(str(db_name) == "postgres")
- assert(str(password) == "postgres")
-
-def test_read_hb_properties_default():
- """
- run read_hb_properties_default()
- """
- ( ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval ) = config_notif.read_hb_properties_default()
- assert_default_values(ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval)
-
-def test_read_hb_properties_success():
- """
- run read_hb_properties() to read properties from a file
- """
- tmp = tempfile.NamedTemporaryFile(mode="w+")
- testdata = {
- "pg_ipAddress": "10.0.0.99",
- "pg_portNum": 65432,
- "pg_dbName": "dbname",
- "pg_userName": "pguser",
- "pg_passwd": "pgpswd",
- "CBS_polling_allowed": True,
- "CBS_polling_interval": 30,
- "SERVICE_NAME": "service_name"
- }
- json.dump(testdata, tmp)
- tmp.flush()
- ( ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval ) = config_notif.read_hb_properties(tmp.name)
- assert(str(ip_address) == str(testdata["pg_ipAddress"]))
- assert(str(port_num) == str(testdata["pg_portNum"]))
- assert(str(user_name) == str(testdata["pg_userName"]))
- assert(str(password) == str(testdata["pg_passwd"]))
- assert(str(db_name) == str(testdata["pg_dbName"]))
- assert(str(cbs_polling_required) == str(testdata["CBS_polling_allowed"]))
- assert(str(cbs_polling_interval) == str(testdata["CBS_polling_interval"]))
- assert(str(os.environ['SERVICE_NAME']) == str(testdata["SERVICE_NAME"]))
-
-def test_read_hb_properties_fail_bad_json():
- """
- failure case for read_hb_properties: bad json in the file
- """
- tmp = tempfile.NamedTemporaryFile(mode="w+")
- print("bad json", file=tmp)
- tmp.flush()
- ( ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval ) = config_notif.read_hb_properties(tmp.name)
- assert_default_values(ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval)
-
-def test_read_hb_properties_fail_missing_parameter():
- """
- failure case for read_hb_properties: CBS_polling_allowed is missing
- """
- tmp = tempfile.NamedTemporaryFile(mode="w+")
- testdata = {
- "pg_ipAddress": "10.0.0.99",
- "pg_portNum": 65432,
- "pg_dbName": "dbname",
- "pg_userName": "pguser",
- "pg_passwd": "pgpswd",
- # "CBS_polling_allowed": True, # missing CBS_polling_allowed
- "CBS_polling_interval": 30,
- "SERVICE_NAME": "service_name"
- }
- json.dump(testdata, tmp)
- tmp.flush()
- ( ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval ) = config_notif.read_hb_properties(tmp.name)
- assert_default_values(ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval)
-
-def test_postgres_db_open(monkeypatch):
- """
- test postgres_db_open()
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
- dbconn = config_notif.postgres_db_open("test", "testpswd", "testsite", 5432, "dbname")
- assert(type(dbconn) is monkey_psycopg2.MockConn)
-
-def test_postgres_db_open_fail(monkeypatch):
- """
- failure ase for postgres_db_open()
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces(connect=True)
- dbconn = config_notif.postgres_db_open("test", "badpassword", "testsite", 5432, "dbname")
- assert(type(dbconn) is not monkey_psycopg2.MockConn)
-
-def test_db_table_creation_check(monkeypatch):
- """
- test db_table_creation_check()
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
- dbconn = config_notif.postgres_db_open("test", "testpswd", "testsite", 5432, "dbname")
- dbconn.monkey_setDbInfo({ "select * from information_schema.tables": [ [ "testtable" ] ] })
- assert(type(dbconn) is monkey_psycopg2.MockConn)
- ret = config_notif.db_table_creation_check(dbconn, "testtable")
- assert(ret == True)
- ret2 = config_notif.db_table_creation_check(dbconn, "missingtable")
- monkey_psycopg2.monkey_reset_forces(cursor=True)
- ret3 = config_notif.db_table_creation_check(dbconn, "testtable")
- assert(ret3 is None)
-
-def test_commit_and_close_db(monkeypatch):
- """
- test commit_and_close_db()
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
- dbconn = config_notif.postgres_db_open("test", "testpswd", "testsite", 5432, "dbname")
- assert(type(dbconn) is monkey_psycopg2.MockConn)
- print("commit_and_close_db(): no forced failures")
- ret = config_notif.commit_and_close_db(dbconn)
- assert(ret == True)
-
-def test_commit_and_close_db_fail1(monkeypatch):
- """
- failure case for commit_and_close_db(): dbconn.close() fails
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
- dbconn = config_notif.postgres_db_open("test", "testpswd", "testsite", 5432, "dbname")
- assert(type(dbconn) is monkey_psycopg2.MockConn)
- print("commit_and_close_db() - close failure")
- monkey_psycopg2.monkey_reset_forces(close=True)
- ret = config_notif.commit_and_close_db(dbconn)
- assert(ret == False)
-
-def test_commit_and_close_db_fail2(monkeypatch):
- """
- failure case for commit_and_close_db(): dbconn.commit() fails
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
- dbconn = config_notif.postgres_db_open("test", "testpswd", "testsite", 5432, "dbname")
- assert(type(dbconn) is monkey_psycopg2.MockConn)
- print("commit_and_close_db() - commit failure")
- monkey_psycopg2.monkey_reset_forces(commit=True)
- ret = config_notif.commit_and_close_db(dbconn)
- assert(ret == False)
-
-def test_read_hb_properties_default(monkeypatch):
- """
- test read_hb_properties_default()
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
- monkey_psycopg2.monkey_set_defaults({
- "testdb1": {
- "hb_common": [
- [ 1, "sn1", 31, "st1" ],
- [ 2, "sn2", 32, "st2" ]
- ]
- }
- })
-
- output = config_notif.read_hb_common("test", "testpswd", "testsite", 5432, "testdb1")
- assert(output[0] == 1)
- assert(output[1] == "st1")
- assert(output[2] == "sn1")
- assert(output[3] == 31)
-
-def test_update_hb_common(monkeypatch):
- """
- test update_hb_common()
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
- output = config_notif.update_hb_common(None, 1234, "st1234", "test", "testpswd", "testsite", 5432, "testdb1")
- assert(output == True)
-
-def monkeypatch_get_cbs_config_False():
- """
- monkeypatch for get_cbs_config() to force it to return False
- Required side affect: c_config is set to a json value
- """
- print("monkeypatch_get_cbs_config_False()")
- mod.trapd_settings.c_config = { "patch": "false" }
- return False
-
-def monkeypatch_get_cbs_config_True():
- """
- monkeypatch for get_cbs_config() to force it to return False
- Required side affect: c_config is set to a json value
- """
- print("monkeypatch_get_cbs_config_True()")
- mod.trapd_settings.c_config = { "patch": "true" }
- return True
-
-def test_fetch_json_file_get_cbs_config_is_true(monkeypatch):
- """
- test fetch_json_file() with get_cbs_config() returning True
- """
- monkeypatch.setattr(mod.trapd_get_cbs_config, 'get_cbs_config', monkeypatch_get_cbs_config_True)
- tmp1 = tempfile.NamedTemporaryFile(mode="w+")
- tmp2 = tempfile.NamedTemporaryFile(mode="w+")
- output = config_notif.fetch_json_file(download_json = tmp1.name, config_json = tmp2.name)
- assert(output == tmp1.name)
- with open(tmp1.name, "r") as fp:
- j1 = json.load(fp)
- print(f"j1={j1}")
- assert("patch" in j1 and j1["patch"] == "true")
-
-def test_fetch_json_file_get_cbs_config_is_false(monkeypatch):
- """
- test fetch_json_file() with get_cbs_config() returning False
- """
- monkeypatch.setattr(mod.trapd_get_cbs_config, 'get_cbs_config', monkeypatch_get_cbs_config_False)
- tmp1 = tempfile.NamedTemporaryFile(mode="w+")
- tmp2 = tempfile.NamedTemporaryFile(mode="w+")
- output = config_notif.fetch_json_file(download_json = tmp1.name, config_json = tmp2.name)
- assert(output == tmp2.name)
-
-FETCH_JSON_FILE = None
-
-def monkeypatch_fetch_json_file():
- """
- Monkeypatch for fetch_json_file() to test config_notif_run()
- """
- print("monkeypatch_fetch_json_file()")
- return FETCH_JSON_FILE
-
-def monkeypatch_return_False(*args, **kwargs):
- """
- Monkeypatch that can be used to force a function to return False
- """
- print("monkeypatch_return_False()")
- return False
-
-
-def test_config_notif_run_good(monkeypatch):
- """
- test config_notif_run()
- everything good: "dbname" found (from below JSON info), "hb_common" listed in tables
- and hb_common has data.
- """
- monkeypatch.setattr(config_notif, 'fetch_json_file', monkeypatch_fetch_json_file)
-
- tmp = tempfile.NamedTemporaryFile(mode="w+")
- global FETCH_JSON_FILE
- FETCH_JSON_FILE = tmp.name
-
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
-
- monkey_psycopg2.monkey_set_defaults({
- "dbname": {
- "from information_schema.tables": [
- [ "hb_common" ]
- ],
- "from hb_common": [
- [ 1, "sn1", 31, "st1" ],
- [ 2, "sn2", 32, "st2" ]
- ]
- }
- })
-
- testdata = {
- "pg_ipAddress": "10.0.0.99",
- "pg_portNum": 65432,
- "pg_dbName": "dbname",
- "pg_userName": "pguser",
- "pg_passwd": "pgpswd",
- "CBS_polling_allowed": True,
- "CBS_polling_interval": 30,
- "SERVICE_NAME": "service_name"
- }
- json.dump(testdata, tmp)
- tmp.flush()
-
- output = config_notif.config_notif_run()
- print(f"output={output}")
- assert(output == True)
-
-def test_config_notif_run_fail1(monkeypatch):
- """
- test config_notif_run()
- Failure case 1: "dbname" NOT found (from below JSON info), "hb_common" listed in tables
- and hb_common has data.
- """
- monkeypatch.setattr(config_notif, 'fetch_json_file', monkeypatch_fetch_json_file)
-
- tmp = tempfile.NamedTemporaryFile(mode="w+")
- global FETCH_JSON_FILE
- FETCH_JSON_FILE = tmp.name
-
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
-
- monkey_psycopg2.monkey_set_defaults({
- "dbnameNOTHERE": {
- "from information_schema.tables": [
- [ "hb_common" ]
- ],
- "from hb_common": [
- [ 1, "sn1", 31, "st1" ],
- [ 2, "sn2", 32, "st2" ]
- ]
- }
- })
-
- testdata = {
- "pg_ipAddress": "10.0.0.99",
- "pg_portNum": 65432,
- "pg_dbName": "dbname",
- "pg_userName": "pguser",
- "pg_passwd": "pgpswd",
- "CBS_polling_allowed": True,
- "CBS_polling_interval": 30,
- "SERVICE_NAME": "service_name"
- }
- json.dump(testdata, tmp)
- tmp.flush()
-
- output = config_notif.config_notif_run()
- print(f"output={output}")
- assert(output is None)
-
-def test_config_notif_run_fail2(monkeypatch):
- """
- test config_notif_run()
- Failure case 2: "dbname" found (from below JSON info), "hb_common" NOT listed in tables
- and hb_common has data.
- """
- monkeypatch.setattr(config_notif, 'fetch_json_file', monkeypatch_fetch_json_file)
-
- tmp = tempfile.NamedTemporaryFile(mode="w+")
- global FETCH_JSON_FILE
- FETCH_JSON_FILE = tmp.name
-
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
-
- monkey_psycopg2.monkey_set_defaults({
- "dbname": {
- "from information_schema.tables": [
- [ "hb_commonNOTHERE" ]
- ],
- "from hb_common": [
- [ 1, "sn1", 31, "st1" ],
- [ 2, "sn2", 32, "st2" ]
- ]
- }
- })
-
- testdata = {
- "pg_ipAddress": "10.0.0.99",
- "pg_portNum": 65432,
- "pg_dbName": "dbname",
- "pg_userName": "pguser",
- "pg_passwd": "pgpswd",
- "CBS_polling_allowed": True,
- "CBS_polling_interval": 30,
- "SERVICE_NAME": "service_name"
- }
- json.dump(testdata, tmp)
- tmp.flush()
-
- output = config_notif.config_notif_run()
- print(f"output={output}")
- assert(output is None)
-
-def test_config_notif_run_fail3(monkeypatch):
- """
- test config_notif_run()
- Failure case 3: "dbname" found (from below JSON info), "hb_common" listed in tables
- and update_hb_common() fails
- """
- monkeypatch.setattr(config_notif, 'fetch_json_file', monkeypatch_fetch_json_file)
- monkeypatch.setattr(config_notif, 'update_hb_common', monkeypatch_return_False)
-
- tmp = tempfile.NamedTemporaryFile(mode="w+")
- global FETCH_JSON_FILE
- FETCH_JSON_FILE = tmp.name
-
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
-
- monkey_psycopg2.monkey_set_defaults({
- "dbname": {
- "from information_schema.tables": [
- [ "hb_common" ]
- ],
- "from hb_common": [
- [ 1, "sn1", 31, "st1" ],
- [ 2, "sn2", 32, "st2" ]
- ]
- }
- })
-
- testdata = {
- "pg_ipAddress": "10.0.0.99",
- "pg_portNum": 65432,
- "pg_dbName": "dbname",
- "pg_userName": "pguser",
- "pg_passwd": "pgpswd",
- "CBS_polling_allowed": True,
- "CBS_polling_interval": 30,
- "SERVICE_NAME": "service_name"
- }
- json.dump(testdata, tmp)
- tmp.flush()
-
- output = config_notif.config_notif_run()
- print(f"output={output}")
- assert(output == False)
diff --git a/tests/test_trapd_vnf_table.py b/tests/test_trapd_vnf_table.py
index f89aa6a..577a320 100644
--- a/tests/test_trapd_vnf_table.py
+++ b/tests/test_trapd_vnf_table.py
@@ -28,7 +28,7 @@ import unittest
import get_logger
from mod.trapd_vnf_table import (
verify_DB_creation_1, verify_DB_creation_2, verify_DB_creation_hb_common,
- verify_cbsPolling_required, hb_properties, verify_cbspolling,
+ hb_properties, verify_cbspolling,
verify_sendControlLoop_VNF_ONSET, verify_sendControlLoop_VM_ONSET,
verify_sendControlLoop_VNF_ABATED, verify_sendControlLoop_VM_ABATED,
verify_fetch_json_file, verify_misshtbtdmain, verify_dbmonitoring,
@@ -56,10 +56,6 @@ class test_vnf_tables(unittest.TestCase):
self.assertEqual(result, True)
- def test_validate_cbspolling_required(self):
- result = verify_cbsPolling_required()
- self.assertEqual(result, True)
-
def test_cbspolling(self):
result= verify_cbspolling()
_logger.info(result)
diff --git a/version.properties b/version.properties
index f1c5779..c0f75b6 100644
--- a/version.properties
+++ b/version.properties
@@ -1,6 +1,6 @@
major=2
-minor=3
-patch=1
+minor=4
+patch=0
base_version=${major}.${minor}.${patch}
release_version=${base_version}
snapshot_version=${base_version}-SNAPSHOT