aboutsummaryrefslogtreecommitdiffstats
path: root/tests
diff options
context:
space:
mode:
authorSatoshi Fujii <fujii-satoshi@jp.fujitsu.com>2021-10-12 05:21:57 +0000
committerSatoshi Fujii <fujii-satoshi@jp.fujitsu.com>2021-10-15 02:51:43 +0000
commit8e86bb7817a272fa8d1c6ecc16435e1324326ac0 (patch)
treeb0ececd4dacf7f4b2841f8fe1dd545452f66cc02 /tests
parentb681828aacf0f0bf516d539a70be74d556206062 (diff)
Remove unused code
config_notif.py is not referenced from main code and not used at all. Also tests/HB_Array.json is not used by any test. Signed-off-by: Satoshi Fujii <fujii-satoshi@jp.fujitsu.com> Issue-ID: DCAEGEN2-2939 Change-Id: I6d462a406e2d36d0243f4a72b7faac2f79e353d9
Diffstat (limited to 'tests')
-rw-r--r--tests/HB_Array.json82
-rw-r--r--tests/test_config_notif.py436
-rw-r--r--tests/test_trapd_vnf_table.py6
3 files changed, 1 insertions, 523 deletions
diff --git a/tests/HB_Array.json b/tests/HB_Array.json
deleted file mode 100644
index 1b44802..0000000
--- a/tests/HB_Array.json
+++ /dev/null
@@ -1,82 +0,0 @@
-[{ "event": { "commonEventHeader": { "vesEventListenerVersion": "7.0.2", "domain": "heartbeat", "eventId": "mvfs10", "eventName": "Heartbeat_vDNS1", "lastEpochMicrosec": 1548653647392, "priority": "Normal", "reportingEntityName": "ibcx0001vm002oam001", "sequence": 1000, "sourceName": "SOURCE_NAME2", "startEpochMicrosec": 1548653647392, "version": "4.0.2", "reportingEntityId": "cc305d54-75b4-431b-adb2-eb6b9e541234", "sourceId": "VNFA_SRC1", "eventType": "platform", "nfcNamingCode": "VNFA", "nfNamingCode": "VNFA", "timeZoneOffset": "UTC-05:30" }, "heartbeatFields": { "heartbeatInterval": 20, "heartbeatFieldsVersion": "3.0" } } },
-{
- "event": {
- "commonEventHeader": {
- "vesEventListenerVersion": "7.0.2",
- "domain": "heartbeat",
- "eventId": "mvfs10",
- "eventName": "Heartbeat_vFW1",
- "lastEpochMicrosec": 1548653647392,
- "priority": "Normal",
- "reportingEntityName": "ibcx0001vm002oam001",
- "sequence": 1000,
- "sourceName": "SOURCE_NAME3",
- "startEpochMicrosec": 1548653647392,
- "version": "4.0.2",
- "reportingEntityId": "cc305d54-75b4-431b-adb2-eb6b9e541234",
- "sourceId": "VNFB_SRC5",
- "eventType": "platform",
- "nfcNamingCode": "VNFB",
- "nfNamingCode": "VNFB",
- "timeZoneOffset": "UTC-05:30"
- },
- "heartbeatFields": {
- "heartbeatInterval": 20,
- "heartbeatFieldsVersion": "3.0"
- }
- }
-},
-{
- "event": {
- "commonEventHeader": {
- "vesEventListenerVersion": "7.0.2",
- "domain": "heartbeat",
- "eventId": "mvfs10",
- "eventName": "Heartbeat_vFW1",
- "lastEpochMicrosec": 1548653647392,
- "priority": "Normal",
- "reportingEntityName": "ibcx0001vm002oam001",
- "sequence": 1000,
- "sourceName": "SOURCE_NAME4",
- "startEpochMicrosec": 1548653647392,
- "version": "4.0.2",
- "reportingEntityId": "cc305d54-75b4-431b-adb2-eb6b9e541234",
- "sourceId": "VNFA_SRC3",
- "eventType": "platform",
- "nfcNamingCode": "VNFA",
- "nfNamingCode": "VNFA",
- "timeZoneOffset": "UTC-05:30"
- },
- "heartbeatFields": {
- "heartbeatInterval": 20,
- "heartbeatFieldsVersion": "3.0"
- }
- }
-},
-{
- "event": {
- "commonEventHeader": {
- "vesEventListenerVersion": "7.0.2",
- "domain": "heartbeat",
- "eventId": "mvfs10",
- "eventName": "Heartbeat_xx",
- "lastEpochMicrosec": 1548653647392,
- "priority": "Normal",
- "reportingEntityName": "ibcx0001vm002oam001",
- "sequence": 1000,
- "sourceName": "SOURCE_NAME5",
- "startEpochMicrosec": 1548653647392,
- "version": "4.0.2",
- "reportingEntityId": "cc305d54-75b4-431b-adb2-eb6b9e541234",
- "sourceId": "VNFA_SRC3",
- "eventType": "platform",
- "nfcNamingCode": "VNFA",
- "nfNamingCode": "VNFA",
- "timeZoneOffset": "UTC-05:30"
- },
- "heartbeatFields": {
- "heartbeatInterval": 20,
- "heartbeatFieldsVersion": "3.0"
- }
- }
-}
diff --git a/tests/test_config_notif.py b/tests/test_config_notif.py
deleted file mode 100644
index 01ea737..0000000
--- a/tests/test_config_notif.py
+++ /dev/null
@@ -1,436 +0,0 @@
-# ============LICENSE_START=======================================================
-# Copyright (c) 2020 AT&T Intellectual Property. All rights reserved.
-# Copyright 2020 Deutsche Telekom. All rights reserved.
-# Copyright 2021 Fujitsu Ltd.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-
-import config_notif
-import mod.trapd_get_cbs_config
-import mod.trapd_settings
-
-from . import monkey_psycopg2
-import psycopg2
-import tempfile, json, os
-
-def assert_default_values(ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval):
- """
- used in the test_read_hb_properties*() tests
- """
- assert(str(port_num) == "5432")
- assert(str(user_name) == "postgres")
- assert(str(db_name) == "postgres")
- assert(str(password) == "postgres")
-
-def test_read_hb_properties_default():
- """
- run read_hb_properties_default()
- """
- ( ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval ) = config_notif.read_hb_properties_default()
- assert_default_values(ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval)
-
-def test_read_hb_properties_success():
- """
- run read_hb_properties() to read properties from a file
- """
- tmp = tempfile.NamedTemporaryFile(mode="w+")
- testdata = {
- "pg_ipAddress": "10.0.0.99",
- "pg_portNum": 65432,
- "pg_dbName": "dbname",
- "pg_userName": "pguser",
- "pg_passwd": "pgpswd",
- "CBS_polling_allowed": True,
- "CBS_polling_interval": 30,
- "SERVICE_NAME": "service_name"
- }
- json.dump(testdata, tmp)
- tmp.flush()
- ( ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval ) = config_notif.read_hb_properties(tmp.name)
- assert(str(ip_address) == str(testdata["pg_ipAddress"]))
- assert(str(port_num) == str(testdata["pg_portNum"]))
- assert(str(user_name) == str(testdata["pg_userName"]))
- assert(str(password) == str(testdata["pg_passwd"]))
- assert(str(db_name) == str(testdata["pg_dbName"]))
- assert(str(cbs_polling_required) == str(testdata["CBS_polling_allowed"]))
- assert(str(cbs_polling_interval) == str(testdata["CBS_polling_interval"]))
- assert(str(os.environ['SERVICE_NAME']) == str(testdata["SERVICE_NAME"]))
-
-def test_read_hb_properties_fail_bad_json():
- """
- failure case for read_hb_properties: bad json in the file
- """
- tmp = tempfile.NamedTemporaryFile(mode="w+")
- print("bad json", file=tmp)
- tmp.flush()
- ( ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval ) = config_notif.read_hb_properties(tmp.name)
- assert_default_values(ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval)
-
-def test_read_hb_properties_fail_missing_parameter():
- """
- failure case for read_hb_properties: CBS_polling_allowed is missing
- """
- tmp = tempfile.NamedTemporaryFile(mode="w+")
- testdata = {
- "pg_ipAddress": "10.0.0.99",
- "pg_portNum": 65432,
- "pg_dbName": "dbname",
- "pg_userName": "pguser",
- "pg_passwd": "pgpswd",
- # "CBS_polling_allowed": True, # missing CBS_polling_allowed
- "CBS_polling_interval": 30,
- "SERVICE_NAME": "service_name"
- }
- json.dump(testdata, tmp)
- tmp.flush()
- ( ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval ) = config_notif.read_hb_properties(tmp.name)
- assert_default_values(ip_address, port_num, user_name, password, db_name, cbs_polling_required, cbs_polling_interval)
-
-def test_postgres_db_open(monkeypatch):
- """
- test postgres_db_open()
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
- dbconn = config_notif.postgres_db_open("test", "testpswd", "testsite", 5432, "dbname")
- assert(type(dbconn) is monkey_psycopg2.MockConn)
-
-def test_postgres_db_open_fail(monkeypatch):
- """
-    failure case for postgres_db_open()
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces(connect=True)
- dbconn = config_notif.postgres_db_open("test", "badpassword", "testsite", 5432, "dbname")
- assert(type(dbconn) is not monkey_psycopg2.MockConn)
-
-def test_db_table_creation_check(monkeypatch):
- """
- test db_table_creation_check()
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
- dbconn = config_notif.postgres_db_open("test", "testpswd", "testsite", 5432, "dbname")
- dbconn.monkey_setDbInfo({ "select * from information_schema.tables": [ [ "testtable" ] ] })
- assert(type(dbconn) is monkey_psycopg2.MockConn)
- ret = config_notif.db_table_creation_check(dbconn, "testtable")
- assert(ret == True)
- ret2 = config_notif.db_table_creation_check(dbconn, "missingtable")
- monkey_psycopg2.monkey_reset_forces(cursor=True)
- ret3 = config_notif.db_table_creation_check(dbconn, "testtable")
- assert(ret3 is None)
-
-def test_commit_and_close_db(monkeypatch):
- """
- test commit_and_close_db()
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
- dbconn = config_notif.postgres_db_open("test", "testpswd", "testsite", 5432, "dbname")
- assert(type(dbconn) is monkey_psycopg2.MockConn)
- print("commit_and_close_db(): no forced failures")
- ret = config_notif.commit_and_close_db(dbconn)
- assert(ret == True)
-
-def test_commit_and_close_db_fail1(monkeypatch):
- """
- failure case for commit_and_close_db(): dbconn.close() fails
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
- dbconn = config_notif.postgres_db_open("test", "testpswd", "testsite", 5432, "dbname")
- assert(type(dbconn) is monkey_psycopg2.MockConn)
- print("commit_and_close_db() - close failure")
- monkey_psycopg2.monkey_reset_forces(close=True)
- ret = config_notif.commit_and_close_db(dbconn)
- assert(ret == False)
-
-def test_commit_and_close_db_fail2(monkeypatch):
- """
- failure case for commit_and_close_db(): dbconn.commit() fails
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
- dbconn = config_notif.postgres_db_open("test", "testpswd", "testsite", 5432, "dbname")
- assert(type(dbconn) is monkey_psycopg2.MockConn)
- print("commit_and_close_db() - commit failure")
- monkey_psycopg2.monkey_reset_forces(commit=True)
- ret = config_notif.commit_and_close_db(dbconn)
- assert(ret == False)
-
-def test_read_hb_properties_default(monkeypatch):
- """
- test read_hb_properties_default()
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
- monkey_psycopg2.monkey_set_defaults({
- "testdb1": {
- "hb_common": [
- [ 1, "sn1", 31, "st1" ],
- [ 2, "sn2", 32, "st2" ]
- ]
- }
- })
-
- output = config_notif.read_hb_common("test", "testpswd", "testsite", 5432, "testdb1")
- assert(output[0] == 1)
- assert(output[1] == "st1")
- assert(output[2] == "sn1")
- assert(output[3] == 31)
-
-def test_update_hb_common(monkeypatch):
- """
- test update_hb_common()
- """
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
- output = config_notif.update_hb_common(None, 1234, "st1234", "test", "testpswd", "testsite", 5432, "testdb1")
- assert(output == True)
-
-def monkeypatch_get_cbs_config_False():
- """
- monkeypatch for get_cbs_config() to force it to return False
-    Required side effect: c_config is set to a json value
- """
- print("monkeypatch_get_cbs_config_False()")
- mod.trapd_settings.c_config = { "patch": "false" }
- return False
-
-def monkeypatch_get_cbs_config_True():
- """
-    monkeypatch for get_cbs_config() to force it to return True
-    Required side effect: c_config is set to a json value
- """
- print("monkeypatch_get_cbs_config_True()")
- mod.trapd_settings.c_config = { "patch": "true" }
- return True
-
-def test_fetch_json_file_get_cbs_config_is_true(monkeypatch):
- """
- test fetch_json_file() with get_cbs_config() returning True
- """
- monkeypatch.setattr(mod.trapd_get_cbs_config, 'get_cbs_config', monkeypatch_get_cbs_config_True)
- tmp1 = tempfile.NamedTemporaryFile(mode="w+")
- tmp2 = tempfile.NamedTemporaryFile(mode="w+")
- output = config_notif.fetch_json_file(download_json = tmp1.name, config_json = tmp2.name)
- assert(output == tmp1.name)
- with open(tmp1.name, "r") as fp:
- j1 = json.load(fp)
- print(f"j1={j1}")
- assert("patch" in j1 and j1["patch"] == "true")
-
-def test_fetch_json_file_get_cbs_config_is_false(monkeypatch):
- """
- test fetch_json_file() with get_cbs_config() returning False
- """
- monkeypatch.setattr(mod.trapd_get_cbs_config, 'get_cbs_config', monkeypatch_get_cbs_config_False)
- tmp1 = tempfile.NamedTemporaryFile(mode="w+")
- tmp2 = tempfile.NamedTemporaryFile(mode="w+")
- output = config_notif.fetch_json_file(download_json = tmp1.name, config_json = tmp2.name)
- assert(output == tmp2.name)
-
-FETCH_JSON_FILE = None
-
-def monkeypatch_fetch_json_file():
- """
- Monkeypatch for fetch_json_file() to test config_notif_run()
- """
- print("monkeypatch_fetch_json_file()")
- return FETCH_JSON_FILE
-
-def monkeypatch_return_False(*args, **kwargs):
- """
- Monkeypatch that can be used to force a function to return False
- """
- print("monkeypatch_return_False()")
- return False
-
-
-def test_config_notif_run_good(monkeypatch):
- """
- test config_notif_run()
- everything good: "dbname" found (from below JSON info), "hb_common" listed in tables
- and hb_common has data.
- """
- monkeypatch.setattr(config_notif, 'fetch_json_file', monkeypatch_fetch_json_file)
-
- tmp = tempfile.NamedTemporaryFile(mode="w+")
- global FETCH_JSON_FILE
- FETCH_JSON_FILE = tmp.name
-
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
-
- monkey_psycopg2.monkey_set_defaults({
- "dbname": {
- "from information_schema.tables": [
- [ "hb_common" ]
- ],
- "from hb_common": [
- [ 1, "sn1", 31, "st1" ],
- [ 2, "sn2", 32, "st2" ]
- ]
- }
- })
-
- testdata = {
- "pg_ipAddress": "10.0.0.99",
- "pg_portNum": 65432,
- "pg_dbName": "dbname",
- "pg_userName": "pguser",
- "pg_passwd": "pgpswd",
- "CBS_polling_allowed": True,
- "CBS_polling_interval": 30,
- "SERVICE_NAME": "service_name"
- }
- json.dump(testdata, tmp)
- tmp.flush()
-
- output = config_notif.config_notif_run()
- print(f"output={output}")
- assert(output == True)
-
-def test_config_notif_run_fail1(monkeypatch):
- """
- test config_notif_run()
- Failure case 1: "dbname" NOT found (from below JSON info), "hb_common" listed in tables
- and hb_common has data.
- """
- monkeypatch.setattr(config_notif, 'fetch_json_file', monkeypatch_fetch_json_file)
-
- tmp = tempfile.NamedTemporaryFile(mode="w+")
- global FETCH_JSON_FILE
- FETCH_JSON_FILE = tmp.name
-
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
-
- monkey_psycopg2.monkey_set_defaults({
- "dbnameNOTHERE": {
- "from information_schema.tables": [
- [ "hb_common" ]
- ],
- "from hb_common": [
- [ 1, "sn1", 31, "st1" ],
- [ 2, "sn2", 32, "st2" ]
- ]
- }
- })
-
- testdata = {
- "pg_ipAddress": "10.0.0.99",
- "pg_portNum": 65432,
- "pg_dbName": "dbname",
- "pg_userName": "pguser",
- "pg_passwd": "pgpswd",
- "CBS_polling_allowed": True,
- "CBS_polling_interval": 30,
- "SERVICE_NAME": "service_name"
- }
- json.dump(testdata, tmp)
- tmp.flush()
-
- output = config_notif.config_notif_run()
- print(f"output={output}")
- assert(output is None)
-
-def test_config_notif_run_fail2(monkeypatch):
- """
- test config_notif_run()
- Failure case 2: "dbname" found (from below JSON info), "hb_common" NOT listed in tables
- and hb_common has data.
- """
- monkeypatch.setattr(config_notif, 'fetch_json_file', monkeypatch_fetch_json_file)
-
- tmp = tempfile.NamedTemporaryFile(mode="w+")
- global FETCH_JSON_FILE
- FETCH_JSON_FILE = tmp.name
-
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
-
- monkey_psycopg2.monkey_set_defaults({
- "dbname": {
- "from information_schema.tables": [
- [ "hb_commonNOTHERE" ]
- ],
- "from hb_common": [
- [ 1, "sn1", 31, "st1" ],
- [ 2, "sn2", 32, "st2" ]
- ]
- }
- })
-
- testdata = {
- "pg_ipAddress": "10.0.0.99",
- "pg_portNum": 65432,
- "pg_dbName": "dbname",
- "pg_userName": "pguser",
- "pg_passwd": "pgpswd",
- "CBS_polling_allowed": True,
- "CBS_polling_interval": 30,
- "SERVICE_NAME": "service_name"
- }
- json.dump(testdata, tmp)
- tmp.flush()
-
- output = config_notif.config_notif_run()
- print(f"output={output}")
- assert(output is None)
-
-def test_config_notif_run_fail3(monkeypatch):
- """
- test config_notif_run()
- Failure case 3: "dbname" found (from below JSON info), "hb_common" listed in tables
- and update_hb_common() fails
- """
- monkeypatch.setattr(config_notif, 'fetch_json_file', monkeypatch_fetch_json_file)
- monkeypatch.setattr(config_notif, 'update_hb_common', monkeypatch_return_False)
-
- tmp = tempfile.NamedTemporaryFile(mode="w+")
- global FETCH_JSON_FILE
- FETCH_JSON_FILE = tmp.name
-
- monkeypatch.setattr(psycopg2, 'connect', monkey_psycopg2.monkey_connect)
- monkey_psycopg2.monkey_reset_forces()
-
- monkey_psycopg2.monkey_set_defaults({
- "dbname": {
- "from information_schema.tables": [
- [ "hb_common" ]
- ],
- "from hb_common": [
- [ 1, "sn1", 31, "st1" ],
- [ 2, "sn2", 32, "st2" ]
- ]
- }
- })
-
- testdata = {
- "pg_ipAddress": "10.0.0.99",
- "pg_portNum": 65432,
- "pg_dbName": "dbname",
- "pg_userName": "pguser",
- "pg_passwd": "pgpswd",
- "CBS_polling_allowed": True,
- "CBS_polling_interval": 30,
- "SERVICE_NAME": "service_name"
- }
- json.dump(testdata, tmp)
- tmp.flush()
-
- output = config_notif.config_notif_run()
- print(f"output={output}")
- assert(output == False)
diff --git a/tests/test_trapd_vnf_table.py b/tests/test_trapd_vnf_table.py
index f89aa6a..577a320 100644
--- a/tests/test_trapd_vnf_table.py
+++ b/tests/test_trapd_vnf_table.py
@@ -28,7 +28,7 @@ import unittest
import get_logger
from mod.trapd_vnf_table import (
verify_DB_creation_1, verify_DB_creation_2, verify_DB_creation_hb_common,
- verify_cbsPolling_required, hb_properties, verify_cbspolling,
+ hb_properties, verify_cbspolling,
verify_sendControlLoop_VNF_ONSET, verify_sendControlLoop_VM_ONSET,
verify_sendControlLoop_VNF_ABATED, verify_sendControlLoop_VM_ABATED,
verify_fetch_json_file, verify_misshtbtdmain, verify_dbmonitoring,
@@ -56,10 +56,6 @@ class test_vnf_tables(unittest.TestCase):
self.assertEqual(result, True)
- def test_validate_cbspolling_required(self):
- result = verify_cbsPolling_required()
- self.assertEqual(result, True)
-
def test_cbspolling(self):
result= verify_cbspolling()
_logger.info(result)