From c7c4cc227ed9447b4fdceeceece35384404bd7ec Mon Sep 17 00:00:00 2001
From: Moshe
Date: Wed, 20 Jun 2018 10:23:28 +0300
Subject: Add validation abilities to test cases

Change-Id: I76b28e6170d6e91836b195d58c0b882168c11a67
Issue-ID: VNFSDK-275
Signed-off-by: Moshe

Add unit tests

Issue-ID: VNFSDK-275
Change-Id: I34bc9a11e16e4092fdad3b4a1733c7219e624f5f
Signed-off-by: Moshe

add unit tests

Issue-ID: VNFSDK-275
Change-Id: Ib99c3521438b002e0d8aaff9870224673e34899f
Signed-off-by: Moshe

add unit tests

Issue-ID: VNFSDK-275
Change-Id: I1ac560dfb40df5f346b0db8f40b8c52a2fb6b350
Signed-off-by: Moshe
---
 vnftest/crawlers/base.py    | 15 ++++++++++++++-
 vnftest/crawlers/default.py |  8 ++++++++
 2 files changed, 22 insertions(+), 1 deletion(-)

(limited to 'vnftest/crawlers')

diff --git a/vnftest/crawlers/base.py b/vnftest/crawlers/base.py
index 8b5a526..c7813e8 100755
--- a/vnftest/crawlers/base.py
+++ b/vnftest/crawlers/base.py
@@ -25,7 +25,7 @@ class Crawler(object):
     @staticmethod
     def get_cls(crawler_type):
         """return class of specified type"""
-        for crawler in utils.itersubclasses(Crawler):
+        for crawler in utils.findsubclasses(Crawler):
             if crawler_type == crawler.__crawler_type__:
                 return crawler
         raise RuntimeError("No such crawler_type %s" % crawler_type)
@@ -35,3 +35,16 @@ class Crawler(object):
 
     def crawl(self, dictionary, path):
         raise NotImplementedError
+
+    @staticmethod
+    def crawl(json_as_dict, output_config):
+        output = {}
+        for output_parameter in output_config:
+            param_name = output_parameter['parameter_name']
+            param_value = output_parameter.get('value', "[]")
+            crawler_type = output_parameter.get('type', 'default')
+            crawler_class = Crawler.get_cls(crawler_type)
+            crawler = crawler_class()
+            param_value = crawler.crawl(json_as_dict, param_value)
+            output[param_name] = param_value
+        return output
diff --git a/vnftest/crawlers/default.py b/vnftest/crawlers/default.py
index da4df0a..74f9554 100644
--- a/vnftest/crawlers/default.py
+++ b/vnftest/crawlers/default.py
@@ -13,6 +13,9 @@
 ##############################################################################
 
 from __future__ import absolute_import
+
+from vnftest.common.exceptions import MandatoryKeyException
+
 from vnftest.crawlers import base
 import logging
 
@@ -23,6 +26,9 @@ class DefaultCrawler(base.Crawler):
     __crawler_type__ = 'default'
 
     def crawl(self, dictionary, path):
+        if path.find("[") < 0:
+            return path  # the path is a hardcoded value
+
         path_list = path.split("[")
         value = dictionary
         for path_element in path_list:
@@ -32,4 +38,6 @@ class DefaultCrawler(base.Crawler):
             if isinstance(value, list):
                 path_element = int(path_element)
             value = value[path_element]
+        if value is None:
+            raise MandatoryKeyException(key_name='param_path', class_name=str(dictionary))
         return value
--
cgit 1.2.3-korg
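
For context, a minimal usage sketch of the new output_config-driven entry point added to vnftest/crawlers/base.py. The response dict, the output_config entries, and the bracket paths are invented for illustration; the sketch assumes the vnftest package is importable (including the utils.findsubclasses helper referenced above, which lives outside the filtered paths) and that the lines of DefaultCrawler.crawl not shown in this diff strip the closing ']' and skip empty path segments.

# Hypothetical usage sketch, not part of this change.
from vnftest.crawlers.base import Crawler
from vnftest.crawlers import default  # noqa: F401 -- imported so get_cls('default') can find DefaultCrawler

# Invented sample of a parsed JSON response from a VNF operation.
response = {
    "vnf": {"id": "vnf-123", "status": "ACTIVE"},
    "networks": [{"name": "oam"}, {"name": "data"}],
}

# Each entry names an output parameter and where to crawl its value from;
# 'type' is omitted, so the 'default' crawler is used.
output_config = [
    {"parameter_name": "vnf_id", "value": "[vnf][id]"},               # nested dict lookup
    {"parameter_name": "first_net", "value": "[networks][0][name]"},  # list index
    {"parameter_name": "note", "value": "hardcoded"},                 # no '[' -> returned as-is
]

outputs = Crawler.crawl(response, output_config)
print(outputs)
# Expected, under the assumptions above:
# {'vnf_id': 'vnf-123', 'first_net': 'oam', 'note': 'hardcoded'}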
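
Similarly, a sketch of the two branches added to DefaultCrawler.crawl itself: a path without '[' is returned verbatim as a hardcoded value, and a path that resolves to None now raises MandatoryKeyException. The sample dictionary is invented, the exception signature is taken from the diff above, and the bracket path again relies on the unshown ']'-stripping behaviour.

# Hypothetical illustration, not part of this change.
from vnftest.common.exceptions import MandatoryKeyException
from vnftest.crawlers.default import DefaultCrawler

crawler = DefaultCrawler()
data = {"status": "ACTIVE", "detail": None}  # invented sample

# No '[' in the path: the string itself is the output value.
assert crawler.crawl(data, "literal-value") == "literal-value"

# A path that resolves to None is treated as a missing mandatory value.
try:
    crawler.crawl(data, "[detail]")
except MandatoryKeyException:
    print("mandatory output value is missing")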