path: root/vnfs/DAaaS/lib
author    Rajamohan Raj <rajamohan.raj@intel.com>  2019-05-31 23:36:21 +0000
committer Rajamohan Raj <rajamohan.raj@intel.com>  2019-06-03 17:50:48 +0000
commit    e8abc0fdf80f9c75b8f789cd5eb5dd3e5c4510a8 (patch)
tree      4e343c21921f91e2e2b47aa2d1a2fb6b3373982d /vnfs/DAaaS/lib
parent    abbee1b46d58da6f3b297724f11f873a2567c3a8 (diff)
Handle more error scenarios for promql_api
Handle more error scenarios for promql and improve README. Added copyright header. Change-Id: Idb3ac7e2aea6fe3e6df069f63e63b2a5208f96d1 Issue-ID: ONAPARC-452 Signed-off-by: Rajamohan Raj <rajamohan.raj@intel.com>
Diffstat (limited to 'vnfs/DAaaS/lib')
-rw-r--r--  vnfs/DAaaS/lib/promql_api/README.md       36
-rw-r--r--  vnfs/DAaaS/lib/promql_api/prom_ql_api.py  65
2 files changed, 77 insertions, 24 deletions
diff --git a/vnfs/DAaaS/lib/promql_api/README.md b/vnfs/DAaaS/lib/promql_api/README.md
index de64fe0b..f2d45951 100644
--- a/vnfs/DAaaS/lib/promql_api/README.md
+++ b/vnfs/DAaaS/lib/promql_api/README.md
@@ -1,12 +1,12 @@
## What does this API do ?
-This api as of now provides a function which takes in a lits of 'LABELS' of prometheus
+This API currently provides a function that takes in a list of Prometheus query strings ('QUERY_STRING')
and returns the corresponding result_sets in a list.
For eg:
-If the labels is
+If the QUERY_STRING is
```
-LABELS = ['irate(collectd_cpufreq{exported_instance="otconap7",cpufreq="1"}[2m])']
+QUERY_STRING = ['irate(collectd_cpufreq{exported_instance="otconap7",cpufreq="1"}[2m])']
```
The return is:
@@ -22,7 +22,7 @@ The return is:
'service': 'collectd'},
'value': [1559177169.415, '119727200']}]
```
-
+
## How to use this API ?
```
@@ -35,13 +35,13 @@ from promql_api.prom_ql_api import query
```
```
-3. have a global or local variable as 'LABELS'
-LABELS = ['irate(collectd_cpufreq{exported_instance="otconap7",cpufreq="1"}[2m])']
+3. Have a global or local variable named 'QUERY_STRING'
+QUERY_STRING = ['irate(collectd_cpufreq{exported_instance="otconap7",cpufreq="1"}[2m])']
```
```
4. Store the result set in a list:
-list_of_result_sets = query(LABELS)
+list_of_result_sets = query(QUERY_STRING)
```
## Troubleshooting tips
@@ -49,6 +49,7 @@ list_of_result_sets = query(LABELS)
* Check the sample file - sample_promql_query.py in the repo ( sample-apps/m3db_promql)
* Make sure the file "__init__.py" is present in promql_api directory after you copy the directory.
* Make sure environment variables like "DATA_ENDPOINT" are correctly set.
+* For custom and advanced querying, see https://prometheus.io/docs/prometheus/latest/querying/api/
* Logs are generated in the directory where the query function is called.
* sample log file - promql_api.log
@@ -63,3 +64,24 @@ list_of_result_sets = query(LABELS)
05-30-2019 08:47:53PM ::prom_ql_api.py :: query :: INFO :: ::::::::::RESULTS::::::::::::: irate(collectd_cpufreq{exported_instance="otconap7",cpufreq="1"}[2m])
05-30-2019 08:47:53PM ::prom_ql_api.py :: query :: INFO :: {'metric': {'cpufreq': '1', 'endpoint': 'collectd- prometheus', 'exported_instance': 'otconap7', 'instance': '172.25.103.1:9103', 'job': 'collectd', 'namespace': 'edge1', 'pod': 'plundering-liger-collectd-wz7xg', 'service': 'collectd'}, 'value': [1559249299.084, '236300']}
```
+
+ * Tested error scenario: configure QUERY_STRING as:
+ ```
+ QUERY_STRING = ['irate(collectd_cpufreq{exported_instance="otconap7", cpufreq="1"}[2m])', 'collectd_cpu_percent{job="collectd" exported_instance="an11-31"}[1m]']
+ ```
+ Output:
+ ```
+ Check logs..HTTP error occurred: 400 Client Error: Bad Request for url: http://172.25.103.1:30090/api/v1/query?query=collectd_cpu_percent%7Bjob%3D%22collectd%22+exported_instance%3D%22an11-31%22%7D%5B1m%5D
+[{'metric': {'cpufreq': '1',
+ 'endpoint': 'collectd-prometheus',
+ 'exported_instance': 'otconap7',
+ 'instance': '172.25.103.1:9103',
+ 'job': 'collectd',
+ 'namespace': 'edge1',
+ 'pod': 'plundering-liger-collectd-wz7xg',
+ 'service': 'collectd'},
+ 'value': [1559343866.631, '119798600']}]
+{'error': 'parse error at char 37: missing comma before next identifier '
+ '"exported_instance"',
+ 'errorType': 'bad_data'}
+```
diff --git a/vnfs/DAaaS/lib/promql_api/prom_ql_api.py b/vnfs/DAaaS/lib/promql_api/prom_ql_api.py
index 970d2416..8a5ce508 100644
--- a/vnfs/DAaaS/lib/promql_api/prom_ql_api.py
+++ b/vnfs/DAaaS/lib/promql_api/prom_ql_api.py
@@ -1,3 +1,21 @@
+# -------------------------------------------------------------------------
+# Copyright (c) 2019 Intel Corporation Intellectual Property
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# -------------------------------------------------------------------------
+
+
from __future__ import print_function
from os import environ
import logging
@@ -5,12 +23,12 @@ import requests
from requests.exceptions import HTTPError
-
API_VERSION = '/api/v1/query'
LIST_OF_ENV_VARIABLES = ["DATA_ENDPOINT"]
MAP_ENV_VARIABLES = dict()
LOG = logging.getLogger(__name__)
+
def set_log_config():
    logging.basicConfig(format='%(asctime)s ::%(filename)s :: %(funcName)s :: %(levelname)s :: %(message)s',
                        datefmt='%m-%d-%Y %I:%M:%S%p',
@@ -32,12 +50,12 @@ def load_and_validate_env_vars(list_of_env_vars):
            raise KeyError("Env variable: {} not found ! ".format(env_var.upper()))
-def query(LABELS):
+def query(QUERY_STRING):
    """
    Input parameters:
-    LABELS : a list of the LABELS like ['irate(collectd_cpufreq{exported_instance="otconap7",cpufreq="1"}[2m])']
+    QUERY_STRING : a list of the query strings like ['irate(collectd_cpufreq{exported_instance="otconap7",cpufreq="1"}[2m])']
    Return:
-    returns a list of result sets of different labels.
+    returns a list of result sets corresponding to each of the query strings.
    SAMPLE O/P:
    [{'metric': {'cpufreq': '1',
                 'endpoint': 'collectd-prometheus',
@@ -59,24 +77,37 @@ def query(LABELS):
    list_of_substrings.append(API_VERSION)
    url = ''.join(list_of_substrings)
-    for each_label in LABELS:
-        params_map['query'] = each_label
+    for each_query_string in QUERY_STRING:
+        params_map['query'] = each_query_string
        try:
            LOG.info('API request::: URL: {} '.format(url))
            LOG.info('API request::: params: {} '.format(params_map))
            response = requests.get(url, params=params_map)
-            response.raise_for_status()
+            response.raise_for_status()  # This might raise HTTPError, which is handled in the except block below
        except HTTPError as http_err:
-            print(f'HTTP error occurred: {http_err}')
-            return None
+            if response.json()['status'] == "error":
+                LOG.error("::::ERROR OCCURRED::::")
+                LOG.error("::::ERROR TYPE:::: {}".format(response.json()['errorType']))
+                LOG.error("::::ERROR:::: {}".format(response.json()['error']))
+                list_of_result_sets.append(dict({'error': response.json()['error'],
+                                                 'errorType': response.json()['errorType']}))
+            print(f'Check logs..HTTP error occurred: {http_err}')
+
        except Exception as err:
-            print(f'Other error occurred: {err}')
-            return None
-        else:
+            print(f'Check logs..Other error occurred: {err}')
-            results = response.json()['data']['result']
-            LOG.info('::::::::::RESULTS::::::::::::: {}'.format(each_label))
-            for each_result in results:
-                LOG.info(each_result)
-            list_of_result_sets.append(results)
+        else:
+            if response.json()['status'] == "error":
+                LOG.error("::::ERROR OCCURRED::::")
+                LOG.error("::::ERROR TYPE:::: {}".format(response.json()['errorType']))
+                LOG.error("::::ERROR:::: {}".format(response.json()['error']))
+                list_of_result_sets.append(dict({'error': response.json()['error'],
+                                                 'errorType': response.json()['errorType']}))
+            else:
+                results = response.json()['data']['result']
+                LOG.info('::::::::::RESULTS::::::::::::: {}'.format(each_query_string))
+                for each_result in results:
+                    LOG.info(each_result)
+                list_of_result_sets.append(results)
    return list_of_result_sets