-rw-r--r--   .coveragerc                                    4
-rw-r--r--   .gitignore                                     3
-rw-r--r--   Changelog.md                                  67
-rw-r--r--   Dockerfile                                    32
-rw-r--r--   LICENSE.txt                                    6
-rw-r--r--   README.md                                     99
-rw-r--r--   app/app/MANIFEST.in                            1
-rw-r--r--   app/app/__init__.py                            0
-rw-r--r--   app/app/config_binding_service/__init__.py (renamed from config_binding_service/__init__.py)   30
-rw-r--r--   app/app/config_binding_service/client.py     297
-rw-r--r--   app/app/config_binding_service/controller.py 108
-rw-r--r--   app/app/config_binding_service/logging.py    204
-rwxr-xr-x   app/app/main.py                               32
-rw-r--r--   app/app/pom.xml                              270
-rw-r--r--   app/app/setup.py (renamed from setup.py)      33
-rw-r--r--   app/app/swagger.yaml                         100
-rw-r--r--   app/app/tests/__init__.py                     21
-rw-r--r--   app/app/tests/conftest.py                     70
-rw-r--r--   app/app/tests/test_client.py                 168
-rw-r--r--   app/app/tests/test_controller.py             120
-rw-r--r--   app/app/tox.ini                               26
-rw-r--r--   app/pom.xml                                   80
-rw-r--r--   app/uwsgi.ini                                  3
-rwxr-xr-x   bin/run.py                                    15
-rw-r--r--   config_binding_service/client.py             181
-rw-r--r--   config_binding_service/controller.py          49
-rw-r--r--   config_binding_service/swagger/swagger.yaml   33
-rw-r--r--   doc/cbs_diagram.png                          bin 72540 -> 0 bytes
-rwxr-xr-x   mvn-phase-script.sh                          288
-rw-r--r--   nginxhttps.conf                               17
-rw-r--r--   pom.xml                                      236
-rw-r--r--   requirements.txt                               4
-rw-r--r--   tests/test_binding.py                        139
-rw-r--r--   tox-local.ini                                 18
-rw-r--r--   tox.ini                                       14
-rw-r--r--   version.properties                            12
36 files changed, 1773 insertions, 1007 deletions
diff --git a/.coveragerc b/.coveragerc
index fdedea1..10793c3 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -23,7 +23,3 @@ exclude_lines =
if __name__ == .__main__.:
ignore_errors = True
-
-[xml]
-output = coverage-reports/coverage-configbinding.xml
-
diff --git a/.gitignore b/.gitignore
index c86ccf6..6b91aaa 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,6 @@
+.pytest_cache/
+run_tests.sh
+run_tests_https.sh
xunit-results.xml
.DS_Store
# Byte-compiled / optimized / DLL files
diff --git a/Changelog.md b/Changelog.md
index 9985855..d1c2fac 100644
--- a/Changelog.md
+++ b/Changelog.md
@@ -1,13 +1,70 @@
# Change Log
All notable changes to this project will be documented in this file.
-The format is based on [Keep a Changelog](http://keepachangelog.com/)
+The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
+## [2.2.3] - 7/25/2018
+* By request, include a self signed cert so the image always comes up.
+
+## [2.2.2] - 7/9/2018
+* Add EELF metrics log and logging statements
+* Fixed a redundant Consul call where client.resolve_all did not need to call the transaction API twice
+* Fix some comments / add deprecation warnings
+
+## [2.2.1] - 7/5/2018
+* Fix bug where healthcheck call was not in the audit log
+* Add service_component_name into the audit record message field on audit calls
+* Rename "log.log" to "audit.log"
+* Add EELF compliant "error.log"
+
+## [2.2.0] - 6/26/2018
+* Productionalize by moving to NGINX+UWSGI. Flask was not meant to be run as a production server
+* This is towards HTTPS support, which will now be done via NGINX reverse proxying instead of in the application code itself
+* The app structure has changed due to the project I am now using for this. See https://hub.docker.com/r/tiangolo/uwsgi-nginx-flask/
+
+## [2.1.5] - 4/10/2018
+* Fix a bug where an invalid JSON in Consul blows up the CBS
+* Refactor the tests into smaller files
+
+## [2.1.4] - 4/3/2018
+* Adhere to EELF metrics log for the log file
+
+## [2.1.3]
+* Small cleanups; move swagger, remove bin, do proper install in Dockerfile
+
+## [2.1.2]
+* Log to a file to be picked up by ELK per DCAEGEN2-387
+* Logging not totally finished w.r.t. formats, but this at least logs more and gets them into ELK
+
+## [2.1.1]
+* [Shamefully this entry was missing]
+
+## [2.1.0]
+* Add a generic API for getting arbitrary keys
+* Some PEP8/Pylint compliance
+* Fix SONAR complaints
+
+## [2.0.0]
+* Remove policy and DTI APIs
+* Add new API that returns Config, Policy, DTI, Everything
+* Test coverage 82%
+
+## [1.3.1]
+* Add more tests (Currently 75%)
+* Fix licenses
+
+## [1.3.0]
+* Sync ONAP with Internal CBS
+* Add tests (Currently 62%)
+* Update docker python version to 3.6
+* Move installation of reqs into Docker container
+
## [1.2.0]
-* Remove waterfalled CONSUL_HOST
+* Remove waterfalled CONSUL_HOST
* Add ONAP liscenses
* Remove references to specific telco and it's IPs in tests
+* [Internal version conflict]: Add dti and policies endpoints
## [1.1.0]
* Add a healthcheck endpoint
@@ -15,11 +72,11 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
## [1.0.1]
* Fix {{}} to resolve to [] instead of whatever is in rels key
-* Remove all impure tests. All tests are now unit tests.
+* Remove all impure tests. All tests are now unit tests.
## [1.0.0]
-* GLORIOUS CHANGE! At some point, CASK fixed a bug where if you sent a configuration JSON to CDAP that contained a value that was not a string, it would blow up. This allows me to remove the endpoint specific to CDAP components so the same endpoint is now used for Docker and CDAP.
-* Props to Terry Troutman for helping me discover this.
+* GLORIOUS CHANGE! At some point, CASK fixed a bug where if you sent a configuration JSON to CDAP that contained a value that was not a string, it would blow up. This allows me to remove the endpoint specific to CDAP components so the same endpoint is now used for Docker and CDAP.
+* Props to Terry Troutman for helping me discover this.
* Removes some impure tests. Still some impurity there
## [0.9.0]
diff --git a/Dockerfile b/Dockerfile
index 1163e9f..6c783f0 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,14 +1,30 @@
-FROM python:3.6
+FROM tiangolo/uwsgi-nginx-flask:python3.6
MAINTAINER tommy@research.att.com
-ADD . /tmp
+#setup uwsgi+nginx
+# https://hub.docker.com/r/tiangolo/uwsgi-nginx-flask/
+COPY ./app /app
-#need pip > 8 to have internal pypi repo in requirements.txt
-RUN pip install --upgrade pip
-#do the install
-WORKDIR /tmp
-RUN pip install -e .
+RUN pip install --upgrade pip
+RUN pip install /app/app
+RUN mkdir -p /opt/logs/
+
+# create the dir for the ssl certs
+RUN mkdir -p /etc/nginx/ssl
+
+COPY nginxhttps.conf /etc/nginx/conf.d/nginxhttps.conf
+
+#443 is https, 10000 is http
+# in the future, hopefully http can go away completely
+ENV LISTEN_PORT 10000
+EXPOSE 443
EXPOSE 10000
-CMD run.py
+# Generate a self-signed certificate so the image always comes up; it should be overwritten (mounted over) at run time
+RUN apt-get update && \
+ apt-get install -y openssl && \
+ openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout /etc/nginx/ssl/nginx.key -out /etc/nginx/ssl/nginx.crt -subj "/C=US/ST=NJ/L=foo/O=ONAP/OU=ONAP/CN=configbinding"
+
+#this is a registrator flag that tells it to ignore 80 from service discovery. Nothing is listening on 80, but the parent Dockerfile here exposes it. This container is internally listening on 10000 and 443.
+ENV SERVICE_80_IGNORE true
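
A quick way to sanity-check a container built from this Dockerfile is to hit the healthcheck on both listeners. This is a minimal sketch, assuming the container was published as `-p 10000:10000 -p 8443:443` on localhost; `verify=False` is only tolerable here because the image ships a self-signed certificate.

```
import requests

# HTTP listener (NGINX on 10000)
print(requests.get("http://localhost:10000/healthcheck").status_code)

# HTTPS listener (NGINX on 443, published here as 8443); verification is disabled
# for this smoke test only because the baked-in certificate is self-signed
print(requests.get("https://localhost:8443/healthcheck", verify=False).status_code)
```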
diff --git a/LICENSE.txt b/LICENSE.txt
index 69d5fc1..9536f0b 100644
--- a/LICENSE.txt
+++ b/LICENSE.txt
@@ -1,11 +1,11 @@
/*
* ============LICENSE_START==========================================
* ===================================================================
-* Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+* Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
* ===================================================================
*
* Unless otherwise specified, all software contained herein is licensed
-* under the Apache License, Version 2.0 (the “License”);
+* under the Apache License, Version 2.0 (the "License");
* you may not use this software except in compliance with the License.
* You may obtain a copy of the License at
*
@@ -20,7 +20,7 @@
*
*
* Unless otherwise specified, all documentation contained herein is licensed
-* under the Creative Commons License, Attribution 4.0 Intl. (the “License”);
+* under the Creative Commons License, Attribution 4.0 Intl. (the "License");
* you may not use this documentation except in compliance with the License.
* You may obtain a copy of the License at
*
diff --git a/README.md b/README.md
index 39c740b..6cc17c4 100644
--- a/README.md
+++ b/README.md
@@ -1,29 +1,27 @@
# config_binding_service
-# Interface Diagram
-This repo is the thing in red:
+# Changelog
+All changes are logged in Changelog.md
-![Alt text](doc/cbs_diagram.png?raw=true)
-
-# Overview
+# Overview
DCAE has a "templating language" built into components' configurations, as explained further below.
The orchestrator populates one/two keys (depending on the blueprint) into Consul that are used to *bind* component configurations config, a "rels key" and a "dmaap key".
-If component A wants to connect to a component of type B, then A's rels key holds what specific service component name of B that A should connect to over direct HTTP.
-Service component name here means the full name that the component of type B is registered under in Consul (there can be multiple components of type B registered in Consul).
-The CBS (config binding service) then pulls down that rels key, fetches the connection information about that B (IP:Port), and replaces it into A's config.
+If component A wants to connect to a component of type B, then A's rels key holds what specific service component name of B that A should connect to over direct HTTP.
+Service component name here means the full name that the component of type B is registered under in Consul (there can be multiple components of type B registered in Consul).
+The CBS (config binding service) then pulls down that rels key, fetches the connection information about that B (IP:Port), and replaces it into A's config.
There is also a "dmaap key", which is the same concept, except what gets injected is a JSON of DMaaP connection information instead of an IP:Port.
-# Usage
-hit `url_of_this/service_component/service_component_name` and you are returned your bound config.
+In addition, this service provides the capability to retrieve either the DTI events (not history) or the policies for a given service_component.
-(Note: there is also a backdoor in the `client` module that allows you to pass in a direct JSON and a direct rels, but this isn't exposed via the HTTP API as of now)
+# Usage
+See the Swagger spec.
# Assumptions
1. `CONSUL_HOST` is set as an environmental variable where this binding service is run. If it is not, it defaults to the Rework Consul which is probably not what you want.
2. `service_component_name` is in consul as a key and holds the config
-3. `service_component_name:rel` is in consul as a key *if* you are expecting a direct HTTP resolution, and holds the service component names of connections.
-4. `service_component_name:dmaap` is in consul *if* you are expecting a DMaaP resolution, and holds the components DMaaP information.
+3. `service_component_name:rel` is in consul as a key *if* you are expecting a direct HTTP resolution, and holds the service component names of connections.
+4. `service_component_name:dmaap` is in consul *if* you are expecting a DMaaP resolution, and holds the components DMaaP information.
# Templating Language
The CBS tries to resolve a component's configuration with a templating language. We have two templating languages embedded in our component's configuration (`{{...}}` and `<<...>>`). There are two because the CBS has to be able to distinguish between a rels-key-resolve and a dmaap-key-resolve. That is, if component X is trying to bind their component, and they want to talk to Y, someone has to tell the CBS whether they are trying to talk via IP:port or a feed.
@@ -39,7 +37,80 @@ X's configuration:
}
```
-# Tests And Test Coverage
+# A note about directory structure
+This project uses https://hub.docker.com/r/tiangolo/uwsgi-nginx-flask/
+This is a solution that runs a productionalized setup using NGINX+uwsgi+Flask (Flask is not meant to be run as a real webserver per their docs). This project requires the app/app structure. Tox still works from the root due to tox magic.
+
+This structure, combined with Sonar limitations, leads to an unfortunate need for three nested poms: a top-level pom, a tiny pom in /app, and the "main" pom in /app/app.
+
+# Development
+## Version changes
+An unfortunate consequence of the nested poms is that development changes require a version bump in several places. They are:
+1. top level pom
+2. pom in /app
+3. pom in /app/app
+4. setup.py in /app/app
+5. Changelog.md
+
+Additionally, if the development leads to an API change,
+6. swagger.yaml in /app/app
+
+## Testing
+You need `tox`.
+
+To recreate the tox that the ONAP build process calls, from /app/app, *not in a virtual env*, just run:
+```
+tox
+```
+
+For local development, there is a tox configuration that outputs an HTML report, which is easier to read and navigate than XML. From the *root*, run
```
tox -c tox-local.ini
```
+
+# Deployment
+
+## Ports, HTTPS key/cert location
+
+The CBS frontend (NGINX) exposes 10000 and 443. It runs HTTP on 10000 and HTTPS on 443. 80 is also exposed by the parent Dockerfile but nothing is listening there so it can be ignored.
+
+The Docker image ships its own self-signed certificate. If deploying into a production-level scenario, *you should overwrite this cert!* It expects a key to be mounted at `/etc/nginx/ssl/nginx.key` and a cert to be mounted at `/etc/nginx/ssl/nginx.crt`. For example, a snippet from a `docker run` command:
+
+```
+... -v /host/path/to/nginx.key:/etc/nginx/ssl/nginx.key -v /host/path/to/nginx.crt:/etc/nginx/ssl/nginx.crt ...
+```
+
+These ports can be mapped externally to whatever you like. To keep the legacy behavior of prior ONAP releases (HTTP on 10000), map 10000:10000. Or, you can now make 10000 HTTPS by mapping 10000:443. This is determined by the deployment blueprint.
+
+## Non-K8, Registrator, Consul setup
+This section only pertains to a very specific setup of using Registrator and Consul (registrator to register a Consul healthcheck, and relying on Consul health checking). This section does *not* pertain to a Kubernetes deployment that uses K8 "readiness probes" instead of Consul.
+
+There is a combination of issues, rooted in a bug in registrator:
+1. https://jira.onap.org/browse/DCAEGEN2-482
+2. https://github.com/gliderlabs/registrator/issues/605
+
+That causes the Consul registration to be suffixed with ports, breaking the expected service name (`config_binding_service`), **even if** those ports are not mapped externally. That is, even if only one of the two ports (10000,443) is mapped, due to the above-linked bug, the service name will be wrong in Consul.
+
+The solution is to run the container with a series of ENV variables. If you want the healthchecks to go over HTTPS, you also need to run the latest version of registrator from `master`. The old (three-year-old) `v7` release does not allow for HTTPS healthchecks. The below example fixes the service name, turns OFF HTTP healthchecks, and turns ON HTTPS healthchecks (only works with the latest registrator):
+
+```
+ENV SERVICE_10000_IGNORE true
+ENV SERVICE_443_NAME config_binding_service
+ENV SERVICE_443_CHECK_HTTPS /healthcheck
+ENV SERVICE_443_CHECK_INTERVAL 15s
+```
+
+E.g., in Docker run terminology:
+
+```
+... -e SERVICE_10000_IGNORE=true -e SERVICE_443_NAME=config_binding_service -e SERVICE_443_CHECK_HTTPS=/healthcheck -e SERVICE_443_CHECK_INTERVAL=15s ...
+```
+
+If you wish to turn ON HTTP healthchecks and turn OFF HTTPS healthchecks, switch 10000 and 443 above. That will work even with `v7` of registrator (that is, `SERVICE_x_CHECK_HTTP` was already supported).
+
+## Running locally for development (no docker)
+It is recommended that you do this step in a virtualenv.
+(`set -x` is Fish notation; change for Bash etc. accordingly)
+```
+pip install --ignore-installed .; set -x CONSUL_HOST <YOUR_HOST>; ./main.py
+```
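
As an illustration of the templating semantics described in the README above — `{{...}}` resolves through the rels key to a list of host:port entries, while `<<...>>` resolves through the dmaap key to the connection object itself — here is a toy, self-contained sketch. It is not the CBS implementation (that lives in `config_binding_service/client.py` below), and the key names and addresses are made up.

```
# Toy illustration of the substitution semantics, not the CBS code itself.
config = {"stream_subscribe_url": "{{analytics}}", "output_feed": "<<feed00>>"}
rels_resolved = {"analytics": ["10.0.0.5:8080"]}              # assumed resolved rels entries
dmaap = {"feed00": {"publish_url": "https://dmaap.example/feed/1"}}  # assumed dmaap key

def bind(value):
    if isinstance(value, str) and value.startswith("{{"):
        return rels_resolved.get(value.strip("{}"), [])   # {{...}} always yields a list
    if isinstance(value, str) and value.startswith("<<"):
        return dmaap.get(value.strip("<>"), {})           # <<...>> yields the object itself
    return value

print({k: bind(v) for k, v in config.items()})
# {'stream_subscribe_url': ['10.0.0.5:8080'], 'output_feed': {'publish_url': 'https://dmaap.example/feed/1'}}
```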
diff --git a/app/app/MANIFEST.in b/app/app/MANIFEST.in
new file mode 100644
index 0000000..3d5afa6
--- /dev/null
+++ b/app/app/MANIFEST.in
@@ -0,0 +1 @@
+include swagger.yaml
diff --git a/app/app/__init__.py b/app/app/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/app/app/__init__.py
diff --git a/config_binding_service/__init__.py b/app/app/config_binding_service/__init__.py
index 51d3246..5a6b081 100644
--- a/config_binding_service/__init__.py
+++ b/app/app/config_binding_service/__init__.py
@@ -1,14 +1,12 @@
# ============LICENSE_START=======================================================
-# org.onap.dcae
-# ================================================================================
-# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,25 +15,16 @@
# ============LICENSE_END=========================================================
#
# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
import os
-import logging
-'''Configures the module root logger'''
-root = logging.getLogger()
-if root.handlers:
- root.handlers.clear()
-formatter = logging.Formatter('%(asctime)s | %(name)s | %(module)s | %(funcName)s | %(lineno)d | %(levelname)s | %(message)s')
-handler = logging.StreamHandler()
-handler.setFormatter(formatter)
-root.addHandler(handler)
-root.setLevel("DEBUG")
class BadEnviornmentENVNotFound(Exception):
+ """
+    Specific exception to be raised when a required ENV variable is missing
+ """
pass
-def get_logger(module=None):
- '''Returns a module-specific logger or global logger if the module is None'''
- return root if module is None else root.getChild(module)
def get_consul_uri():
"""
@@ -45,8 +34,7 @@ def get_consul_uri():
if "CONSUL_HOST" in os.environ:
# WARNING! TODO! Currently the env file does not include the port.
# But some other people think that the port should be a part of that.
- # For now, I'm hardcoding 8500 until this gets resolved.
+ # For now, I'm hardcoding 8500 until this gets resolved.
return "http://{0}:{1}".format(os.environ["CONSUL_HOST"], 8500)
else:
raise BadEnviornmentENVNotFound("CONSUL_HOST")
-
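
`get_consul_uri()` reads `CONSUL_HOST` and hard-codes port 8500, raising `BadEnviornmentENVNotFound` when the variable is missing. A small sketch of that behavior (the host name is an assumed placeholder):

```
import os
from config_binding_service import get_consul_uri, BadEnviornmentENVNotFound

os.environ["CONSUL_HOST"] = "consul.example.internal"  # hypothetical host
assert get_consul_uri() == "http://consul.example.internal:8500"

del os.environ["CONSUL_HOST"]
try:
    get_consul_uri()
except BadEnviornmentENVNotFound:
    print("CONSUL_HOST must be set in the environment")
```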
diff --git a/app/app/config_binding_service/client.py b/app/app/config_binding_service/client.py
new file mode 100644
index 0000000..93f4d69
--- /dev/null
+++ b/app/app/config_binding_service/client.py
@@ -0,0 +1,297 @@
+# ============LICENSE_START=======================================================
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+import re
+from functools import partial, reduce
+import base64
+import copy
+import json
+import requests
+import six
+from config_binding_service import get_consul_uri
+from config_binding_service.logging import utc, metrics
+
+
+CONSUL = get_consul_uri()
+
+template_match_rels = re.compile(r"\{{2}([^\}\{]*)\}{2}")
+template_match_dmaap = re.compile(r"<{2}([^><]*)>{2}")
+
+###
+# Custom Exceptions
+###
+
+
+class CantGetConfig(Exception):
+ """
+ Represents an exception where a required key in consul isn't there
+ """
+
+ def __init__(self, code, response):
+ self.code = code
+ self.response = response
+
+
+class BadRequest(Exception):
+ """
+ Exception to be raised when the user tried to do something they shouldn't
+ """
+
+ def __init__(self, response):
+ self.code = 400
+ self.response = response
+
+
+###
+# Private Functions
+###
+
+
+def _consul_get_all_as_transaction(service_component_name, raw_request, xer):
+ """
+ Use Consul's transaction API to get all keys of the form service_component_name:*
+ Return a dict with all the values decoded
+ """
+ payload = [
+ {
+ "KV": {
+ "Verb": "get-tree",
+ "Key": service_component_name,
+ }
+ }]
+
+ bts = utc()
+ response = requests.put("{0}/v1/txn".format(CONSUL), json=payload)
+    metrics(raw_request, bts, xer, "Consul", "/v1/txn", response.status_code, __name__, msg="Retrieving Consul transaction for all keys for {0}".format(service_component_name))
+
+ try:
+ response.raise_for_status()
+ except requests.exceptions.HTTPError as exc:
+ raise CantGetConfig(exc.response.status_code, exc.response.text)
+
+ result = json.loads(response.text)['Results']
+
+ new_res = {}
+ for res in result:
+ key = res["KV"]["Key"]
+ val = base64.b64decode(res["KV"]["Value"]).decode("utf-8")
+ try:
+ new_res[key] = json.loads(val)
+ except json.decoder.JSONDecodeError:
+ new_res[key] = "INVALID JSON" # TODO, should we just include the original value somehow?
+
+ if service_component_name not in new_res:
+ raise CantGetConfig(404, "")
+
+ return new_res
+
+
+def _get_config_rels_dmaap(service_component_name, raw_request, xer):
+ allk = _consul_get_all_as_transaction(service_component_name, raw_request, xer)
+ config = allk[service_component_name]
+ rels = allk.get(service_component_name + ":rels", [])
+ dmaap = allk.get(service_component_name + ":dmaap", {})
+ return config, rels, dmaap
+
+
+def _get_connection_info_from_consul(service_component_name):
+ """
+ Call consul's catalog
+ TODO: currently assumes there is only one service
+
+ DEPRECATION NOTE:
+ This function existed when DCAE was using Consul to resolve service component's connection information.
+ This relied on a "rels" key and a Cloudify relationship plugin to set up the magic.
+    The consensus is that this feature is no longer used.
+    This functionality is very likely deprecated by the Kubernetes service discovery mechanism and DMaaP.
+
+ This function also includes logic related to CDAP, which is also likely deprecated.
+
+ This code shall remain here for now but is at risk of being deleted in a future release.
+ """
+ # Note: there should be a metrics log here, but see the deprecation note above; this function is due to be deleted.
+ res = requests.get("{0}/v1/catalog/service/{1}".format(CONSUL, service_component_name))
+ res.raise_for_status()
+ services = res.json()
+ if services == []:
+ return None # later will get filtered out
+ ip_addr = services[0]["ServiceAddress"]
+ port = services[0]["ServicePort"]
+
+ if "cdap_app" in service_component_name:
+ redirectish_url = "http://{0}:{1}/application/{2}".format(ip_addr, port, service_component_name)
+ res = requests.get(redirectish_url)
+ res.raise_for_status()
+ details = res.json()
+ # Pick out the details to expose to the component developers. These keys come from the broker API
+ return {key: details[key] for key in ["connectionurl", "serviceendpoints"]}
+ return "{0}:{1}".format(ip_addr, port)
+
+
+def _replace_rels_template(rels, template_identifier):
+ """
+ The magic. Replaces a template identifier {{...}} with the entrie(s) from the rels keys
+ NOTE: There was a discussion over whether the CBS should treat {{}} as invalid. Mike asked that
+ it resolve to the empty list. So, it does resolve it to empty list.
+ """
+ returnl = []
+ for rel in rels:
+        if template_identifier in rel and template_identifier != "":
+ returnl.append(rel)
+    # returnl now contains a list of DNS names (possibly empty); now resolve them (or not, if they are not registered)
+ return list(filter(lambda x: x is not None, map(_get_connection_info_from_consul, returnl)))
+
+
+def _replace_dmaap_template(dmaap, template_identifier):
+ """
+ This one liner could have been just put inline in the caller but maybe this will get more complex in future
+ Talked to Mike, default value if key is not found in dmaap key should be {}
+ """
+ return {} if (template_identifier not in dmaap or template_identifier == "<<>>") else dmaap[template_identifier]
+
+
+def _replace_value(v, rels, dmaap):
+ """
+ Takes a value v that was some value in the templatized configuration, determines whether it needs replacement (either {{}} or <<>>), and if so, replaces it.
+ Otherwise just returns v
+
+ implementation notes:
+ - the split below sees if we have v = x,y,z... so we can support {{x,y,z,....}}
+ - the lambda is because we can't fold operators in Python, wanted fold(+, L) where + when applied to lists in python is list concatenation
+ """
+ if isinstance(v, six.string_types): # do not try to replace anything that is not a string
+ match_on_rels = re.match(template_match_rels, v)
+ if match_on_rels:
+ # now holds just x,.. of {{x,...}}
+ template_identifier = match_on_rels.groups()[0].strip()
+ rtpartial = partial(_replace_rels_template, rels)
+ return reduce(lambda a, b: a + b, map(rtpartial, template_identifier.split(",")), [])
+ match_on_dmaap = re.match(template_match_dmaap, v)
+ if match_on_dmaap:
+ template_identifier = match_on_dmaap.groups()[0].strip()
+ """
+ Here is what Mike said:
+ 1) want simple replacement of "<< >>" with dmaap key value
+ 2) never need to support <<f1,f2>> whereas we do support {{sct1,sct2}}
+ The consequence is that if you give the CBS a dmaap key like {"foo" : {...}} you are going to get back {...}, but rels always returns [...].
+            So now component developers have to possibly handle dicts and [], and we have to communicate that to them
+ """
+ return _replace_dmaap_template(dmaap, template_identifier)
+ return v # was not a match or was not a string, return value as is
+
+
+def _recurse(config, rels, dmaap):
+ """
+    Recurse through a configuration, or recursively a sub-element of it.
+ If it's a dict: recurse over all the values
+ If it's a list: recurse over all the values
+ If it's a string: return the replacement
+ If none of the above, just return the item.
+ """
+ if isinstance(config, list):
+ return [_recurse(item, rels, dmaap) for item in config]
+ if isinstance(config, dict):
+ for key in config:
+ config[key] = _recurse(config[key], rels, dmaap)
+ return config
+ if isinstance(config, six.string_types):
+ return _replace_value(config, rels, dmaap)
+ # not a dict, not a list, not a string, nothing to do.
+ return config
+
+
+#########
+# PUBLIC API
+#########
+
+
+def resolve(service_component_name, raw_request, xer):
+ """
+ Return the bound config of service_component_name
+
+ raw_request and xer are needed to form the correct metrics log
+ """
+ config, rels, dmaap = _get_config_rels_dmaap(service_component_name, raw_request, xer)
+ return _recurse(config, rels, dmaap)
+
+
+def resolve_override(config, rels=[], dmaap={}):
+ """
+ Explicitly take in a config, rels, dmaap and try to resolve it.
+    Useful for testing where you don't want to put the test values in consul
+ """
+ # use deepcopy to make sure that config is not touched
+ return _recurse(copy.deepcopy(config), rels, dmaap)
+
+
+def resolve_all(service_component_name, raw_request, xer):
+ """
+ Return config, policies, and any other k such that service_component_name:k exists (other than :dmaap and :rels)
+
+ raw_request and xer are needed to form the correct metrics log
+ """
+ allk = _consul_get_all_as_transaction(service_component_name, raw_request, xer)
+ returnk = {}
+
+ # replace the config with the resolved config
+ returnk["config"] = resolve_override(allk[service_component_name],
+ allk.get("{0}:rels".format(service_component_name), []),
+ allk.get("{0}:dmaap".format(service_component_name), {}))
+
+ # concatenate the items
+ for k in allk:
+ if "policies" in k:
+ if "policies" not in returnk:
+ returnk["policies"] = {}
+ returnk["policies"]["event"] = {}
+ returnk["policies"]["items"] = []
+
+ if k.endswith(":policies/event"):
+ returnk["policies"]["event"] = allk[k]
+ elif ":policies/items" in k:
+ returnk["policies"]["items"].append(allk[k])
+ else:
+ if not(k == service_component_name or k.endswith(":rels") or k.endswith(":dmaap")):
+                # this would blow up if you had a key in consul without a : but this shouldn't happen
+ suffix = k.split(":")[1]
+ returnk[suffix] = allk[k]
+
+ return returnk
+
+
+def get_key(key, service_component_name, raw_request, xer):
+ """
+ Try to fetch a key k from Consul of the form service_component_name:k
+
+ raw_request and xer are needed to form the correct metrics log
+ """
+ if key == "policies":
+ raise BadRequest(
+ ":policies is a complex folder and should be retrieved using the service_component_all API")
+
+ bts = utc()
+ path = "v1/kv/{0}:{1}".format(service_component_name, key)
+ response = requests.get("{0}/{1}".format(CONSUL, path))
+ metrics(raw_request, bts, xer, "Consul", path, response.status_code, __name__, msg="Retrieving single Consul key {0} for {1}".format(key, service_component_name))
+
+ try:
+ response.raise_for_status()
+ except requests.exceptions.HTTPError as exc:
+ raise CantGetConfig(exc.response.status_code, exc.response.text)
+ rest = json.loads(response.text)[0]
+ return json.loads(base64.b64decode(rest["Value"]).decode("utf-8"))
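
For clarity, the decode step inside `_consul_get_all_as_transaction` — base64-decode each `KV.Value` from the `/v1/txn` response, then parse it as JSON, falling back to the `"INVALID JSON"` marker — can be exercised standalone. The response body below is an assumed example, not real Consul output.

```
import base64
import json

# assumed /v1/txn response body (values are base64-encoded JSON, as Consul returns them)
txn_response = {"Results": [
    {"KV": {"Key": "myscn", "Value": base64.b64encode(b'{"x": 1}')}},
    {"KV": {"Key": "myscn:rels", "Value": base64.b64encode(b'["other_scn"]')}},
]}

decoded = {}
for res in txn_response["Results"]:
    raw = base64.b64decode(res["KV"]["Value"]).decode("utf-8")
    try:
        decoded[res["KV"]["Key"]] = json.loads(raw)
    except json.decoder.JSONDecodeError:
        decoded[res["KV"]["Key"]] = "INVALID JSON"

print(decoded)  # {'myscn': {'x': 1}, 'myscn:rels': ['other_scn']}
```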
diff --git a/app/app/config_binding_service/controller.py b/app/app/config_binding_service/controller.py
new file mode 100644
index 0000000..943837c
--- /dev/null
+++ b/app/app/config_binding_service/controller.py
@@ -0,0 +1,108 @@
+# ============LICENSE_START=======================================================
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+import json
+import requests
+import connexion
+import uuid
+from flask import Response
+from config_binding_service import client, get_consul_uri
+from config_binding_service.logging import audit, utc, error, metrics
+
+
+def _get_helper(json_expecting_func, **kwargs):
+ """
+ Helper function used by several functions below
+ """
+ try:
+ payload = json_expecting_func(**kwargs)
+ response, status_code, mimetype = json.dumps(payload), 200, "application/json"
+ except client.BadRequest as exc:
+ response, status_code, mimetype = exc.response, exc.code, "text/plain"
+ except client.CantGetConfig as exc:
+ response, status_code, mimetype = exc.response, exc.code, "text/plain"
+ except Exception as exc:
+ response, status_code, mimetype = "Unknown error", 500, "text/plain"
+ return response, status_code, mimetype
+
+
+def _get_or_generate_xer(raw_request):
+ """get or generate the transaction id"""
+ xer = raw_request.headers.get("x-onap-requestid", None)
+ if xer is None:
+ # some components are still using the old name
+ xer = raw_request.headers.get("x-ecomp-requestid", None)
+ if xer is None:
+ # the user did NOT supply a request id, generate one
+ xer = str(uuid.uuid4())
+ return xer
+
+
+def bind_all(service_component_name):
+ """
+ Get all the keys in Consul for this SCN, and bind the config
+ """
+ xer = _get_or_generate_xer(connexion.request)
+ bts = utc()
+ response, status_code, mimetype = _get_helper(client.resolve_all, service_component_name=service_component_name, raw_request=connexion.request, xer=xer)
+ audit(connexion.request, bts, xer, status_code, __name__, "called for component {0}".format(service_component_name))
+ # Even though some older components might be using the ecomp name, we return the proper one
+ return Response(response=response, status=status_code, mimetype=mimetype, headers={"x-onap-requestid": xer})
+
+
+def bind_config_for_scn(service_component_name):
+ """
+ Bind just the config for this SCN
+ """
+ xer = _get_or_generate_xer(connexion.request)
+ bts = utc()
+ response, status_code, mimetype = _get_helper(client.resolve, service_component_name=service_component_name, raw_request=connexion.request, xer=xer)
+ audit(connexion.request, bts, xer, status_code, __name__, "called for component {0}".format(service_component_name))
+ return Response(response=response, status=status_code, mimetype=mimetype, headers={"x-onap-requestid": xer})
+
+
+def get_key(key, service_component_name):
+ """
+ Get a single key k of the form service_component_name:k from Consul.
+ Should not be used and will return a BAD REQUEST for k=policies because it's a complex object
+ """
+ xer = _get_or_generate_xer(connexion.request)
+ bts = utc()
+ response, status_code, mimetype = _get_helper(client.get_key, key=key, service_component_name=service_component_name, raw_request=connexion.request, xer=xer)
+ audit(connexion.request, bts, xer, status_code, __name__, "called for component {0}".format(service_component_name))
+ return Response(response=response, status=status_code, mimetype=mimetype, headers={"x-onap-requestid": xer})
+
+
+def healthcheck():
+ """
+ CBS Healthcheck
+ """
+ xer = _get_or_generate_xer(connexion.request)
+ path = "v1/catalog/service/config_binding_service"
+ bts = utc()
+ res = requests.get("{0}/{1}".format(get_consul_uri(), path))
+ status = res.status_code
+ if status == 200:
+ msg = "CBS is alive and Consul connection OK"
+ else:
+ msg = "CBS is alive but cannot reach Consul"
+ # treating this as a WARN because this could be a temporary network glitch. Also per EELF guidelines this is a 200 ecode (availability)
+ error(connexion.request, xer, "WARN", 200, tgt_entity="Consul", tgt_path="/v1/catalog/service/config_binding_service", msg=msg)
+ metrics(connexion.request, bts, xer, "Consul", path, res.status_code, __name__, msg="Checking Consul connectivity during CBS healthcheck, {0}".format(msg))
+ audit(connexion.request, bts, xer, status, __name__, msg=msg)
+ return Response(response=msg, status=status)
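
From a client's point of view, every endpoint above echoes an `x-onap-requestid` header back (either the one supplied or a freshly generated UUID). A sketch of calling the API, assuming the CBS is reachable on localhost:10000 and that a key named `myscn` exists in Consul:

```
import uuid
import requests

xer = str(uuid.uuid4())
base = "http://localhost:10000"  # assumed host/port

# reuse one transaction id across calls; the CBS echoes it back in the response headers
resp = requests.get("{0}/service_component_all/myscn".format(base), headers={"x-onap-requestid": xer})
print(resp.status_code, resp.headers.get("x-onap-requestid"))
```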
diff --git a/app/app/config_binding_service/logging.py b/app/app/config_binding_service/logging.py
new file mode 100644
index 0000000..b6275a7
--- /dev/null
+++ b/app/app/config_binding_service/logging.py
@@ -0,0 +1,204 @@
+# ============LICENSE_START=======================================================
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+from logging import getLogger, Formatter
+from logging.handlers import RotatingFileHandler
+from os import makedirs
+import datetime
+
+
+_AUDIT_LOGGER = getLogger("defaultlogger")
+_ERROR_LOGGER = getLogger("defaultlogger")
+_METRICS_LOGGER = getLogger("defaultlogger")
+
+
+def _create_logger(name, logfile):
+ """
+ Create a RotatingFileHandler and a streamhandler for stdout
+ https://docs.python.org/3/library/logging.handlers.html
+ what's with the non-pythonic naming in these stdlib methods? Shameful.
+ """
+ logger = getLogger(name)
+ file_handler = RotatingFileHandler(logfile,
+                                       maxBytes=10000000, backupCount=2)  # 10 MB per file, with two backups
+ formatter = Formatter('%(message)s')
+ file_handler.setFormatter(formatter)
+ logger.setLevel("DEBUG")
+ logger.addHandler(file_handler)
+ return logger
+
+
+def create_loggers():
+ """
+ Public method to set the global logger, launched from Run
+ This is *not* launched during unit testing, so unit tests do not create/write log files
+ """
+ makedirs("/opt/logs", exist_ok=True)
+
+ # create the audit log
+ aud_file = "/opt/logs/audit.log"
+ open(aud_file, 'a').close() # this is like "touch"
+ global _AUDIT_LOGGER
+ _AUDIT_LOGGER = _create_logger("config_binding_service_audit", aud_file)
+
+ # create the error log
+ err_file = "/opt/logs/error.log"
+ open(err_file, 'a').close() # this is like "touch"
+ global _ERROR_LOGGER
+ _ERROR_LOGGER = _create_logger("config_binding_service_error", err_file)
+
+ # create the metrics log
+ met_file = "/opt/logs/metrics.log"
+ open(met_file, 'a').close() # this is like "touch"
+ global _METRICS_LOGGER
+ _METRICS_LOGGER = _create_logger("config_binding_service_metrics", met_file)
+
+
+def utc():
+ """gets current time in utc"""
+ return datetime.datetime.utcnow()
+
+
+def audit(raw_request, bts, xer, rcode, calling_mod, msg="n/a"):
+ """
+ write an EELF audit record per https://wiki.onap.org/download/attachments/1015849/ONAP%20application%20logging%20guidelines.pdf?api=v2
+ %The audit fields implemented:
+
+ 1 BeginTimestamp Implemented (bts)
+ 2 EndTimestamp Auto Injected when this is called
+ 3 RequestID Implemented (xer)
+ 5 threadId n/a
+ 7 serviceName Implemented (from Req)
+ 9 StatusCode Auto injected based on rcode
+ 10 ResponseCode Implemented (rcode)
+ 13 Category log level - all audit records are INFO.
+ 15 Server IP address Implemented (from Req)
+ 16 ElapsedTime Auto Injected (milliseconds)
+ 17 Server This is running in a Docker container so this is not applicable, my HOSTNAME is always "config_binding_service"
+ 18 ClientIPaddress Implemented (from Req)
+ 19 class name Implemented (mod), though docs say OOP, I am using the python module here
+ 20 Unused ...implemented....
+ 21-25 Custom n/a
+ 26 detailMessage Implemented (msg)
+
+ Not implemented
+ 4 serviceInstanceID - ?
+ 6 physical/virtual server name (Optional)
+ 8 PartnerName - nothing in the request tells me this
+ 11 Response Description - the CBS follows standard HTTP error codes so look them up
+ 12 instanceUUID - Optional
+ 14 Severity (Optional)
+ """
+ ets = utc()
+
+ _AUDIT_LOGGER.info("{bts}|{ets}|{xer}||n/a||{path}||{status}|{rcode}|||INFO||{servip}|{et}|config_binding_service|{clientip}|{calling_mod}|||||||{msg}".format(
+ bts=bts.isoformat(),
+ ets=ets.isoformat(),
+ xer=xer, rcode=rcode,
+ path=raw_request.path.split("/")[1],
+ status="COMPLETE" if rcode < 400 else "ERROR",
+ servip=raw_request.host.split(":")[0],
+        et=int((ets - bts).microseconds / 1000),  # supposed to be in milliseconds
+ clientip=raw_request.remote_addr,
+ calling_mod=calling_mod, msg=msg
+ ))
+
+
+def error(raw_request, xer, severity, ecode, tgt_entity="n/a", tgt_path="n/a", msg="n/a", adv_msg="n/a"):
+ """
+ write an EELF error record per
+ the error fields implemented:
+
+ 1 Timestamp Auto Injected when this is called
+ 2 RequestID Implemented (xer)
+ 3 ThreadID n/a
+ 4 ServiceName Implemented (from Req)
+ 6 TargetEntity Implemented (tgt_entity)
+ 7 TargetServiceName Implemented (tgt_path)/
+ 8 ErrorCategory Implemented (severity)
+ 9. ErrorCode Implemented (ecode)
+ 10 ErrorDescription Implemented (msg)
+ 11. detailMessage Implemented (adv_msg)
+
+ Not implemented:
+ 5 PartnerName - nothing in the request tells me this
+ """
+ ets = utc()
+
+ _ERROR_LOGGER.error("{ets}|{xer}|n/a|{path}||{tge}|{tgp}|{sev}|{ecode}|{msg}|{amsg}".format(
+ ets=ets,
+ xer=xer,
+ path=raw_request.path.split("/")[1],
+ tge=tgt_entity,
+ tgp=tgt_path,
+ sev=severity,
+ ecode=ecode,
+ msg=msg,
+ amsg=adv_msg))
+
+
+def metrics(raw_request, bts, xer, target, target_path, rcode, calling_mod, msg="n/a"):
+ """
+ write an EELF metrics record per https://wiki.onap.org/download/attachments/1015849/ONAP%20application%20logging%20guidelines.pdf?api=v2
+ %The metrics fields implemented:
+
+ 1 BeginTimestamp Implemented (bts)
+ 2 EndTimestamp Auto Injected when this is called
+ 3 RequestID Implemented (xer)
+ 5 threadId n/a
+ 7 serviceName Implemented (from Req)
+ 9 TargetEntity Implemented (target)
+ 10 TargetServiceName Implemented (target_path)
+ 11 StatusCode Implemented (based on rcode)
+ 12 Response Code Implemented (rcode)
+ 15 Category log level all metrics records are INFO.
+ 17 Server IP address Implemented (from Req)
+ 18 ElapsedTime Auto Injected (milliseconds)
+ 19 Server This is running in a Docker container so this is not applicable, my HOSTNAME is always "config_binding_service"
+ 20 ClientIPaddress Implemented (from Req)
+ 21 class name Implemented (mod), though docs say OOP, I am using the python module here
+ 22 Unused ...implemented....
+ 24 TargetVirtualEntity n/a
+ 25-28 Custom n/a
+ 29 detailMessage Implemented (msg)
+
+ Not implemented
+ 4 serviceInstanceID - ?
+ 6 physical/virtual server name (Optional)
+ 8 PartnerName - nothing in the request tells me this
+ 13 Response Description - the CBS follows standard HTTP error codes so look them up
+ 14 instanceUUID - Optional
+ 16 Severity (Optional)
+ 23 ProcessKey - optional
+ """
+ ets = utc()
+
+ _METRICS_LOGGER.info("{bts}|{ets}|{xer}||n/a||{path}||{tge}|{tgp}|{status}|{rcode}|||INFO||{servip}|{et}|config_binding_service|{clientip}|{calling_mod}|||n/a|||||{msg}".format(
+ bts=bts.isoformat(),
+ ets=ets.isoformat(),
+ xer=xer,
+ path=raw_request.path.split("/")[1],
+ tge=target,
+ tgp=target_path,
+ status="COMPLETE" if rcode < 400 else "ERROR",
+ rcode=rcode,
+ servip=raw_request.host.split(":")[0],
+        et=int((ets - bts).microseconds / 1000),  # supposed to be in milliseconds
+ clientip=raw_request.remote_addr,
+ calling_mod=calling_mod, msg=msg
+ ))
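
`audit()` and `metrics()` only read `path`, `host` and `remote_addr` from the request object, plus the begin timestamp and transaction id passed in. That makes it possible to exercise the record format outside the server with a minimal stand-in request, as in this sketch (note that `create_loggers()` writes under `/opt/logs`, so that directory must be writable):

```
from types import SimpleNamespace
from config_binding_service.logging import create_loggers, audit, utc

create_loggers()  # creates /opt/logs/audit.log, error.log, metrics.log; normally called from main.py
fake_request = SimpleNamespace(path="/service_component/myscn",   # assumed request attributes
                               host="localhost:10000",
                               remote_addr="127.0.0.1")
audit(fake_request, utc(), "assumed-request-id", 200, __name__, msg="example audit record")
```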
diff --git a/app/app/main.py b/app/app/main.py
new file mode 100755
index 0000000..dd71ccf
--- /dev/null
+++ b/app/app/main.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python3
+
+# ============LICENSE_START=======================================================
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+import connexion
+from config_binding_service.logging import create_loggers
+
+# Entrypoint When in uwsgi
+# This create logger call used to be in the main block, but when moving to NGINX+uwsgi, this had to change. See https://hub.docker.com/r/tiangolo/uwsgi-nginx-flask/
+create_loggers()
+app = connexion.App(__name__, specification_dir='.')
+app.add_api('swagger.yaml', arguments={'title': 'Config Binding Service'})
+
+if __name__ == "__main__":
+ # Only for debugging while developing
+ app.run(host='0.0.0.0', port=10000, debug=True)
diff --git a/app/app/pom.xml b/app/app/pom.xml
new file mode 100644
index 0000000..806098d
--- /dev/null
+++ b/app/app/pom.xml
@@ -0,0 +1,270 @@
+<?xml version="1.0"?>
+<!--
+================================================================================
+Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
+================================================================================
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+============LICENSE_END=========================================================
+
+ECOMP is a trademark and service mark of AT&T Intellectual Property.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <groupId>org.onap.dcaegen2.platform.configbinding</groupId>
+ <artifactId>app</artifactId>
+ <version>2.2.3-SNAPSHOT</version>
+ </parent>
+ <!--- CHANGE THE FOLLOWING 3 OBJECTS for your own repo -->
+ <groupId>org.onap.dcaegen2.platform.configbinding</groupId>
+ <artifactId>app-app</artifactId>
+ <name>dcaegen2-platform-configbinding-app-app</name>
+ <version>2.2.3-SNAPSHOT</version>
+ <url>http://maven.apache.org</url>
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <sonar.sources>.</sonar.sources>
+ <sonar.junit.reportsPath>xunit-results.xml</sonar.junit.reportsPath>
+ <sonar.python.coverage.reportPath>coverage.xml</sonar.python.coverage.reportPath>
+ <sonar.python.xunit.reportPath>xunit-results.xml</sonar.python.xunit.reportPath>
+ <sonar.language>py</sonar.language>
+ <sonar.pluginname>python</sonar.pluginname>
+ <sonar.inclusions>config_binding_service/*.py</sonar.inclusions>
+ <sonar.exclusions>tests/*,setup.py</sonar.exclusions>
+ </properties>
+ <build>
+ <finalName>${project.artifactId}-${project.version}</finalName>
+ <pluginManagement>
+ <plugins>
+ <!-- the following plugins are invoked from oparent, we do not need them -->
+ <plugin>
+ <groupId>org.sonatype.plugins</groupId>
+ <artifactId>nexus-staging-maven-plugin</artifactId>
+ <version>1.6.7</version>
+ <configuration>
+ <skipNexusStagingDeployMojo>true</skipNexusStagingDeployMojo>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-deploy-plugin</artifactId>
+ <!-- This version supports the "deployAtEnd" parameter -->
+ <version>2.8</version>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-enforcer-plugin</artifactId>
+ <version>3.0.0-M1</version>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </plugin>
+ <!-- first disable the default Java plugins at various stages -->
+ <!-- maven-resources-plugin is called during "*resource" phases by default behavior. it prepares the resources
+ dir. we do not need it -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-resources-plugin</artifactId>
+ <version>2.6</version>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </plugin>
+ <!-- maven-compiler-plugin is called during "compile" phases by default behavior. we do not need it -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>3.1</version>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </plugin>
+ <!-- maven-jar-plugin is called during "compile" phase by default behavior. we do not need it -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>2.4</version>
+ <executions>
+ <execution>
+ <id>default-jar</id>
+ <phase/>
+ </execution>
+ </executions>
+ </plugin>
+ <!-- maven-install-plugin is called during "install" phase by default behavior. it tries to copy stuff under
+ target dir to ~/.m2. we do not need it -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-install-plugin</artifactId>
+ <version>2.4</version>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </plugin>
+ <!-- maven-surefire-plugin is called during "test" phase by default behavior. it triggers junit test.
+ we do not need it -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <version>2.12.4</version>
+ <configuration>
+ <skipTests>true</skipTests>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>exec-maven-plugin</artifactId>
+ <version>1.2.1</version>
+ <configuration>
+ <executable>${session.executionRootDirectory}/mvn-phase-script.sh</executable>
+ <environmentVariables>
+ <!-- make mvn properties as env for our script -->
+ <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID>
+ <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID>
+ <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION>
+ <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY>
+ <MVN_RAWREPO_BASEURL_UPLOAD>${onap.nexus.rawrepo.baseurl.upload}</MVN_RAWREPO_BASEURL_UPLOAD>
+ <MVN_RAWREPO_BASEURL_DOWNLOAD>${onap.nexus.rawrepo.baseurl.download}</MVN_RAWREPO_BASEURL_DOWNLOAD>
+ <MVN_RAWREPO_SERVERID>${onap.nexus.rawrepo.serverid}</MVN_RAWREPO_SERVERID>
+ <MVN_DOCKERREGISTRY_DAILY>${onap.nexus.dockerregistry.daily}</MVN_DOCKERREGISTRY_DAILY>
+ <MVN_DOCKERREGISTRY_RELEASE>${onap.nexus.dockerregistry.release}</MVN_DOCKERREGISTRY_RELEASE>
+ </environmentVariables>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+ <plugins>
+ <!-- plugin>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>2.4.1</version>
+ <configuration>
+ <descriptors>
+ <descriptor>assembly/dep.xml</descriptor>
+ </descriptors>
+ </configuration>
+ <executions>
+ <execution>
+ <id>make-assembly</id>
+ <phase>package</phase>
+ <goals>
+ <goal>single</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin -->
+ <!-- now we configure custom action (calling a script) at various lifecycle phases -->
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>exec-maven-plugin</artifactId>
+ <version>1.2.1</version>
+ <executions>
+ <execution>
+ <id>clean phase script</id>
+ <phase>clean</phase>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ <configuration>
+ <arguments>
+ <argument>__</argument>
+ <argument>clean</argument>
+ </arguments>
+ </configuration>
+ </execution>
+ <execution>
+ <id>generate-sources script</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ <configuration>
+ <arguments>
+ <argument>__</argument>
+ <argument>generate-sources</argument>
+ </arguments>
+ </configuration>
+ </execution>
+ <execution>
+ <id>compile script</id>
+ <phase>compile</phase>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ <configuration>
+ <arguments>
+ <argument>__</argument>
+ <argument>compile</argument>
+ </arguments>
+ </configuration>
+ </execution>
+ <execution>
+ <id>package script</id>
+ <phase>package</phase>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ <configuration>
+ <arguments>
+ <argument>__</argument>
+ <argument>package</argument>
+ </arguments>
+ </configuration>
+ </execution>
+ <execution>
+ <id>test script</id>
+ <phase>test</phase>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ <configuration>
+ <arguments>
+ <argument>__</argument>
+ <argument>test</argument>
+ </arguments>
+ </configuration>
+ </execution>
+ <execution>
+ <id>install script</id>
+ <phase>install</phase>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ <configuration>
+ <arguments>
+ <argument>__</argument>
+ <argument>install</argument>
+ </arguments>
+ </configuration>
+ </execution>
+ <execution>
+ <id>deploy script</id>
+ <phase>deploy</phase>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ <configuration>
+ <arguments>
+ <argument>__</argument>
+ <argument>deploy</argument>
+ </arguments>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
diff --git a/setup.py b/app/app/setup.py
index cc28a43..237f7c4 100644
--- a/setup.py
+++ b/app/app/setup.py
@@ -1,14 +1,12 @@
# ============LICENSE_START=======================================================
-# org.onap.dcae
-# ================================================================================
-# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,24 +15,17 @@
# ============LICENSE_END=========================================================
#
# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-import os
-from setuptools import setup, find_packages
-from pip.req import parse_requirements
-from pip.download import PipSession
-import pip
-pip.main(['install','-r','requirements.txt'])
+from setuptools import setup, find_packages
setup(
name='config_binding_service',
- version='1.2.0',
- packages=find_packages(),
- author = "Tommy Carpenter",
- author_email = "tommy at research dot a t t dot com",
+ version='2.2.3',
+ packages=find_packages(exclude=["tests.*", "tests"]),
+ author="Tommy Carpenter",
+ author_email="tommy@research.att.com",
description='Service to fetch and bind configurations',
- license = "",
- keywords = "",
- url = "ONAP URL TBD",
- zip_safe=False,
- scripts = ["bin/run.py"]
+ url="https://gerrit.onap.org/r/#/admin/projects/dcaegen2/platform/configbinding",
+ install_requires=["requests", "Flask", "connexion", "six"],
+ include_package_data=True
)
diff --git a/app/app/swagger.yaml b/app/app/swagger.yaml
new file mode 100644
index 0000000..cfe0944
--- /dev/null
+++ b/app/app/swagger.yaml
@@ -0,0 +1,100 @@
+# ============LICENSE_START=======================================================
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+
+---
+swagger: "2.0"
+info:
+ version: "2.1.0"
+ title: "Config Binding Service"
+paths:
+ /service_component/{service_component_name}:
+ parameters:
+ - name: "service_component_name"
+ in: "path"
+ description: "Service Component Name. service_component_name must be a key in consul."
+ required: true
+ type: "string"
+ get:
+ description: "Binds the configuration for service_component_name and returns the bound configuration as a JSON"
+ operationId: "config_binding_service.controller.bind_config_for_scn"
+ responses:
+ 200:
+ description: OK; the bound config is returned as an object
+ schema:
+ type: object
+ 404:
+ description: there is no configuration in Consul for this component
+
+ /service_component_all/{service_component_name}:
+ parameters:
+ - name: "service_component_name"
+ in: "path"
+ description: "Service Component Name. service_component_name must be a key in consul."
+ required: true
+ type: "string"
+ get:
+ description: "Binds the configuration for service_component_name and returns the bound configuration, policies, and any other keys that are in Consul"
+ operationId: "config_binding_service.controller.bind_all"
+ responses:
+ 200:
+ description: "OK; returns {config : ..., policies : ....., k : ...} for all other k in Consul"
+ schema:
+ type: object
+ 404:
+ description: there is no configuration in Consul for this component
+
+ /{key}/{service_component_name}:
+ parameters:
+ - name: "key"
+ in: "path"
+ description: "this endpoint tries to pull service_component_name:key; key is the key after the colon"
+ required: true
+ type: "string"
+ - name: "service_component_name"
+ in: "path"
+ description: "Service Component Name."
+ required: true
+ type: "string"
+ get:
+ description: "this is an endpoint that fetches a generic service_component_name:key out of Consul. The idea is that we don't want to tie components to Consul directly in case we swap out the backend some day, so the CBS abstracts Consul from clients. The structuring and weird collision of this new API with the above is unfortunate but due to legacy concerns."
+ operationId: "config_binding_service.controller.get_key"
+ responses:
+ 200:
+ description: "OK; returns service_component_name:key"
+ schema:
+ type: object
+ 404:
+ description: "key does not exist"
+ schema:
+ type: string
+ 400:
+ description: "bad request. Currently this is only returned on :policies, which is a complex object, and should be gotten through service_component_all"
+ schema:
+ type: string
+
+ /healthcheck:
+ get:
+ description: "This is the health check endpoint. If this returns a 200, the server is alive and consul can be reached. If not a 200, either dead, or no connection to consul"
+ operationId: "config_binding_service.controller.healthcheck"
+ parameters: []
+ responses:
+ 200:
+ description: Successful response
+ 503:
+ description: the config binding service cannot reach Consul
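For orientation, here is a minimal client-side sketch of the four endpoints described above. The base URL and component name are illustrative placeholders, not values defined in this change:

    import requests

    CBS = "http://localhost:10000"  # illustrative base URL; not defined in this change
    SCN = "example_component.unknown.unknown.unknown.dcae.onap.org"  # hypothetical name

    # GET /service_component/{scn}: returns only the bound configuration
    config = requests.get("{0}/service_component/{1}".format(CBS, SCN)).json()

    # GET /service_component_all/{scn}: returns {config : ..., policies : ..., k : ...}
    everything = requests.get("{0}/service_component_all/{1}".format(CBS, SCN)).json()

    # GET /{key}/{scn}: returns the generic key scn:key, e.g. scn:dti
    dti = requests.get("{0}/dti/{1}".format(CBS, SCN)).json()

    # GET /healthcheck: 200 when the CBS is up and can reach Consul
    healthy = requests.get("{0}/healthcheck".format(CBS)).status_code == 200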
diff --git a/app/app/tests/__init__.py b/app/app/tests/__init__.py
new file mode 100644
index 0000000..1875bf6
--- /dev/null
+++ b/app/app/tests/__init__.py
@@ -0,0 +1,21 @@
+# ================================================================================
+# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+
+# empty __init__.py so that pytest adds the correct path to the coverage report,
+# per the pytest best-practice guidelines
diff --git a/app/app/tests/conftest.py b/app/app/tests/conftest.py
new file mode 100644
index 0000000..c8f2a06
--- /dev/null
+++ b/app/app/tests/conftest.py
@@ -0,0 +1,70 @@
+import pytest
+from requests.exceptions import HTTPError
+from config_binding_service import get_consul_uri
+
+
+class FakeResponse():
+ def __init__(self, status_code, text):
+ self.text = text
+ self.status_code = status_code
+
+ def raise_for_status(self):
+ if self.status_code >= 400:
+ raise HTTPError(response=FakeResponse(404, ""))
+
+
+@pytest.fixture
+def expected_config():
+ return {"deep": {"ALL YOUR SERVICE BELONG TO US": ['6.6.6.6:666', '7.7.7.7:777', '5.5.5.5:555', '5.5.5.5:555']},
+ "doubledeep": {"sodeep": {"hello": "darkness"}}}
+
+
+@pytest.fixture
+def monkeyed_get_connection_info_from_consul():
+ def _monkeyed_get_connection_info_from_consul(service_component_name):
+ # shared monkeypatch; somewhat lazy because the function this patches could be broken up.
+ if service_component_name == "cdap":
+ return '666.666.666.666:666'
+ elif service_component_name == "testing_bravo.somedomain.com":
+ return '7.7.7.7:777'
+ elif service_component_name == "testing_alpha.somedomain.com":
+ return '6.6.6.6:666'
+ elif service_component_name == "testing_charlie.somedomain.com":
+ return '5.5.5.5:555'
+ elif service_component_name == "nonexistent_hope":
+ return None # the real function returns None here
+ elif service_component_name == "cdap_serv.dcae.ecomp.somedomain.com":
+ broker_ip = '1.1.1.1'
+ broker_port = 444
+ return "http://{0}:{1}/application/{2}".format(broker_ip, broker_port, service_component_name)
+ return _monkeyed_get_connection_info_from_consul
+
+
+@pytest.fixture
+def monkeyed_requests_get():
+ def _monkeyed_requests_get(url):
+ if url == "{0}/v1/kv/test_service_component_name.unknown.unknown.unknown.dcae.onap.org:dti".format(get_consul_uri()):
+ return FakeResponse(status_code=200, text='[{"LockIndex":0,"Key":"test_service_component_name.unknown.unknown.unknown.dcae.onap.org:dti","Flags":0,"Value": "eyJteSIgOiAiZHRpIn0=","CreateIndex":4066524,"ModifyIndex":4066524}]')
+ else:
+ return FakeResponse(status_code=404, text="")
+ return _monkeyed_requests_get
+
+
+@pytest.fixture
+def monkeyed_requests_put():
+ def _monkeyed_requests_put(url, json):
+ if url == "{0}/v1/txn".format(get_consul_uri()):
+ key = json[0]["KV"]["Key"]
+ if key == "test_service_component_name.unknown.unknown.unknown.dcae.onap.org":
+ return FakeResponse(status_code=200, text='{"Results":[{"KV":{"LockIndex":0,"Key":"test_service_component_name.unknown.unknown.unknown.dcae.onap.org","Flags":0,"Value":"eyJteSIgOiAiYW1hemluZyBjb25maWcifQ==","CreateIndex":4051555,"ModifyIndex":4051555}},{"KV":{"LockIndex":0,"Key":"test_service_component_name.unknown.unknown.unknown.dcae.onap.org:dmaap","Flags":0,"Value":"eyJmb28iIDogImJhciJ9","CreateIndex":4051571,"ModifyIndex":4051571}},{"KV":{"LockIndex":0,"Key":"test_service_component_name.unknown.unknown.unknown.dcae.onap.org:dti","Flags":0,"Value":"eyJteSIgOiAiZHRpIn0=","CreateIndex":4066524,"ModifyIndex":4066524}},{"KV":{"LockIndex":0,"Key":"test_service_component_name.unknown.unknown.unknown.dcae.onap.org:policies/event","Flags":0,"Value":"eyJhY3Rpb24iOiAiZ2F0aGVyZWQiLCAidGltZXN0YW1wIjogIjIwMTgtMDItMTkgMTU6MzY6NDQuODc3MzgwIiwgInVwZGF0ZV9pZCI6ICJiYjczYzIwYS01ZmY4LTQ1MGYtODIyMy1kYTY3MjBhZGUyNjciLCAicG9saWNpZXNfY291bnQiOiAyfQ==","CreateIndex":4048564,"ModifyIndex":4048564}},{"KV":{"LockIndex":0,"Key":"test_service_component_name.unknown.unknown.unknown.dcae.onap.org:policies/items/DCAE_alex.Config_MS_alex_microservice","Flags":0,"Value":"eyJwb2xpY3lOYW1lIjogIkRDQUVfYWxleC5Db25maWdfTVNfYWxleF9taWNyb3NlcnZpY2UuMTMyLnhtbCIsICJwb2xpY3lDb25maWdNZXNzYWdlIjogIkNvbmZpZyBSZXRyaWV2ZWQhICIsICJyZXNwb25zZUF0dHJpYnV0ZXMiOiB7fSwgInBvbGljeUNvbmZpZ1N0YXR1cyI6ICJDT05GSUdfUkVUUklFVkVEIiwgIm1hdGNoaW5nQ29uZGl0aW9ucyI6IHsiT05BUE5hbWUiOiAiRENBRSIsICJOYW1lIjogIkRDQUUiLCAiQ29uZmlnTmFtZSI6ICJhbGV4X2NvbmZpZ19uYW1lIn0sICJjb25maWciOiB7InBvbGljeVNjb3BlIjogImFsZXhfcG9saWN5X3Njb3BlIiwgImNvbmZpZ05hbWUiOiAiYWxleF9jb25maWdfbmFtZSIsICJkZXNjcmlwdGlvbiI6ICJ0ZXN0IERDQUUgcG9saWN5LWhhbmRsZXIiLCAic2VydmljZSI6ICJhbGV4X3NlcnZpY2UiLCAicG9saWN5TmFtZSI6ICJhbGV4X3BvbGljeV9uYW1lIiwgInJpc2tMZXZlbCI6ICIzIiwgImtleTEiOiAidmFsdWUxIiwgInBvbGljeV9oZWxsbyI6ICJ3b3JsZCEiLCAiY29udGVudCI6IHsiZm9vIjogIm1pY3Jvc2VydmljZTMiLCAiZm9vX3VwZGF0ZWQiOiAiMjAxOC0wMS0zMFQxMzoyNTozMy4yMjJaIn0sICJyaXNrVHlwZSI6ICIxNzEyX0VURSIsICJndWFyZCI6ICJGYWxzZSIsICJ2ZXJzaW9uIjogIjAuMC4xIiwgImxvY2F0aW9uIjogIkNlbnRyYWwiLCAicG9saWN5X3VwZGF0ZWRfdHMiOiAiMjAxOC0wMi0xOVQxNTowOTo1NS4yMTdaIiwgInVwZGF0ZWRfcG9saWN5X2lkIjogIkRDQUVfYWxleC5Db25maWdfTVNfYWxleF9taWNyb3NlcnZpY2UiLCAicG9saWN5X3VwZGF0ZWRfdG9fdmVyIjogIjEzMiIsICJwcmlvcml0eSI6ICI0IiwgInBvbGljeV91cGRhdGVkX2Zyb21fdmVyIjogIjEzMSIsICJ0ZW1wbGF0ZVZlcnNpb24iOiAiMiIsICJ1dWlkIjogIjVlODdkN2M1LTBkYWYtNGI2Yi1hYjkyLTUzNjVjZjVkYjFlZiJ9LCAicHJvcGVydHkiOiBudWxsLCAidHlwZSI6ICJKU09OIiwgInBvbGljeVZlcnNpb24iOiAiMTMyIn0=","CreateIndex":4048564,"ModifyIndex":4065574}},{"KV":{"LockIndex":0,"Key":"test_service_component_name.unknown.unknown.unknown.dcae.onap.org:policies/items/DCAE_alex.Config_db_client_policy_id_value","Flags":0,"Value":"eyJwb2xpY3lOYW1lIjogIkRDQUVfYWxleC5Db25maWdfZGJfY2xpZW50X3BvbGljeV9pZF92YWx1ZS4xMzMueG1sIiwgInBvbGljeUNvbmZpZ01lc3NhZ2UiOiAiQ29uZmlnIFJldHJpZXZlZCEgIiwgInJlc3BvbnNlQXR0cmlidXRlcyI6IHt9LCAicG9saWN5Q29uZmlnU3RhdHVzIjogIkNPTkZJR19SRVRSSUVWRUQiLCAibWF0Y2hpbmdDb25kaXRpb25zIjogeyJPTkFQTmFtZSI6ICJEQ0FFIiwgIk5hbWUiOiAiRENBRSIsICJDb25maWdOYW1lIjogImFsZXhfY29uZmlnX25hbWUifSwgImNvbmZpZyI6IHsiZGJfY2xpZW50X3RzIjogIjIwMTctMTEtMjFUMTI6MTI6MTMuNjk2WiIsICJkYl9jbGllbnQiOiAiaXBzdW0iLCAicG9saWN5X2hlbGxvIjogIndvcmxkISIsICJwb2xpY3lfdXBkYXRlZF9mcm9tX3ZlciI6ICIxMzIiLCAidXBkYXRlZF9wb2xpY3lfaWQiOiAiRENBRV9hbGV4LkNvbmZpZ19kYl9jbGllbnRfcG9saWN5X2lkX3ZhbHVlIiwgInBvbGljeV91cGRhdGVkX3RzIjogIjIwMTgtMDItMTlUMTU6MDk6NTUuODEyWiIsICJwb2xpY3lfdXBkYXRlZF90b192ZXIiOiAiMTMzIn0sICJwcm9wZXJ0eSI6IG51bGwsICJ0eXBlIjogIkpTT04iLCAicG9saWN5VmVyc2lvbiI6ICIxMzMifQ==",
"CreateIndex":4048564,"ModifyIndex":4065570}},{"KV":{"LockIndex":0,"Key":"test_service_component_name.unknown.unknown.unknown.dcae.onap.org:rels","Flags":0,"Value":"WyJteS5hbWF6aW5nLnJlbGF0aW9uc2hpcCJd","CreateIndex":4051567,"ModifyIndex":4051567}}],"Errors":null,"Index":0,"LastContact":0,"KnownLeader":true}')
+ elif key == "scn_exists":
+ return FakeResponse(status_code=200, text='{"Results":[{"KV":{"LockIndex":0,"Key":"scn_exists","Flags":0,"Value":"eyJmb28zIiA6ICJiYXIzIn0=","CreateIndex":4067403,"ModifyIndex":4067403}},{"KV":{"LockIndex":0,"Key":"scn_exists:dmaap","Flags":0,"Value":"eyJmb280IiA6ICJiYXI0In0=","CreateIndex":4067410,"ModifyIndex":4067410}},{"KV":{"LockIndex":0,"Key":"scn_exists:rels","Flags":0,"Value":"WyJmb28iXQ==","CreateIndex":4067406,"ModifyIndex":4067406}},{"KV":{"LockIndex":0,"Key":"scn_exists_nord","Flags":0,"Value":"eyJmb281IiA6ICJiYXI1In0=","CreateIndex":4067340,"ModifyIndex":4067340}}],"Errors":null,"Index":0,"LastContact":0,"KnownLeader":true}')
+ elif key == "scn_exists_nord":
+ return FakeResponse(status_code=200, text='{"Results":[{"KV":{"LockIndex":0,"Key":"scn_exists_nord","Flags":0,"Value":"eyJmb281IiA6ICJiYXI1In0=","CreateIndex":4067340,"ModifyIndex":4067340}}],"Errors":null,"Index":0,"LastContact":0,"KnownLeader":true}')
+ elif key == "test_resolve_scn":
+ return FakeResponse(status_code=200, text='{"Results":[{"KV":{"LockIndex":0,"Key":"test_resolve_scn","Flags":0,"Value":"ewogICAgICAgICAgICAgICAgImRlZXAiIDogewogICAgICAgICAgICAgICAgICAgICJBTEwgWU9VUiBTRVJWSUNFIEJFTE9ORyBUTyBVUyIgOiAie3thbHBoYSxicmF2byxjaGFybGllfX0ifSwKICAgICAgICAgICAgICAgICJkb3VibGVkZWVwIiA6ICB7CiAgICAgICAgICAgICAgICAgICAgInNvZGVlcCIgOiB7ImhlbGxvIiA6ICI8PFdITz8+PiJ9fQogICAgICAgICAgICAgfQo=","CreateIndex":4068002,"ModifyIndex":4068002}},{"KV":{"LockIndex":0,"Key":"test_resolve_scn:dmaap","Flags":0,"Value":"eyJXSE8/IiA6ICJkYXJrbmVzcyJ9","CreateIndex":4068013,"ModifyIndex":4068013}},{"KV":{"LockIndex":0,"Key":"test_resolve_scn:rels","Flags":0,"Value":"WyJ0ZXN0aW5nX2FscGhhLnNvbWVkb21haW4uY29tIiwgInRlc3RpbmdfYnJhdm8uc29tZWRvbWFpbi5jb20iLCAidGVzdGluZ19jaGFybGllLnNvbWVkb21haW4uY29tIiwgInRlc3RpbmdfY2hhcmxpZS5zb21lZG9tYWluLmNvbSJd","CreateIndex":4068010,"ModifyIndex":4068010}}],"Errors":null,"Index":0,"LastContact":0,"KnownLeader":true}')
+ elif key == "cbs_test_messed_up":
+ return FakeResponse(status_code=200, text='{"Results":[{"KV":{"LockIndex":0,"Key":"cbs_test_messed_up","Flags":0,"Value":"eyJmb28iIDogImJhciJ9","CreateIndex":4864032,"ModifyIndex":4864052}},{"KV":{"LockIndex":0,"Key":"cbs_test_messed_up:badkey","Flags":0,"Value":"eyJub3QgYSBqc29ubm5uIFJFS1RUVFQ=","CreateIndex":4864075,"ModifyIndex":4864075}}],"Errors":null,"Index":0,"LastContact":0,"KnownLeader":true}')
+ elif key == "scn_NOTexists":
+ return FakeResponse(status_code=404, text="")
+ return _monkeyed_requests_put
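The Value fields in the fake Consul payloads above are base64-encoded JSON, which is how Consul's KV and transaction APIs return values. A small sketch of the decode step, using one of the fixture values above:

    import base64
    import json

    # "Value" for the ...:dti key in the fake transaction response above
    encoded = "eyJteSIgOiAiZHRpIn0="
    decoded = json.loads(base64.b64decode(encoded).decode("utf-8"))
    assert decoded == {"my": "dti"}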
diff --git a/app/app/tests/test_client.py b/app/app/tests/test_client.py
new file mode 100644
index 0000000..cd3287c
--- /dev/null
+++ b/app/app/tests/test_client.py
@@ -0,0 +1,168 @@
+from config_binding_service import client
+
+
+# pytest doesn't support objects in conftest
+class FakeReq(object):
+ """used to fake the logging params"""
+ def __init__(self):
+ self.path = "/unittest in {0}".format(__name__)
+ self.host = "localhost"
+ self.remote_addr = "6.6.6.6"
+
+
+def test_consul_get_all_as_transaction(monkeypatch, monkeyed_requests_put):
+ """tests _consul_get_all_as_transaction"""
+ monkeypatch.setattr('requests.put', monkeyed_requests_put)
+ allk = client._consul_get_all_as_transaction(
+ "test_service_component_name.unknown.unknown.unknown.dcae.onap.org", FakeReq(), "unit test xer")
+ assert allk == {
+ 'test_service_component_name.unknown.unknown.unknown.dcae.onap.org': {'my': 'amazing config'},
+ 'test_service_component_name.unknown.unknown.unknown.dcae.onap.org:dti': {'my': 'dti'},
+ 'test_service_component_name.unknown.unknown.unknown.dcae.onap.org:dmaap': {'foo': 'bar'},
+ 'test_service_component_name.unknown.unknown.unknown.dcae.onap.org:policies/event': {'action': 'gathered', 'timestamp': '2018-02-19 15:36:44.877380', 'update_id': 'bb73c20a-5ff8-450f-8223-da6720ade267', 'policies_count': 2},
+ 'test_service_component_name.unknown.unknown.unknown.dcae.onap.org:policies/items/DCAE_alex.Config_MS_alex_microservice': {'policyName': 'DCAE_alex.Config_MS_alex_microservice.132.xml', 'policyConfigMessage': 'Config Retrieved! ', 'responseAttributes': {}, 'policyConfigStatus': 'CONFIG_RETRIEVED', 'matchingConditions': {'ONAPName': 'DCAE', 'Name': 'DCAE', 'ConfigName': 'alex_config_name'}, 'config': {'policyScope': 'alex_policy_scope', 'configName': 'alex_config_name', 'description': 'test DCAE policy-handler', 'service': 'alex_service', 'policyName': 'alex_policy_name', 'riskLevel': '3', 'key1': 'value1', 'policy_hello': 'world!', 'content': {'foo': 'microservice3', 'foo_updated': '2018-01-30T13:25:33.222Z'}, 'riskType': '1712_ETE', 'guard': 'False', 'version': '0.0.1', 'location': 'Central', 'policy_updated_ts': '2018-02-19T15:09:55.217Z', 'updated_policy_id': 'DCAE_alex.Config_MS_alex_microservice', 'policy_updated_to_ver': '132', 'priority': '4', 'policy_updated_from_ver': '131', 'templateVersion': '2', 'uuid': '5e87d7c5-0daf-4b6b-ab92-5365cf5db1ef'}, 'property': None, 'type': 'JSON', 'policyVersion': '132'},
+ 'test_service_component_name.unknown.unknown.unknown.dcae.onap.org:policies/items/DCAE_alex.Config_db_client_policy_id_value': {'policyName': 'DCAE_alex.Config_db_client_policy_id_value.133.xml', 'policyConfigMessage': 'Config Retrieved! ', 'responseAttributes': {}, 'policyConfigStatus': 'CONFIG_RETRIEVED', 'matchingConditions': {'ONAPName': 'DCAE', 'Name': 'DCAE', 'ConfigName': 'alex_config_name'}, 'config': {'db_client_ts': '2017-11-21T12:12:13.696Z', 'db_client': 'ipsum', 'policy_hello': 'world!', 'policy_updated_from_ver': '132', 'updated_policy_id': 'DCAE_alex.Config_db_client_policy_id_value', 'policy_updated_ts': '2018-02-19T15:09:55.812Z', 'policy_updated_to_ver': '133'}, 'property': None, 'type': 'JSON', 'policyVersion': '133'},
+ 'test_service_component_name.unknown.unknown.unknown.dcae.onap.org:rels': ['my.amazing.relationship']
+ }
+
+ allk = client._consul_get_all_as_transaction("cbs_test_messed_up", FakeReq(), "unit test xer")
+ assert allk == {'cbs_test_messed_up': {'foo': 'bar'},
+ 'cbs_test_messed_up:badkey': 'INVALID JSON'}
+
+
+def test_get_config_rels_dmaap(monkeypatch, monkeyed_requests_put):
+ monkeypatch.setattr('requests.put', monkeyed_requests_put)
+ assert ({"foo3": "bar3"}, ["foo"], {"foo4": "bar4"}) == client._get_config_rels_dmaap("scn_exists", FakeReq(), "unit test xer")
+ assert ({"foo5": "bar5"}, [], {}) == client._get_config_rels_dmaap("scn_exists_nord", FakeReq(), "unit test xer")
+
+
+def test_bad_config_http():
+ test_config = {'yeahhhhh': "{{}}"}
+ test_rels = ["testing_bravo.somedomain.com"]
+ assert {'yeahhhhh': []} == client.resolve_override(test_config, test_rels)
+
+
+def test_bad_config_dmaap():
+ test_config = {'darkness': "<<>>"}
+ test_dmaap = {"WHO?": "darkness"}
+ assert {'darkness': {}} == client.resolve_override(test_config, test_dmaap)
+
+
+def test_config_with_list(monkeypatch, monkeyed_get_connection_info_from_consul):
+ monkeypatch.setattr('config_binding_service.client._get_connection_info_from_consul',
+ monkeyed_get_connection_info_from_consul)
+ test_config_1 = {"dcae_target_type": [
+ "vhss-ems", "pcrf-oam"], "downstream-laika": "{{ laika }}", "some-param": "Lorem ipsum dolor sit amet"}
+ test_rels_1 = ["3df5292249ae4a949f173063617cea8d_docker-snmp-polling-firstnet-m"]
+ test_bind_1 = client.resolve_override(test_config_1, test_rels_1, {})
+ assert(test_bind_1 == {'dcae_target_type': [
+ 'vhss-ems', 'pcrf-oam'], 'downstream-laika': [], 'some-param': 'Lorem ipsum dolor sit amet'})
+
+ test_config_2 = {"foo": ["{{cdap}}", "notouching", "<<yo>>"]}
+ test_rels_2 = ["cdap"]
+ test_dmaap_2 = {"yo": "im here"}
+ test_bind_2 = client.resolve_override(test_config_2, test_rels_2, test_dmaap_2)
+ assert(test_bind_2 == {"foo": [['666.666.666.666:666'], "notouching", "im here"]})
+
+
+def test_cdap(monkeypatch, monkeyed_get_connection_info_from_consul):
+ # user override to test CDAP functionality
+ monkeypatch.setattr('config_binding_service.client._get_connection_info_from_consul',
+ monkeyed_get_connection_info_from_consul)
+ test_rels = ["testing_alpha.somedomain.com", "testing_bravo.somedomain.com",
+ "testing_charlie.somedomain.com", "testing_charlie.somedomain.com", "cdap"]
+ test_config = {"streams_publishes": "{{alpha}}",
+ # should be dumped
+ "services_calls": [{"somekey": "{{charlie}}"}],
+ "cdap_to_manage": {'some_nested_thing': "{{cdap}}"}} # no dumps
+ test_bind_1 = client.resolve_override(test_config, test_rels)
+ assert test_bind_1 == {'services_calls': [{"somekey": ["5.5.5.5:555", "5.5.5.5:555"]}], 'streams_publishes': [
+ "6.6.6.6:666"], 'cdap_to_manage': {'some_nested_thing': ['666.666.666.666:666']}}
+ assert test_bind_1['services_calls'] == [{"somekey": ["5.5.5.5:555", "5.5.5.5:555"]}]
+ assert test_bind_1['streams_publishes'] == ["6.6.6.6:666"]
+
+
+def test_multiple_service_types(monkeypatch, monkeyed_get_connection_info_from_consul):
+ # test {{x,y,z}}
+ monkeypatch.setattr('config_binding_service.client._get_connection_info_from_consul',
+ monkeyed_get_connection_info_from_consul)
+
+ # test 1: they all resolve
+ test_rels = ["testing_alpha.somedomain.com", "testing_bravo.somedomain.com",
+ "testing_charlie.somedomain.com", "testing_charlie.somedomain.com"]
+ config = {"ALL YOUR SERVICE BELONG TO US": "{{alpha,bravo,charlie}}"}
+ test_bind_1 = client.resolve_override(config, test_rels)
+ assert(test_bind_1 == {"ALL YOUR SERVICE BELONG TO US": [
+ '6.6.6.6:666', '7.7.7.7:777', '5.5.5.5:555', '5.5.5.5:555']})
+
+ # test 2: two resolve, one is missing from rels key
+ config2 = {"two there one not exist": "{{alpha,bravo,notexist}}"}
+ test_bind_2 = client.resolve_override(config2, test_rels)
+ assert(test_bind_2 == {"two there one not exist": [
+ '6.6.6.6:666', '7.7.7.7:777']})
+
+ # test 3: two resolve, one is in rels key but not registered
+ config3 = {"two there one unregistered": "{{alpha,bravo,unregistered}}"}
+ test_rels3 = ["testing_alpha.somedomain.com",
+ "testing_bravo.somedomain.com", "unregistered.somedomain.com"]
+ test_bind_3 = client.resolve_override(config3, test_rels3)
+ assert(test_bind_3 == {"two there one unregistered": [
+ '6.6.6.6:666', '7.7.7.7:777']})
+
+
+def test_dmaap(monkeypatch):
+ # test resolving dmaap key
+ config = {"TODAY IS YOUR LUCKY DAY": "<<XXX>>"}
+ # does not match
+ test_bind = client.resolve_override(
+ config, dmaap={"XX": "ABSOLVEME"}) # XX != XXX
+ assert(test_bind == {"TODAY IS YOUR LUCKY DAY": {}})
+ # matches
+ test_bind_2 = client.resolve_override(config, dmaap={"XXX": "ABSOLVEME"})
+ assert(test_bind_2 == {"TODAY IS YOUR LUCKY DAY": "ABSOLVEME"})
+
+
+def test_config(monkeypatch, monkeyed_get_connection_info_from_consul):
+ # test config override
+ monkeypatch.setattr('config_binding_service.client._get_connection_info_from_consul',
+ monkeyed_get_connection_info_from_consul)
+ test_config = {"autoderegisterafter": "10m", "cdap_to_manage": {
+ 'some_nested_thing': "{{cdap}}"}, "bindingttw": 5, "hcinterval": "5s"}
+ test_rels = ["cdap"]
+ test_bind_1 = client.resolve_override(test_config, test_rels)
+ assert test_bind_1 == {'autoderegisterafter': '10m', 'cdap_to_manage': {
+ 'some_nested_thing': ['666.666.666.666:666']}, 'bindingttw': 5, 'hcinterval': '5s'}
+
+
+def test_non_existent(monkeypatch, monkeyed_get_connection_info_from_consul):
+ # test a valid config-rels but the key is not in Consul
+ monkeypatch.setattr('config_binding_service.client._get_connection_info_from_consul',
+ monkeyed_get_connection_info_from_consul)
+ test_config = {"you shall not be fufilled": "{{nonexistent_hope}}"}
+ # hopefully not registered in Consul..
+ test_rels = ["nonexistent_hope.rework-central.ecomp.somedomain.com"]
+ test_bind_1 = client.resolve_override(test_config, test_rels, {})
+ assert(test_bind_1 == {"you shall not be fufilled": []})
+
+
+def test_broker_redirect(monkeypatch, monkeyed_get_connection_info_from_consul):
+ # test the broker redirect
+ monkeypatch.setattr('config_binding_service.client._get_connection_info_from_consul',
+ monkeyed_get_connection_info_from_consul)
+ test_config = {"gimmie_dat_cdap": "{{cdap_serv}}"}
+ test_rels = ["cdap_serv.dcae.ecomp.somedomain.com"]
+ assert {"gimmie_dat_cdap": ['http://1.1.1.1:444/application/cdap_serv.dcae.ecomp.somedomain.com']
+ } == client.resolve_override(test_config, test_rels)
+
+
+def test_both(monkeypatch, monkeyed_get_connection_info_from_consul, expected_config):
+ # test rels and http
+ monkeypatch.setattr('config_binding_service.client._get_connection_info_from_consul',
+ monkeyed_get_connection_info_from_consul)
+ test_rels = ["testing_alpha.somedomain.com", "testing_bravo.somedomain.com",
+ "testing_charlie.somedomain.com", "testing_charlie.somedomain.com"]
+ test_dmaap = {"WHO?": "darkness"}
+ config = {"deep": {"ALL YOUR SERVICE BELONG TO US": "{{alpha,bravo,charlie}}"},
+ "doubledeep": {"sodeep": {"hello": "<<WHO?>>"}}}
+ test_bind_1 = client.resolve_override(config, test_rels, test_dmaap)
+ assert(test_bind_1 == expected_config)
diff --git a/app/app/tests/test_controller.py b/app/app/tests/test_controller.py
new file mode 100644
index 0000000..0650a38
--- /dev/null
+++ b/app/app/tests/test_controller.py
@@ -0,0 +1,120 @@
+# ============LICENSE_START=======================================================
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+import json
+import pytest
+from config_binding_service import client, controller
+
+
+# pytest doesn't support objects in conftest
+class FakeConnexion(object):
+ def __init__(self, headers, path, host, remote_addr):
+ self.headers = headers
+ self.path = path
+ self.host = host
+ self.remote_addr = remote_addr
+
+
+# pytest doesn't support objects in conftest
+class FakeReq(object):
+ """used to fake the logging params"""
+ def __init__(self):
+ self.path = "/unittest in {0}".format(__name__)
+ self.host = "localhost"
+ self.remote_addr = "6.6.6.6"
+
+
+def test_bind_config_for_scn(monkeypatch, monkeyed_requests_put):
+ monkeypatch.setattr('requests.put', monkeyed_requests_put)
+ monkeypatch.setattr('connexion.request', FakeConnexion({"x-onap-requestid": 123456789}, "/service_component", "mytestingmachine", "myremoteclient"))
+
+ assert(client.resolve("scn_exists", FakeReq(), "unit test xer") == {"foo3": "bar3"})
+ with pytest.raises(client.CantGetConfig):
+ client.resolve("scn_NOTexists", FakeReq(), "unit test xer")
+
+ R = controller.bind_config_for_scn("scn_exists")
+ assert(json.loads(R.data) == {"foo3": "bar3"})
+ assert(R.status_code == 200)
+ assert(R.headers["x-onap-requestid"] == "123456789")
+
+ R = controller.bind_config_for_scn("scn_NOTexists")
+ assert(R.status_code == 404)
+ assert(R.headers["x-onap-requestid"] == "123456789")
+
+ R = controller.bind_config_for_scn("asdfasdf")
+ assert(R.status_code == 500)
+ assert(R.headers["x-onap-requestid"] == "123456789")
+
+
+def test_generic(monkeypatch, monkeyed_requests_get, monkeyed_requests_put):
+ monkeypatch.setattr('requests.put', monkeyed_requests_put)
+ monkeypatch.setattr('requests.get', monkeyed_requests_get)
+ assert client.get_key("dti", "test_service_component_name.unknown.unknown.unknown.dcae.onap.org", FakeReq(), "unit test xer") == json.loads('{"my": "dti"}')
+ with pytest.raises(client.CantGetConfig):
+ client.get_key("nokeyforyou", "test_service_component_name.unknown.unknown.unknown.dcae.onap.org", FakeReq(), "unit test xer")
+
+ monkeypatch.setattr('connexion.request', FakeConnexion({}, "/get_key", "mytestingmachine", "myremoteclient"))
+
+ R = controller.get_key(
+ "dti", "test_service_component_name.unknown.unknown.unknown.dcae.onap.org")
+ assert(json.loads(R.data) == {"my": "dti"})
+ assert(R.status_code == 200)
+ assert "x-onap-requestid" in R.headers
+
+ R = controller.get_key(
+ "nokeyforyou", "test_service_component_name.unknown.unknown.unknown.dcae.onap.org")
+ assert(R.status_code == 404)
+ assert "x-onap-requestid" in R.headers
+
+ R = controller.get_key(
+ "policies", "test_service_component_name.unknown.unknown.unknown.dcae.onap.org")
+ assert(R.status_code == 400)
+ assert "x-onap-requestid" in R.headers
+
+
+def test_resolve_all(monkeypatch, monkeyed_requests_put, monkeyed_get_connection_info_from_consul, expected_config):
+ monkeypatch.setattr('requests.put', monkeyed_requests_put)
+ allk = client.resolve_all("test_service_component_name.unknown.unknown.unknown.dcae.onap.org", FakeReq(), "unit test xer")
+ withstuff = {'config': {'my': 'amazing config'},
+ 'dti': {'my': 'dti'},
+ 'policies': {'items': [{'policyName': 'DCAE_alex.Config_MS_alex_microservice.132.xml', 'policyConfigMessage': 'Config Retrieved! ', 'responseAttributes': {}, 'policyConfigStatus': 'CONFIG_RETRIEVED', 'matchingConditions': {'ONAPName': 'DCAE', 'Name': 'DCAE', 'ConfigName': 'alex_config_name'}, 'config': {'policyScope': 'alex_policy_scope', 'configName': 'alex_config_name', 'description': 'test DCAE policy-handler', 'service': 'alex_service', 'policyName': 'alex_policy_name', 'riskLevel': '3', 'key1': 'value1', 'policy_hello': 'world!', 'content': {'foo': 'microservice3', 'foo_updated': '2018-01-30T13:25:33.222Z'}, 'riskType': '1712_ETE', 'guard': 'False', 'version': '0.0.1', 'location': 'Central', 'policy_updated_ts': '2018-02-19T15:09:55.217Z', 'updated_policy_id': 'DCAE_alex.Config_MS_alex_microservice', 'policy_updated_to_ver': '132', 'priority': '4', 'policy_updated_from_ver': '131', 'templateVersion': '2', 'uuid': '5e87d7c5-0daf-4b6b-ab92-5365cf5db1ef'}, 'property': None, 'type': 'JSON', 'policyVersion': '132'}, {'policyName': 'DCAE_alex.Config_db_client_policy_id_value.133.xml', 'policyConfigMessage': 'Config Retrieved! ', 'responseAttributes': {}, 'policyConfigStatus': 'CONFIG_RETRIEVED', 'matchingConditions': {'ONAPName': 'DCAE', 'Name': 'DCAE', 'ConfigName': 'alex_config_name'}, 'config': {'db_client_ts': '2017-11-21T12:12:13.696Z', 'db_client': 'ipsum', 'policy_hello': 'world!', 'policy_updated_from_ver': '132', 'updated_policy_id': 'DCAE_alex.Config_db_client_policy_id_value', 'policy_updated_ts': '2018-02-19T15:09:55.812Z', 'policy_updated_to_ver': '133'}, 'property': None, 'type': 'JSON', 'policyVersion': '133'}], 'event': {'action': 'gathered', 'timestamp': '2018-02-19 15:36:44.877380', 'update_id': 'bb73c20a-5ff8-450f-8223-da6720ade267', 'policies_count': 2}}}
+ assert allk == withstuff
+
+ monkeypatch.setattr('config_binding_service.client._get_connection_info_from_consul',
+ monkeyed_get_connection_info_from_consul)
+ allk = client.resolve_all("test_resolve_scn", FakeReq(), "unit test xer")
+ assert allk == {"config": expected_config}
+
+ monkeypatch.setattr('connexion.request', FakeConnexion({}, "/service_component_all", "mytestingmachine", "myremoteclient"))
+
+ R = controller.bind_all(
+ "test_service_component_name.unknown.unknown.unknown.dcae.onap.org")
+ assert(json.loads(R.data) == withstuff)
+ assert(R.status_code == 200)
+ assert "x-onap-requestid" in R.headers
+
+ R = controller.bind_all("test_resolve_scn")
+ assert(json.loads(R.data) == {"config": expected_config})
+ assert(R.status_code == 200)
+
+ R = controller.bind_all("scn_NOTexists")
+ assert(R.status_code == 404)
+ assert "x-onap-requestid" in R.headers
+
+ R = controller.bind_all("asdfasdf")
+ assert(R.status_code == 500)
diff --git a/app/app/tox.ini b/app/app/tox.ini
new file mode 100644
index 0000000..c58161e
--- /dev/null
+++ b/app/app/tox.ini
@@ -0,0 +1,26 @@
+# content of tox.ini, put in the same dir as setup.py
+[tox]
+envlist = py36,flake8
+
+[testenv]
+deps=
+ pytest
+ coverage
+ pytest-cov
+setenv =
+ CONSUL_HOST = 8.8.8.8
+ HOSTNAME = config_binding_service
+ PYTHONPATH={toxinidir}
+commands=
+ pytest --junitxml xunit-results.xml --cov config_binding_service --cov-report xml --cov-report term
+ coverage xml -i
+
+[testenv:flake8]
+basepython = python3.6
+skip_install = true
+deps = flake8
+commands = flake8 setup.py config_binding_service tests
+
+[flake8]
+ignore = E501
+
diff --git a/app/pom.xml b/app/pom.xml
new file mode 100644
index 0000000..52387e6
--- /dev/null
+++ b/app/pom.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0"?>
+<!--
+================================================================================
+Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
+================================================================================
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+============LICENSE_END=========================================================
+
+ECOMP is a trademark and service mark of AT&T Intellectual Property.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.onap.dcaegen2.platform</groupId>
+ <artifactId>configbinding</artifactId>
+ <version>2.2.3-SNAPSHOT</version>
+ </parent>
+
+ <!--- CHANGE THE FOLLOWING 3 OBJECTS for your own repo -->
+ <groupId>org.onap.dcaegen2.platform.configbinding</groupId>
+ <artifactId>app</artifactId>
+ <name>dcaegen2-platform-configbinding-app</name>
+ <version>2.2.3-SNAPSHOT</version>
+ <url>http://maven.apache.org</url>
+
+ <packaging>pom</packaging>
+ <modules>
+ <module>app</module>
+ </modules>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <!-- customize the SONARQUBE URL -->
+ <sonar.host.url>http://localhost:9000</sonar.host.url>
+ <!-- taken care of in the children -->
+ </properties>
+ <build>
+ <finalName>${project.artifactId}-${project.version}</finalName>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>exec-maven-plugin</artifactId>
+ <version>1.2.1</version>
+ <configuration>
+ <executable>${session.executionRootDirectory}/mvn-phase-script.sh</executable>
+ <environmentVariables>
+ <!-- make mvn properties as env for our script -->
+ <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID>
+ <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID>
+ <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION>
+ <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY>
+ <MVN_RAWREPO_BASEURL_UPLOAD>${onap.nexus.rawrepo.baseurl.upload}</MVN_RAWREPO_BASEURL_UPLOAD>
+ <MVN_RAWREPO_BASEURL_DOWNLOAD>${onap.nexus.rawrepo.baseurl.download}</MVN_RAWREPO_BASEURL_DOWNLOAD>
+ <MVN_RAWREPO_SERVERID>${onap.nexus.rawrepo.serverid}</MVN_RAWREPO_SERVERID>
+ <MVN_DOCKERREGISTRY_SNAPSHOT>${onap.nexus.dockerregistry.snapshot}</MVN_DOCKERREGISTRY_SNAPSHOT>
+ <MVN_DOCKERREGISTRY_RELEASE>${onap.nexus.dockerregistry.release}</MVN_DOCKERREGISTRY_RELEASE>
+ <MVN_DOCKERREGISTRY_SNAPSHOT_SERVERID>${onap.nexus.dockerregistry.snapshot.serverid}</MVN_DOCKERREGISTRY_SNAPSHOT_SERVERID>
+ <MVN_DOCKERREGISTRY_RELEASE_SERVERID>${onap.nexus.dockerregistry.release.serverid}</MVN_DOCKERREGISTRY_RELEASE_SERVERID>
+ <MVN_PYPISERVER_BASEURL>${onap.nexus.pypiserver.baseurl}</MVN_PYPISERVER_BASEURL>
+ <MVN_PYPISERVER_SERVERID>${onap.nexus.pypiserver.serverid}</MVN_PYPISERVER_SERVERID>
+ </environmentVariables>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+ </build>
+
+</project>
diff --git a/app/uwsgi.ini b/app/uwsgi.ini
new file mode 100644
index 0000000..f514897
--- /dev/null
+++ b/app/uwsgi.ini
@@ -0,0 +1,3 @@
+[uwsgi]
+module = app.main
+callable = app
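uWSGI imports the module named by "module" and serves the WSGI object named by "callable", so app/main.py must expose the application object under the name "app". That file is not shown in this hunk; the sketch below is only a guess at its shape, assuming the connexion setup used elsewhere in this project:

    # Illustrative sketch of app/main.py; the real file is not part of this hunk.
    import connexion

    cnx_app = connexion.App(__name__, specification_dir=".")
    cnx_app.add_api("swagger.yaml", arguments={"title": "Config Binding Service"})

    # uwsgi.ini names module "app.main" and callable "app", so the underlying
    # Flask/WSGI application must be bound to a module-level name "app".
    app = cnx_app.app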
diff --git a/bin/run.py b/bin/run.py
deleted file mode 100755
index 7d30a00..0000000
--- a/bin/run.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python3
-
-import connexion
-import sys
-from config_binding_service import get_logger
-
-_logger = get_logger(__name__)
-
-if __name__ == '__main__':
- try:
- app = connexion.App(__name__, specification_dir='../config_binding_service/swagger/')
- app.add_api('swagger.yaml', arguments={'title': 'Config Binding Service'})
- app.run(host='0.0.0.0', port=10000, debug=False)
- except Exception as e:
- _logger.error("Fatal error. Could not start webserver due to: {0}".format(e))
diff --git a/config_binding_service/client.py b/config_binding_service/client.py
deleted file mode 100644
index 02354ee..0000000
--- a/config_binding_service/client.py
+++ /dev/null
@@ -1,181 +0,0 @@
-# ============LICENSE_START=======================================================
-# org.onap.dcae
-# ================================================================================
-# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-import re
-import requests
-import copy
-import base64
-import json
-import six
-from config_binding_service import get_consul_uri, get_logger
-from functools import partial, reduce
-
-_logger = get_logger(__name__)
-CONSUL = get_consul_uri()
-
-template_match_rels = re.compile("\{{2}([^\}\{]*)\}{2}")
-template_match_dmaap = re.compile("<{2}([^><]*)>{2}")
-
-###
-# Cusom Exception
-###
-class CantGetConfig(Exception):
- def __init__(self, code, response):
- self.code = code
- self.response = response
-###
-# Private Functions
-###
-def _consul_get_key(key):
- """
- Try to fetch a key from Consul.
- No error checking here, let caller deal with it
- """
- _logger.info("Fetching {0}".format(key))
- response = requests.get("{0}/v1/kv/{1}".format(CONSUL, key))
- response.raise_for_status()
- D = json.loads(response.text)[0]
- return json.loads(base64.b64decode(D["Value"]).decode("utf-8"))
-
-def _get_config_rels_dmaap(service_component_name):
- try:
- config = _consul_get_key(service_component_name) #not ok if no config
- except requests.exceptions.HTTPError as e:
- #might be a 404, or could be not even able to reach consul (503?), bubble up the requests error
- raise CantGetConfig(e.response.status_code, e.response.text)
-
- rels = []
- dmaap = {}
- try: #Not all nodes have relationships, so catch the error here and return [] if so
- rels = _consul_get_key("{0}:rel".format(service_component_name))
- except requests.exceptions.HTTPError: #ok if no rels key, might just have dmaap key
- pass
- try:
- dmaap = _consul_get_key("{0}:dmaap".format(service_component_name))
- except requests.exceptions.HTTPError: #ok if no dmaap key
- pass
- return config, rels, dmaap
-
-def _get_connection_info_from_consul(service_component_name):
- """
- Call consul's catalog
- TODO: currently assumes there is only one service
-
- TODO: WARNING: FIXTHIS: CALLINTHENATIONALARMY:
- This tries to determine that a service_component_name is a cdap application by inspecting service_component_name and name munging. However, this would force all CDAP applications to have cdap_app in their name. A much better way to do this is to do some kind of catalog_lookup here, OR MAYBE change this API so that the component_type is passed in somehow. THis is a gaping TODO.
- """
- _logger.info("Retrieving connection information for {0}".format(service_component_name))
- res = requests.get("{0}/v1/catalog/service/{1}".format(CONSUL, service_component_name))
- res.raise_for_status()
- services = res.json()
- if services == []:
- _logger.info("Warning: config and rels keys were both valid, but there is no component named {0} registered in Consul!".format(service_component_name))
- return None #later will get filtered out
- else:
- ip = services[0]["ServiceAddress"]
- port = services[0]["ServicePort"]
- if "cdap_app" in service_component_name:
- redirectish_url = "http://{0}:{1}/application/{2}".format(ip, port, service_component_name)
- _logger.info("component is a CDAP application; trying the broker redirect on {0}".format(redirectish_url))
- r = requests.get(redirectish_url)
- r.raise_for_status()
- details = r.json()
- # Pick out the details to expose to the component developers. These keys come from the broker API
- return { key: details[key] for key in ["connectionurl", "serviceendpoints"] }
- else:
- return "{0}:{1}".format(ip, port)
-
-def _replace_rels_template(rels, template_identifier):
- """
- The magic. Replaces a template identifier {{...}} with the entrie(s) from the rels keys
- NOTE: There was a discussion over whether the CBS should treat {{}} as invalid. Mike asked that
- it resolve to the empty list. So, it does resolve it to empty list.
- """
- returnl = []
- for r in rels:
- if template_identifier in r and template_identifier is not "":
- returnl.append(r)
- #returnl now contains a list of DNS names (possible empty), now resolve them (or not if they are not regustered)
- return list(filter(lambda x: x is not None, map(_get_connection_info_from_consul, returnl)))
-
-def _replace_dmaap_template(dmaap, template_identifier):
- """
- This one liner could have been just put inline in the caller but maybe this will get more complex in future
- Talked to Mike, default value if key is not found in dmaap key should be {}
- """
- return {} if (template_identifier not in dmaap or template_identifier == "<<>>") else dmaap[template_identifier]
-
-def _replace_value(v, rels, dmaap):
- """
- Takes a value v that was some value in the templatized configuration, determines whether it needs replacement (either {{}} or <<>>), and if so, replaces it.
- Otherwise just returns v
-
- implementation notes:
- - the split below sees if we have v = x,y,z... so we can support {{x,y,z,....}}
- - the lambda is because we can't fold operators in Python, wanted fold(+, L) where + when applied to lists in python is list concatenation
- """
- if isinstance(v, six.string_types): #do not try to replace anything that is not a string
- match_on_rels = re.match(template_match_rels, v)
- if match_on_rels:
- template_identifier = match_on_rels.groups()[0].strip() #now holds just x,.. of {{x,...}}
- rtpartial = partial(_replace_rels_template, rels)
- return reduce(lambda a,b: a+b, map(rtpartial, template_identifier.split(",")), [])
- match_on_dmaap = re.match(template_match_dmaap, v)
- if match_on_dmaap:
- template_identifier = match_on_dmaap.groups()[0].strip()
- """
- Here is what Mike said:
- 1) want simple replacement of "<< >>" with dmaap key value
- 2) never need to support <<f1,f2>> whereas we do support {{sct1,sct2}}
- The consequence is that if you give the CBS a dmaap key like {"foo" : {...}} you are going to get back {...}, but rels always returns [...].
- So now component developers have to possible handle dicts and [], and we have to communicate that to them
- """
- return _replace_dmaap_template(dmaap, template_identifier)
- return v #was not a match or was not a string, return value as is
-
-def _recurse(config, rels, dmaap):
- for key in config:
- v = config[key]
- if isinstance(v, list):
- replacement = [_recurse(item, rels, dmaap) for item in v]
- elif isinstance(v,dict):
- replacement = _recurse(v, rels, dmaap)
- else:
- replacement = _replace_value(config[key], rels, dmaap)
- config[key] = replacement
- return config
-
-#########
-# PUBLIC API
-#########
-def resolve(service_component_name):
- """
- Return the bound config of service_component_name
- """
- config, rels, dmaap = _get_config_rels_dmaap(service_component_name)
- _logger.info("Fetching {0}: config={1}, rels={2}".format(service_component_name, json.dumps(config), rels))
- return _recurse(config, rels, dmaap)
-
-def resolve_override(config, rels=[], dmaap={}):
- """
- Explicitly take in a config, rels, dmaap and try to resolve it.
- Useful for testing where you dont want to put the test values in consul
- """
- #use deepcopy to make sure that config is not touched
- return _recurse(copy.deepcopy(config), rels, dmaap)
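The docstrings in the module removed above describe the two template forms the CBS resolves: a {{...}} identifier is matched against the :rels list and replaced with connection info looked up in Consul (always a list, possibly empty), while a <<...>> identifier is replaced with the matching entry of the :dmaap key (or {} when absent). A hedged illustration of resolve_override, with made-up names, consistent with the tests earlier in this change:

    from config_binding_service import client

    config = {"peer": "{{some_service}}",   # resolved via the rels list + Consul catalog
              "feed": "<<my_feed>>",        # resolved via the dmaap dict
              "timeout": 30}                # non-template values pass through unchanged
    rels = ["some_service.example.somedomain.com"]                         # made-up rels entry
    dmaap = {"my_feed": {"topic_url": "http://dmaap.example/events/FOO"}}  # made-up dmaap key

    bound = client.resolve_override(config, rels, dmaap)
    # bound["peer"] is a list of "ip:port" strings for matching services registered
    # in Consul (empty if nothing matching is registered), bound["feed"] is the
    # dmaap value, and bound["timeout"] is still 30.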
diff --git a/config_binding_service/controller.py b/config_binding_service/controller.py
deleted file mode 100644
index a74d60f..0000000
--- a/config_binding_service/controller.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# ============LICENSE_START=======================================================
-# org.onap.dcae
-# ================================================================================
-# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-from config_binding_service import client, get_consul_uri, get_logger
-import requests
-from flask import request, Response
-import json
-
-def bind_config_for_scn(service_component_name):
- try:
- bound = client.resolve(service_component_name)
- return Response(response=json.dumps(bound),
- status=200,
- mimetype="application/json")
- except client.CantGetConfig as e:
- return Response(status=e.code,
- response=e.response)
- except Exception as e: #should never happen...
- _logger.error(e)
- return Response(response="Please report this error",
- status=500)
-
-def healthcheck():
- #got this far, I must be alive... check my connection to Consul by checking myself
- CONSUL = get_consul_uri()
- res = requests.get("{0}/v1/catalog/service/config_binding_service".format(CONSUL))
- if res.status_code == 200:
- return Response(response = "CBS is alive and Consul connection OK",
- status = 200)
- else:
- return Response(response = "CBS is alive but cannot reach Consul",
- status = 503)
-
diff --git a/config_binding_service/swagger/swagger.yaml b/config_binding_service/swagger/swagger.yaml
deleted file mode 100644
index 208e441..0000000
--- a/config_binding_service/swagger/swagger.yaml
+++ /dev/null
@@ -1,33 +0,0 @@
----
-swagger: "2.0"
-info:
- version: "1.0.0"
- title: "Config Binding Service"
-paths:
- /service_component/{service_component_name}:
- parameters:
- - name: "service_component_name"
- in: "path"
- description: "Service Component Name. service_component_name and service_component_name:rels must be keys in consul."
- required: true
- type: "string"
- get:
- description: "Binds the configuration for service_component_name and returns the bound configuration as a JSON"
- operationId: "config_binding_service.controller.bind_config_for_scn"
- responses:
- 200:
- description: OK; the bound config is returned as an object
- schema:
- type: object
- 404:
- description: there is no configuration in Consul for this component
- /healthcheck:
- get:
- description: "This is the health check endpoint. If this returns a 200, the server is alive and consul can be reached. If not a 200, either dead, or no connection to consul"
- operationId: "config_binding_service.controller.healthcheck"
- parameters: []
- responses:
- 200:
- description: Successful response
- 503:
- description: the config binding service cannot reach Consul
diff --git a/doc/cbs_diagram.png b/doc/cbs_diagram.png
deleted file mode 100644
index 67287d0..0000000
--- a/doc/cbs_diagram.png
+++ /dev/null
Binary files differ
diff --git a/mvn-phase-script.sh b/mvn-phase-script.sh
index abfa93c..86a0dd6 100755
--- a/mvn-phase-script.sh
+++ b/mvn-phase-script.sh
@@ -1,7 +1,7 @@
#!/bin/bash
# ================================================================================
-# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,8 +15,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+set -ex
echo "running script: [$0] for module [$1] at stage [$2]"
@@ -24,280 +24,31 @@ echo "running script: [$0] for module [$1] at stage [$2]"
MVN_PROJECT_MODULEID="$1"
MVN_PHASE="$2"
+PROJECT_ROOT=$(dirname $0)
-FQDN="${MVN_PROJECT_GROUPID}.${MVN_PROJECT_ARTIFACTID}"
-if [ "$MVN_PROJECT_MODULEID" == "__" ]; then
- MVN_PROJECT_MODULEID=""
-fi
-
-if [[ "$MVN_PROJECT_VERSION" == *SNAPSHOT ]]; then
- echo "=> for SNAPSHOT artifact build"
- MVN_DEPLOYMENT_TYPE='SNAPSHOT'
+set -e
+RELEASE_TAG=${MVN_RELEASE_TAG:-R3}
+if [ "$RELEASE_TAG" != "R1" ]; then
+ RELEASE_TAGGED_DIR="${RELEASE_TAG}/"
else
- echo "=> for STAGING/RELEASE artifact build"
- MVN_DEPLOYMENT_TYPE='STAGING'
+ RELEASE_TAGGED_DIR="releases"
fi
-echo "MVN_DEPLOYMENT_TYPE is [$DEPLOYMENT_TYPE]"
-
-
-TIMESTAMP=$(date +%C%y%m%dT%H%M%S)
-
-# expected environment variables
-if [ -z "${MVN_NEXUSPROXY}" ]; then
- echo "MVN_NEXUSPROXY environment variable not set. Cannot proceed"
- exit
+if ! wget -O ${PROJECT_ROOT}/mvn-phase-lib.sh \
+ "$MVN_RAWREPO_BASEURL_DOWNLOAD"/org.onap.dcaegen2.utils/${RELEASE_TAGGED_DIR}scripts/mvn-phase-lib.sh; then
+ echo "Fail to download mvn-phase-lib.sh"
+ exit 1
fi
-MVN_NEXUSPROXY_HOST=$(echo "$MVN_NEXUSPROXY" |cut -f3 -d'/' | cut -f1 -d':')
-echo "=> Nexus Proxy at $MVN_NEXUSPROXY_HOST, $MVN_NEXUSPROXY"
-
-if [ -z "$WORKSPACE" ]; then
- WORKSPACE=$(pwd)
-fi
-
-# mvn phase in life cycle
-MVN_PHASE="$2"
-
-
-echo "MVN_PROJECT_MODULEID is [$MVN_PROJECT_MODULEID]"
-echo "MVN_PHASE is [$MVN_PHASE]"
-echo "MVN_PROJECT_GROUPID is [$MVN_PROJECT_GROUPID]"
-echo "MVN_PROJECT_ARTIFACTID is [$MVN_PROJECT_ARTIFACTID]"
-echo "MVN_PROJECT_VERSION is [$MVN_PROJECT_VERSION]"
-echo "MVN_NEXUSPROXY is [$MVN_NEXUSPROXY]"
-echo "MVN_RAWREPO_BASEURL_UPLOAD is [$MVN_RAWREPO_BASEURL_UPLOAD]"
-echo "MVN_RAWREPO_BASEURL_DOWNLOAD is [$MVN_RAWREPO_BASEURL_DOWNLOAD]"
-MVN_RAWREPO_HOST=$(echo "$MVN_RAWREPO_BASEURL_UPLOAD" | cut -f3 -d'/' |cut -f1 -d':')
-echo "MVN_RAWREPO_HOST is [$MVN_RAWREPO_HOST]"
-echo "MVN_RAWREPO_SERVERID is [$MVN_RAWREPO_SERVERID]"
-echo "MVN_DOCKERREGISTRY_DAILY is [$MVN_DOCKERREGISTRY_DAILY]"
-echo "MVN_DOCKERREGISTRY_RELEASE is [$MVN_DOCKERREGISTRY_RELEASE]"
-
-expand_templates()
-{
- # set up env variables, get ready for template resolution
- export ONAPTEMPLATE_RAWREPOURL_org_onap_ccsdk_platform_plugins_releases="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.ccsdk.plugins/releases"
- export ONAPTEMPLATE_RAWREPOURL_org_onap_ccsdk_platform_plugins_snapshots="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.ccsdk.plugins/snapshots"
- export ONAPTEMPLATE_RAWREPOURL_org_onap_ccsdk_platform_blueprints_releases="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.ccsdk.blueprints/releases"
- export ONAPTEMPLATE_RAWREPOURL_org_onap_ccsdk_platform_blueprints_snapshots="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.ccsdk.blueprints/snapshots"
- export ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_releases="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.dcaegen2/releases"
- export ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_snapshots="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.dcaegen2/snapshots"
- export ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_platform_plugins_releases="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.dcaegen2.platform.plugins/releases"
- export ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_platform_plugins_snapshots="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.dcaegen2.platform.plugins/snapshots"
- export ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_platform_blueprints_releases="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.dcaegen2.platform.blueprints/releases"
- export ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2_platform_blueprints_snapshots="$MVN_RAWREPO_BASEURL_DOWNLOAD/org.onap.dcaegen2.platform.blueprints/snapshots"
-
- export ONAPTEMPLATE_PYPIURL_org_onap_dcaegen2="${MVN_NEXUSPROXY}/content/sites/pypi"
-
- export ONAPTEMPLATE_DOCKERREGURL_org_onap_dcaegen2_releases="$MVN_DOCKERREGISTRY_DAILY"
- export ONAPTEMPLATE_DOCKERREGURL_org_onap_dcaegen2_snapshots="$MVN_DOCKERREGISTRY_DAILY/snapshots"
-
-
- TEMPLATES=$(env |grep ONAPTEMPLATE)
- echo "====> Resolving the following temaplate from environment variables "
- echo "[$TEMPLATES]"
- SELFFILE=$(echo "$0" | rev | cut -f1 -d '/' | rev)
- for TEMPLATE in $TEMPLATES; do
- KEY=$(echo "$TEMPLATE" | cut -f1 -d'=')
- VALUE=$(echo "$TEMPLATE" | cut -f2 -d'=')
- VALUE2=$(echo "$TEMPLATE" | cut -f2 -d'=' |sed 's/\//\\\//g')
- FILES=$(grep -rl "$KEY")
-
- # assuming FILES is not longer than 2M bytes, the limit for variable value max size on this VM
- for F in $FILES; do
- if [[ $F == *"$SELFFILE" ]]; then
- continue
- fi
- echo "======> Resolving template $KEY to value $VALUE for file $F"
- sed -i "s/{{[[:space:]]*$KEY[[:space:]]*}}/$VALUE2/g" "$F"
-
- #cat "$F"
- done
-
- #if [ ! -z "$FILES" ]; then
- # echo "====> Resolving template $VALUE to value $VALUE"
- # #CMD="grep -rl \"$VALUE\" | tr '\n' '\0' | xargs -0 sed -i \"s/{{[[:space:]]*$VALUE[[:space:]]*}}/$VALUE/g\""
- # grep -rl "$KEY" | tr '\n' '\0' | xargs -0 sed -i 's/$KEY/$VALUE2/g'
- # #echo $CMD
- # #eval $CMD
- #fi
- done
- echo "====> Done template reolving"
-}
-
-
-run_tox_test()
-{
- set -x
- CURDIR=$(pwd)
- TOXINIS=$(find . -name "tox.ini")
- for TOXINI in "${TOXINIS[@]}"; do
- DIR=$(echo "$TOXINI" | rev | cut -f2- -d'/' | rev)
- cd "${CURDIR}/${DIR}"
- rm -rf ./venv-tox ./.tox
- virtualenv ./venv-tox
- source ./venv-tox/bin/activate
- pip install --upgrade pip
- pip install --upgrade tox argparse
- pip freeze
- tox
- deactivate
- rm -rf ./venv-tox ./.tox
- done
-}
-
-build_wagons()
-{
- rm -rf ./*.wgn venv-pkg
-
- SETUPFILES=$(find . -name "setup.py")
- for SETUPFILE in $SETUPFILES; do
- PLUGIN_DIR=$(echo "$SETUPFILE" |rev | cut -f 2- -d '/' |rev)
- PLUGIN_NAME=$(grep 'name' "$SETUPFILE" | cut -f2 -d'=' | sed 's/[^0-9a-zA-Z\.]*//g')
- PLUGIN_VERSION=$(grep 'version' "$SETUPFILE" | cut -f2 -d'=' | sed 's/[^0-9\.]*//g')
-
- echo "In $PLUGIN_DIR, $PLUGIN_NAME, $PLUGIN_VERSION"
-
- virtualenv ./venv-pkg
- source ./venv-pkg/bin/activate
- pip install --upgrade pip
- pip install wagon
- wagon create --format tar.gz "$PLUGIN_DIR"
- deactivate
- rm -rf venv-pkg
-
- PKG_FILE_NAMES=( "${PLUGIN_NAME}-${PLUGIN_VERSION}"*.wgn )
- echo Built package: "${PKG_FILE_NAMES[@]}"
- done
-}
-
-
-upload_raw_file()
-{
- # Extract the username and password to the nexus repo from the settings file
- USER=$(xpath -q -e "//servers/server[id='$MVN_RAWREPO_SERVERID']/username/text()" "$SETTINGS_FILE")
- PASS=$(xpath -q -e "//servers/server[id='$MVN_RAWREPO_SERVERID']/password/text()" "$SETTINGS_FILE")
- NETRC=$(mktemp)
- echo "machine $MVN_RAWREPO_HOST login $USER password $PASS" > "$NETRC"
-
- REPO="$MVN_RAWREPO_BASEURL_UPLOAD"
-
- OUTPUT_FILE=$1
- EXT=$(echo "$OUTPUT_FILE" | rev |cut -f1 -d '.' |rev)
- if [ "$EXT" == 'yaml' ]; then
- OUTPUT_FILE_TYPE='text/x-yaml'
- elif [ "$EXT" == 'gz' ]; then
- OUTPUT_FILE_TYPE='application/gzip'
- elif [ "$EXT" == 'wgn' ]; then
- OUTPUT_FILE_TYPE='application/gzip'
- else
- OUTPUT_FILE_TYPE='application/octet-stream'
- fi
-
-
- if [ "$MVN_DEPLOYMENT_TYPE" == 'SNAPSHOT' ]; then
- SEND_TO="${REPO}/${FQDN}/snapshots"
- elif [ "$MVN_DEPLOYMENT_TYPE" == 'STAGING' ]; then
- SEND_TO="${REPO}/${FQDN}/releases"
- else
- echo "Unreconfnized deployment type, quit"
- exit
- fi
-
- echo "Sending ${OUTPUT_FILE} to Nexus: ${SEND_TO}"
- curl -vkn --netrc-file "${NETRC}" --upload-file "${OUTPUT_FILE}" -X PUT -H "Content-Type: $OUTPUT_FILE_TYPE" "${SEND_TO}/${OUTPUT_FILE}-${MVN_PROJECT_VERSION}-${TIMESTAMP}"
- curl -vkn --netrc-file "${NETRC}" --upload-file "${OUTPUT_FILE}" -X PUT -H "Content-Type: $OUTPUT_FILE_TYPE" "${SEND_TO}/${OUTPUT_FILE}-${MVN_PROJECT_VERSION}"
- curl -vkn --netrc-file "${NETRC}" --upload-file "${OUTPUT_FILE}" -X PUT -H "Content-Type: $OUTPUT_FILE_TYPE" "${SEND_TO}/${OUTPUT_FILE}"
-}
-
-
-
-upload_wagons_and_type_yamls()
-{
- WAGONS=$(ls -1 ./*.wgn)
- for WAGON in "${WAGONS[@]}" ; do
- WAGON_NAME=$(echo "$WAGON" | cut -f1 -d '-')
- WAGON_VERSION=$(echo "$WAGON" | cut -f2 -d '-')
- WAGON_TYPEFILE=$(grep -rl "$WAGON_NAME" | grep yaml | head -1)
-
- upload_raw_file "$WAGON"
- upload_raw_file "$WAGON_TYPEFILE"
- done
-}
-
-
-
-build_and_push_docker()
-{
- IMAGENAME="onap/${FQDN}.${MVN_PROJECT_MODULEID}"
- IMAGENAME=$(echo "$IMAGENAME" | sed -e 's/_*$//g' -e 's/\.*$//g')
-
- # use the major and minor version of the MVN artifact version as docker image version
- VERSION="${MVN_PROJECT_VERSION//[^0-9.]/}"
- VERSION2=$(echo "$VERSION" | cut -f1-2 -d'.')
-
- LFQI="${IMAGENAME}:${VERSION}-${TIMESTAMP}"
- BUILD_PATH="${WORKSPACE}"
- # build a docker image
- docker build --rm -f "${WORKSPACE}"/Dockerfile -t "${LFQI}" "${BUILD_PATH}"
-
- REPO=""
- if [ $MVN_DEPLOYMENT_TYPE == "SNAPSHOT" ]; then
- REPO=$MVN_DOCKERREGISTRY_DAILY
- elif [ $MVN_DEPLOYMENT_TYPE == "STAGING" ]; then
- # there seems to be no staging docker registry? set to use SNAPSHOT also
- #REPO=$MVN_DOCKERREGISTRY_RELEASE
- REPO=$MVN_DOCKERREGISTRY_DAILY
- else
- echo "Fail to determine DEPLOYMENT_TYPE"
- REPO=$MVN_DOCKERREGISTRY_DAILY
- fi
- echo "DEPLOYMENT_TYPE is: $MVN_DEPLOYMENT_TYPE, repo is $REPO"
-
- if [ ! -z "$REPO" ]; then
- USER=$(xpath -e "//servers/server[id='$REPO']/username/text()" "$SETTINGS_FILE")
- PASS=$(xpath -e "//servers/server[id='$REPO']/password/text()" "$SETTINGS_FILE")
- if [ -z "$USER" ]; then
- echo "Error: no user provided"
- fi
- if [ -z "$PASS" ]; then
- echo "Error: no password provided"
- fi
- [ -z "$PASS" ] && PASS_PROVIDED="<empty>" || PASS_PROVIDED="<password>"
- echo docker login "$REPO" -u "$USER" -p "$PASS_PROVIDED"
- docker login "$REPO" -u "$USER" -p "$PASS"
-
- if [ $MVN_DEPLOYMENT_TYPE == "SNAPSHOT" ]; then
- REPO="$REPO/snapshots"
- elif [ $MVN_DEPLOYMENT_TYPE == "STAGING" ]; then
- # there seems to be no staging docker registry? set to use SNAPSHOT also
- #REPO=$MVN_DOCKERREGISTRY_RELEASE
- REPO="$REPO"
- else
-      echo "Failed to determine DEPLOYMENT_TYPE"
- REPO="$REPO/unknown"
- fi
-
- OLDTAG="${LFQI}"
- PUSHTAGS="${REPO}/${IMAGENAME}:${VERSION2}-${TIMESTAMP} ${REPO}/${IMAGENAME}:${VERSION2} ${REPO}/${IMAGENAME}:${VERSION2}-latest"
- for NEWTAG in ${PUSHTAGS}
- do
- echo "tagging ${OLDTAG} to ${NEWTAG}"
- docker tag "${OLDTAG}" "${NEWTAG}"
- echo "pushing ${NEWTAG}"
- docker push "${NEWTAG}"
- OLDTAG="${NEWTAG}"
- done
- fi
-
-}
-
-
-
+source "${PROJECT_ROOT}"/mvn-phase-lib.sh
+# Customize the section below for each project
case $MVN_PHASE in
clean)
echo "==> clean phase script"
- rm -rf ./venv-*
+ clean_templated_files
+ clean_tox_files
+ rm -rf ./venv-* ./*.wgn ./site
;;
generate-sources)
echo "==> generate-sources phase script"
@@ -318,11 +69,8 @@ install)
;;
deploy)
echo "==> deploy phase script"
- #upload_wagons_and_type_yamls
- build_and_push_docker
;;
*)
echo "==> unprocessed phase"
;;
esac
-
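For context, the phase dispatch above can also be exercised outside of Maven; a minimal sketch, assuming the wrapper maps its second positional argument to MVN_PHASE (as the exec-maven-plugin arguments removed from pom.xml below suggest) and that the usual MVN_* variables are exported:

    # illustrative local run of the clean phase from the checkout root
    export WORKSPACE=$(pwd)
    export PROJECT_ROOT=$(pwd)
    ./mvn-phase-script.sh __ clean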
diff --git a/nginxhttps.conf b/nginxhttps.conf
new file mode 100644
index 0000000..9795f19
--- /dev/null
+++ b/nginxhttps.conf
@@ -0,0 +1,17 @@
+server {
+ listen 443 ssl;
+
+ location / {
+ try_files $uri @app;
+ }
+ location @app {
+ include uwsgi_params;
+ uwsgi_pass unix:///tmp/uwsgi.sock;
+ }
+ location /static {
+ alias /app/static;
+ }
+ server_name configbinding;
+ ssl_certificate /etc/nginx/ssl/nginx.crt;
+ ssl_certificate_key /etc/nginx/ssl/nginx.key;
+}
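The server block above expects a key and certificate under /etc/nginx/ssl/; a self-signed pair matching those paths can be produced along these lines (illustrative only; the CN simply mirrors the server_name above):

    # generate a throwaway self-signed key/cert at the paths nginxhttps.conf references
    openssl req -x509 -nodes -days 365 -newkey rsa:2048 \
        -subj "/CN=configbinding" \
        -keyout /etc/nginx/ssl/nginx.key \
        -out /etc/nginx/ssl/nginx.crt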
diff --git a/pom.xml b/pom.xml
index aaeca9d..2eef1f0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -21,116 +21,33 @@ ECOMP is a trademark and service mark of AT&T Intellectual Property.
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.onap.dcaegen2</groupId>
+ <groupId>org.onap</groupId>
<artifactId>dcaegen2</artifactId>
- <version>1.0.0-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<!--- CHANGE THE FOLLOWING 3 OBJECTS for your own repo -->
<groupId>org.onap.dcaegen2.platform</groupId>
<artifactId>configbinding</artifactId>
<name>dcaegen2-platform-configbinding</name>
- <version>1.2.0</version>
+ <version>2.2.3-SNAPSHOT</version>
<url>http://maven.apache.org</url>
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <!-- sonar -->
- <sonar.skip>false</sonar.skip>
- <sonar.sources>.</sonar.sources>
- <sonar.junit.reportspath>xunit-reports/xunit-result-configbinding.xml</sonar.junit.reportspath>
- <!--
- <sonar.python.coverage.reportpath>coverage.xml</sonar.python.coverage.reportpath>
- see https://docs.sonarqube.org/display/plug/python+coverage+results+import
- ant pattern describing the path to coverage reports, relative to projects root. leave unset to use the default ("coverage-reports/coverage-*.xml").
- -->
- <sonar.language>py</sonar.language>
- <sonar.pluginname>python</sonar.pluginname>
- <sonar.inclusions>config_binding_service/**.py</sonar.inclusions>
- <sonar.host.url>http://135.205.228.63:9000</sonar.host.url>
+ <packaging>pom</packaging>
+ <modules>
+ <module>app</module>
+ </modules>
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <!-- customize the SONARQUBE URL -->
+ <sonar.host.url>http://localhost:9000</sonar.host.url>
+ <!-- taken care of in the children -->
+ <sonar.exclusions>**</sonar.exclusions>
</properties>
<build>
<finalName>${project.artifactId}-${project.version}</finalName>
<pluginManagement>
<plugins>
- <!-- the following plugins are invoked from oparent, we do not need them -->
- <plugin>
- <groupId>org.sonatype.plugins</groupId>
- <artifactId>nexus-staging-maven-plugin</artifactId>
- <version>1.6.7</version>
- <configuration>
- <skipNexusStagingDeployMojo>true</skipNexusStagingDeployMojo>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-deploy-plugin</artifactId>
- <!-- This version supports the "deployAtEnd" parameter -->
- <version>2.8</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-enforcer-plugin</artifactId>
- <version>3.0.0-M1</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <!-- first disable the default Java plugins at various stages -->
- <!-- maven-resources-plugin is called during "*resource" phases by default behavior. it prepares the resources
- dir. we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-resources-plugin</artifactId>
- <version>2.6</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <!-- maven-compiler-plugin is called during "compile" phases by default behavior. we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <version>3.1</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <!-- maven-jar-plugin is called during "compile" phase by default behavior. we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- <version>2.4</version>
- <executions>
- <execution>
- <id>default-jar</id>
- <phase/>
- </execution>
- </executions>
- </plugin>
- <!-- maven-install-plugin is called during "install" phase by default behavior. it tries to copy stuff under
- target dir to ~/.m2. we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-install-plugin</artifactId>
- <version>2.4</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <!-- maven-surefire-plugin is called during "test" phase by default behavior. it triggers junit test.
- we do not need it -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <version>2.12.4</version>
- <configuration>
- <skipTests>true</skipTests>
- </configuration>
- </plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
@@ -139,6 +56,6 @@ ECOMP is a trademark and service mark of AT&T Intellectual Property.
<executable>${session.executionRootDirectory}/mvn-phase-script.sh</executable>
<environmentVariables>
<!-- make mvn properties as env for our script -->
<MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID>
<MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID>
<MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION>
@@ -146,131 +64,17 @@ ECOMP is a trademark and service mark of AT&T Intellectual Property.
<MVN_RAWREPO_BASEURL_UPLOAD>${onap.nexus.rawrepo.baseurl.upload}</MVN_RAWREPO_BASEURL_UPLOAD>
<MVN_RAWREPO_BASEURL_DOWNLOAD>${onap.nexus.rawrepo.baseurl.download}</MVN_RAWREPO_BASEURL_DOWNLOAD>
<MVN_RAWREPO_SERVERID>${onap.nexus.rawrepo.serverid}</MVN_RAWREPO_SERVERID>
- <MVN_DOCKERREGISTRY_DAILY>${onap.nexus.dockerregistry.daily}</MVN_DOCKERREGISTRY_DAILY>
+ <MVN_DOCKERREGISTRY_SNAPSHOT>${onap.nexus.dockerregistry.snapshot}</MVN_DOCKERREGISTRY_SNAPSHOT>
<MVN_DOCKERREGISTRY_RELEASE>${onap.nexus.dockerregistry.release}</MVN_DOCKERREGISTRY_RELEASE>
+ <MVN_DOCKERREGISTRY_SNAPSHOT_SERVERID>${onap.nexus.dockerregistry.snapshot.serverid}</MVN_DOCKERREGISTRY_SNAPSHOT_SERVERID>
+ <MVN_DOCKERREGISTRY_RELEASE_SERVERID>${onap.nexus.dockerregistry.release.serverid}</MVN_DOCKERREGISTRY_RELEASE_SERVERID>
+ <MVN_PYPISERVER_BASEURL>${onap.nexus.pypiserver.baseurl}</MVN_PYPISERVER_BASEURL>
+ <MVN_PYPISERVER_SERVERID>${onap.nexus.pypiserver.serverid}</MVN_PYPISERVER_SERVERID>
</environmentVariables>
</configuration>
</plugin>
</plugins>
</pluginManagement>
- <plugins>
- <!-- plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <version>2.4.1</version>
- <configuration>
- <descriptors>
- <descriptor>assembly/dep.xml</descriptor>
- </descriptors>
- </configuration>
- <executions>
- <execution>
- <id>make-assembly</id>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin -->
- <!-- now we configure custom action (calling a script) at various lifecycle phases -->
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>exec-maven-plugin</artifactId>
- <version>1.2.1</version>
- <executions>
- <execution>
- <id>clean phase script</id>
- <phase>clean</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>__</argument>
- <argument>clean</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>generate-sources script</id>
- <phase>generate-sources</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>__</argument>
- <argument>generate-sources</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>compile script</id>
- <phase>compile</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>__</argument>
- <argument>compile</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>package script</id>
- <phase>package</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>__</argument>
- <argument>package</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>test script</id>
- <phase>test</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>__</argument>
- <argument>test</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>install script</id>
- <phase>install</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>__</argument>
- <argument>install</argument>
- </arguments>
- </configuration>
- </execution>
- <execution>
- <id>deploy script</id>
- <phase>deploy</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <arguments>
- <argument>__</argument>
- <argument>deploy</argument>
- </arguments>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
</build>
+
</project>
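Each Maven property listed in the environmentVariables block above is exported before mvn-phase-script.sh runs, so inside the script they appear as ordinary shell variables; a hedged illustration (the echo lines are not part of the script, the names are taken from the block above):

    # illustrative: these values arrive in mvn-phase-script.sh as environment variables
    echo "artifact:          ${MVN_PROJECT_ARTIFACTID} ${MVN_PROJECT_VERSION}"
    echo "snapshot registry: ${MVN_DOCKERREGISTRY_SNAPSHOT}"
    echo "pypi server:       ${MVN_PYPISERVER_BASEURL}"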
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 9aeb224..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Flask==0.12.2
-connexion==1.1.12
-requests==2.18.2
-six==1.10.0
diff --git a/tests/test_binding.py b/tests/test_binding.py
deleted file mode 100644
index 3bbe5d9..0000000
--- a/tests/test_binding.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# ============LICENSE_START=======================================================
-# org.onap.dcae
-# ================================================================================
-# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-from config_binding_service import client
-import pytest
-import json
-
-def monkeyed_get_connection_info_from_consul(service_component_name):
-    #shared monkeypatch. probably somewhat lazy because the function this patches can be broken up.
- if service_component_name == "cdap":
- return '666.666.666.666:666'
- elif service_component_name == "testing_bravo.somedomain.com":
- return '7.7.7.7:777'
- elif service_component_name == "testing_alpha.somedomain.com":
- return '6.6.6.6:666'
- elif service_component_name == "testing_charlie.somedomain.com":
- return '5.5.5.5:555'
- elif service_component_name == "nonexistent_hope":
- return None #the real function returns None here
- elif service_component_name == "cdap_serv.dcae.ecomp.somedomain.com":
- broker_ip = '1.1.1.1'
- broker_port = 444
- return "http://{0}:{1}/application/{2}".format(broker_ip, broker_port, service_component_name)
-
-def test_bad_config_http():
- test_config = {'yeahhhhh' : "{{}}"}
- test_rels = ["testing_bravo.somedomain.com"]
- assert {'yeahhhhh' : []} == client.resolve_override(test_config, test_rels)
-
-def test_bad_config_dmaap():
- test_config = {'darkness' : "<<>>"}
- test_dmaap = {"WHO?" : "darkness"}
- assert {'darkness' : {}} == client.resolve_override(test_config, test_dmaap)
-
-def test_config(monkeypatch):
- #test config override
- monkeypatch.setattr('config_binding_service.client._get_connection_info_from_consul', monkeyed_get_connection_info_from_consul)
- test_config = {"autoderegisterafter": "10m", "cdap_to_manage": {'some_nested_thing' : "{{cdap}}"}, "bindingttw": 5, "hcinterval": "5s"}
- test_rels = ["cdap"]
- test_bind_1 = client.resolve_override(test_config, test_rels)
- assert test_bind_1 == {'autoderegisterafter': '10m', 'cdap_to_manage': {'some_nested_thing': ['666.666.666.666:666']}, 'bindingttw': 5, 'hcinterval': '5s'}
-
-def test_non_existent(monkeypatch):
- #test a valid config-rels but the key is not in Consul
- monkeypatch.setattr('config_binding_service.client._get_connection_info_from_consul', monkeyed_get_connection_info_from_consul)
- test_config = {"you shall not be fufilled" : "{{nonexistent_hope}}"}
- test_rels = ["nonexistent_hope.rework-central.ecomp.somedomain.com"] #hopefully not registered in Consul..
- test_bind_1 = client.resolve_override(test_config, test_rels, {})
- assert(test_bind_1 == {"you shall not be fufilled" : []})
-
-def test_cdap(monkeypatch):
- #user override to test CDAP functionality
- monkeypatch.setattr('config_binding_service.client._get_connection_info_from_consul', monkeyed_get_connection_info_from_consul)
- test_rels = ["testing_alpha.somedomain.com", "testing_bravo.somedomain.com", "testing_charlie.somedomain.com", "testing_charlie.somedomain.com", "cdap"]
- test_config = { "streams_publishes" : "{{alpha}}",
- "services_calls" : [{"somekey" : "{{charlie}}"}], #should be dumped
- "cdap_to_manage": {'some_nested_thing' : "{{cdap}}"} #no dumps
- }
- test_bind_1 = client.resolve_override(test_config, test_rels)
- assert test_bind_1 == {'services_calls': [{"somekey": ["5.5.5.5:555", "5.5.5.5:555"]}], 'streams_publishes': ["6.6.6.6:666"], 'cdap_to_manage': {'some_nested_thing': ['666.666.666.666:666']}}
- assert test_bind_1['services_calls'] == [{"somekey" : ["5.5.5.5:555", "5.5.5.5:555"]}]
- assert test_bind_1['streams_publishes'] == ["6.6.6.6:666"]
-
-def test_broker_redirect(monkeypatch):
- #test the broker redirect
- monkeypatch.setattr('config_binding_service.client._get_connection_info_from_consul', monkeyed_get_connection_info_from_consul)
- test_config = {"gimmie_dat_cdap" : "{{cdap_serv}}"}
- test_rels = ["cdap_serv.dcae.ecomp.somedomain.com"]
- assert {"gimmie_dat_cdap" : ['http://1.1.1.1:444/application/cdap_serv.dcae.ecomp.somedomain.com']} == client.resolve_override(test_config, test_rels)
-
-def test_multiple_service_types(monkeypatch):
- #test {{x,y,z}}
- monkeypatch.setattr('config_binding_service.client._get_connection_info_from_consul', monkeyed_get_connection_info_from_consul)
-
-    #test 1: they all resolve
- test_rels = ["testing_alpha.somedomain.com", "testing_bravo.somedomain.com", "testing_charlie.somedomain.com", "testing_charlie.somedomain.com"]
- config = {"ALL YOUR SERVICE BELONG TO US" : "{{alpha,bravo,charlie}}"}
- test_bind_1 = client.resolve_override(config, test_rels)
- assert(test_bind_1 == {"ALL YOUR SERVICE BELONG TO US" : ['6.6.6.6:666', '7.7.7.7:777', '5.5.5.5:555', '5.5.5.5:555']})
-
- #test 2: two resolve, one is missing from rels key
- config2 = {"two there one not exist" : "{{alpha,bravo,notexist}}"}
- test_bind_2 = client.resolve_override(config2, test_rels)
- assert(test_bind_2 == {"two there one not exist" : ['6.6.6.6:666', '7.7.7.7:777']})
-
- #test 3: two resolve, one is in rels key but not registered
- config3 = {"two there one unregistered" : "{{alpha,bravo,unregistered}}"}
- test_rels3 = ["testing_alpha.somedomain.com", "testing_bravo.somedomain.com", "unregistered.somedomain.com"]
- test_bind_3 = client.resolve_override(config3, test_rels3)
- assert(test_bind_3 == {"two there one unregistered" : ['6.6.6.6:666', '7.7.7.7:777']})
-
-def test_dmaap(monkeypatch):
- #test resolving dmaap key
- config = {"TODAY IS YOUR LUCKY DAY" : "<<XXX>>"}
- #does not match
- test_bind = client.resolve_override(config, dmaap={"XX" : "ABSOLVEME"}) #XX != XXX
- assert(test_bind == {"TODAY IS YOUR LUCKY DAY" : {}})
- #matches
- test_bind_2 = client.resolve_override(config, dmaap={"XXX" : "ABSOLVEME"})
- assert(test_bind_2 == {"TODAY IS YOUR LUCKY DAY" : "ABSOLVEME"})
-
-
-def test_both(monkeypatch):
- #test rels and http
- monkeypatch.setattr('config_binding_service.client._get_connection_info_from_consul', monkeyed_get_connection_info_from_consul)
- test_rels = ["testing_alpha.somedomain.com", "testing_bravo.somedomain.com", "testing_charlie.somedomain.com", "testing_charlie.somedomain.com"]
- test_dmaap = {"WHO?" : "darkness"}
- config = {
- "deep" : {
- "ALL YOUR SERVICE BELONG TO US" : "{{alpha,bravo,charlie}}"},
- "doubledeep" : {
- "sodeep" : {"hello" : "<<WHO?>>"}}
- }
- test_bind_1 = client.resolve_override(config, test_rels, test_dmaap)
- expected_config = {
- "deep" : {
- "ALL YOUR SERVICE BELONG TO US" : ['6.6.6.6:666', '7.7.7.7:777', '5.5.5.5:555', '5.5.5.5:555']},
- "doubledeep" : {
- "sodeep" : {"hello" : "darkness"}}
- }
- assert(test_bind_1 == expected_config)
-
-
diff --git a/tox-local.ini b/tox-local.ini
index 978634d..be6fb90 100644
--- a/tox-local.ini
+++ b/tox-local.ini
@@ -1,15 +1,25 @@
+# tox-local.ini: local test configuration; the package root is app/app (see setupdir below)
[tox]
-envlist = py27,py36
+envlist = py36,flake8
+setupdir=app/app
[testenv]
deps=
- -rrequirements.txt
pytest
coverage
pytest-cov
-setenv =
+setenv =
CONSUL_HOST = 8.8.8.8
HOSTNAME = config_binding_service
-commands=pytest --cov {envsitepackagesdir}/config_binding_service --cov-report html
+commands=
+ pytest --verbose --cov config_binding_service --cov-report=html
+[testenv:flake8]
+basepython = python3.7
+skip_install = true
+deps = flake8
+commands = flake8 setup.py config_binding_service tests
+
+[flake8]
+ignore = E501,E265,E262,E261
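The configuration above is selected explicitly with tox's -c flag, for example:

    # run the py36 unit tests and the flake8 check defined in tox-local.ini
    tox -c tox-local.ini -e py36,flake8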
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index aeadddf..0000000
--- a/tox.ini
+++ /dev/null
@@ -1,14 +0,0 @@
-# content of: tox.ini, put in same dir as setup.py
-[tox]
-envlist = py27,py36
-
-[testenv]
-deps=
- -rrequirements.txt
- pytest
- coverage
- pytest-cov
-setenv =
- CONSUL_HOST = 8.8.8.8
- HOSTNAME = config_binding_service
-commands=pytest --junitxml xunit-reports/xunit-result-configbinding.xml --cov {envsitepackagesdir}/config_binding_service --cov-report=xml
diff --git a/version.properties b/version.properties
index 07578e5..e50aee3 100644
--- a/version.properties
+++ b/version.properties
@@ -1,6 +1,6 @@
-major=1
-minor=2
-patch=0
-base_version=${major}.${minor}.${patch}
-release_version=${base_version}
-snapshot_version=${base_version}-SNAPSHOT
+major=2
+minor=2
+patch=2
+base_version=${major}.${minor}.${patch}
+release_version=${base_version}
+snapshot_version=${base_version}-SNAPSHOT