summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--cdap/Changelog.md3
-rwxr-xr-xcdap/cdap_types.yaml13
-rw-r--r--cdap/cdapplugin/cdapcloudify/cdap_plugin.py92
-rw-r--r--cdap/cdapplugin/requirements.txt2
-rw-r--r--cdap/cdapplugin/setup.py7
-rw-r--r--cdap/demo_blueprints/cdap_hello_world.yaml33
-rw-r--r--docker/ChangeLog.md8
-rw-r--r--docker/docker-node-type.yaml19
-rw-r--r--docker/dockerplugin/discovery.py50
-rw-r--r--docker/dockerplugin/tasks.py45
-rw-r--r--docker/examples/blueprint-laika.yaml15
-rw-r--r--docker/setup.py2
-rw-r--r--docker/tests/test_discovery.py15
-rw-r--r--docker/tests/test_tasks.py36
14 files changed, 271 insertions, 69 deletions
diff --git a/cdap/Changelog.md b/cdap/Changelog.md
index fa505e4..b0006be 100644
--- a/cdap/Changelog.md
+++ b/cdap/Changelog.md
@@ -4,6 +4,9 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
+## [14.2.0]
+* Integrate with Policy handler. Policy handling for CDAP is done.
+
## [14.1.0]
* Merge the broker deleter function into here; no need for seperate plugin
diff --git a/cdap/cdap_types.yaml b/cdap/cdap_types.yaml
index 083d011..f27b4b1 100755
--- a/cdap/cdap_types.yaml
+++ b/cdap/cdap_types.yaml
@@ -6,7 +6,7 @@ plugins:
cdap_deploy:
executor: central_deployment_agent
package_name: cdapcloudify
- package_version: 14.1.0
+ package_version: 14.2.0
data_types:
cdap_connections:
@@ -68,6 +68,17 @@ node_types:
cdap_deploy.cdapcloudify.cdap_plugin.deploy_and_start_application
delete:
cdap_deploy.cdapcloudify.cdap_plugin.stop_and_undeploy_application
+
+ dcae.interfaces.policy:
+ policy_update:
+ implementation:
+ cdap_deploy.cdapcloudify.cdap_plugin.policy_update
+ inputs:
+ updated_policies:
+ description: "list of policy objects"
+ default: []
+
+ #TODO: These can probably go away after policy_update is implemented
reconfiguration:
app_config_reconfigure:
implementation: cdap_deploy.cdapcloudify.cdap_plugin.app_config_reconfigure
diff --git a/cdap/cdapplugin/cdapcloudify/cdap_plugin.py b/cdap/cdapplugin/cdapcloudify/cdap_plugin.py
index b4d22cd..1de0173 100644
--- a/cdap/cdapplugin/cdapcloudify/cdap_plugin.py
+++ b/cdap/cdapplugin/cdapcloudify/cdap_plugin.py
@@ -5,9 +5,9 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -16,10 +16,11 @@
# ============LICENSE_END=========================================================
#
# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+from dcaepolicy import Policies
import requests
from cloudify import ctx
-from cloudify.decorators import operation
+from cloudify.decorators import operation
from cloudify.exceptions import NonRecoverableError
import time
import uuid
@@ -27,9 +28,6 @@ import re
from cdapcloudify import discovery
import json
-"""
-TODO: Tons of crappy URL forming going on here...
-"""
# Property keys
SERVICE_COMPONENT_NAME = "service_component_name"
SELECTED_BROKER = "selected_broker"
@@ -45,6 +43,19 @@ class BadConnections(NonRecoverableError):
pass
+def _trigger_update(updated_policies):
+ """
+ Helper function for reconfiguring after a policy update
+ """
+ for p in updated_policies:
+ ctx.logger.info("Reconfiguring CDAP application via smart interface")
+ return discovery.reconfigure_in_broker(cdap_broker_name = ctx.instance.runtime_properties[SELECTED_BROKER],
+ service_component_name = ctx.instance.runtime_properties[SERVICE_COMPONENT_NAME],
+ config = p,
+ reconfiguration_type = "program-flowlet-smart",
+ logger = ctx.logger)
+
+
def _validate_conns(connections):
"""
Cloudify allows you to type spec a data type in a type file, however it does not appear to do strict checking on blueprints against that.
@@ -62,10 +73,10 @@ def _validate_conns(connections):
assert(s["type"] in ["message_router", "data_router"])
if s["type"] == "message_router":
_assert_ks_in_d(["aaf_username", "aaf_password", "client_role"], s) #I am not checking that these are not blank. I will leave it possible for you to put empty values for these, but force you to acknowledge that you are doing so by not allowing these to be ommited.
- #nothing extra for DR; no AAF, no client role.
+ #nothing extra for DR; no AAF, no client role.
except:
raise BadConnections("Bad Connections definition in blueprint") #is a NoneRecoverable
-
+
def _streams_iterator(streams):
"""
helper function for iterating over streams_publishes and subscribes
@@ -73,7 +84,7 @@ def _streams_iterator(streams):
"""
for_config = {}
for s in streams:
- if s["type"] == "message_router":
+ if s["type"] == "message_router":
#set the properties the DMaaP plugin needs
ctx.instance.runtime_properties[s["name"]] = {"client_role" : s["client_role"], "location" : s["location"]}
#form (or append to) the dict the component will get, including the template for the CBS
@@ -96,6 +107,19 @@ def _services_calls_iterator(services_calls):
for_config[s["config_key"]] = "{{ " + s["service_component_type"] + " }}" #will get bound by CBS
return for_config
+
+######################
+# TEMPORARY!!!!!!
+# THIS WILL GO AWAY ONCE ALEX HAS A NODE TYPE AND PLUGIN
+######################
+@operation
+@Policies.populate_policy_on_node
+def policy_get(**kwargs):
+ """decorate with @Policies.populate_policy_on_node on dcae.policy node to
+ retrieve the latest policy_body for policy_id property and save it in runtime_properties
+ """
+ pass
+
######################
# Cloudify Operations
######################
@@ -108,19 +132,19 @@ def create(connected_broker_dns_name, **kwargs):
#fail fast
_validate_conns(ctx.node.properties["connections"])
-
+
#The config binding service needs to know whether cdap or docker. Currently (aug 1 2018) it looks for "cdap_app" in the name
service_component_name = "{0}_cdap_app_{1}".format(str(uuid.uuid4()).replace("-",""), ctx.node.properties["service_component_type"])
#set this into a runtime dictionary
ctx.instance.runtime_properties[SERVICE_COMPONENT_NAME] = service_component_name
-
+
#fetch the broker name from inputs and set it in runtime properties so other functions can use it
ctx.instance.runtime_properties[SELECTED_BROKER] = connected_broker_dns_name
-
+
#set the properties the DMaap plugin expects for message router
#see the README for the structures of these keys
- #NOTE! This has to be done in create because Jack's DMaaP plugin expects to do it's thing in preconfigure.
+ #NOTE! This has to be done in create because Jack's DMaaP plugin expects to do it's thing in preconfigure.
# and we need to get this key into consul before start
#set this as a runtime property for start to use
ctx.instance.runtime_properties[PUB_C] = _streams_iterator(ctx.node.properties["connections"][STREAMS_PUBLISHES])
@@ -128,6 +152,7 @@ def create(connected_broker_dns_name, **kwargs):
ctx.instance.runtime_properties[SER_C] = _services_calls_iterator(ctx.node.properties["connections"][SERVICES_CALLS])
@operation
+@Policies.gather_policies_to_node
def deploy_and_start_application(**kwargs):
"""
pushes the application into the workspace and starts it
@@ -158,17 +183,24 @@ def deploy_and_start_application(**kwargs):
programs = ctx.node.properties["programs"],
program_preferences = ctx.node.properties["program_preferences"],
logger = ctx.logger)
-
- except Exception as e:
- ctx.logger.error("Error depploying CDAP app: {er}".format(er=e))
+
+ #TODO! Would be better to do an initial merge first before deploying, but the merge is complicated for CDAP
+ #because of app config vs. app preferences. So, for now, let the broker do the work with an immediate reconfigure
+ #get policies that may have changed prior to this blueprint deployment
+ policy_configs = Policies.get_policy_configs()
+ ctx.logger.info("Updated policy configs: {0}".format(policy_configs))
+ _trigger_update(policy_configs)
+
+ except Exception as e:
+        ctx.logger.error("Error deploying CDAP app: {er}".format(er=e))
raise NonRecoverableError(e)
@operation
def stop_and_undeploy_application(**kwargs):
#per jack Lucas, do not raise Nonrecoverables on any delete operation. Keep going on them all, cleaning up as much as you can.
#bombing would also bomb the deletion of the rest of the blueprint
- ctx.logger.info("Undeploying CDAP application")
-
+ ctx.logger.info("Undeploying CDAP application")
+
try: #deregister with the broker, which will also take down the service from consul
discovery.delete_on_broker(ctx.instance.runtime_properties[SELECTED_BROKER],
ctx.instance.runtime_properties[SERVICE_COMPONENT_NAME],
@@ -190,8 +222,8 @@ def app_config_reconfigure(new_config_template, **kwargs):
"""
try:
ctx.logger.info("Reconfiguring CDAP application via app_config")
- discovery.reconfigure_in_broker(cdap_broker_name = ctx.instance.runtime_properties[SELECTED_BROKER],
- service_component_name = ctx.instance.runtime_properties[SERVICE_COMPONENT_NAME],
+ discovery.reconfigure_in_broker(cdap_broker_name = ctx.instance.runtime_properties[SELECTED_BROKER],
+ service_component_name = ctx.instance.runtime_properties[SERVICE_COMPONENT_NAME],
config = new_config_template, #This keyname will likely change per policy handler
reconfiguration_type = "program-flowlet-app-config",
logger = ctx.logger)
@@ -205,8 +237,8 @@ def app_preferences_reconfigure(new_config_template, **kwargs):
"""
try:
ctx.logger.info("Reconfiguring CDAP application via app_preferences")
- discovery.reconfigure_in_broker(cdap_broker_name = ctx.instance.runtime_properties[SELECTED_BROKER],
- service_component_name = ctx.instance.runtime_properties[SERVICE_COMPONENT_NAME],
+ discovery.reconfigure_in_broker(cdap_broker_name = ctx.instance.runtime_properties[SELECTED_BROKER],
+ service_component_name = ctx.instance.runtime_properties[SERVICE_COMPONENT_NAME],
config = new_config_template, #This keyname will likely change per policy handler
reconfiguration_type = "program-flowlet-app-preferences",
logger = ctx.logger)
@@ -220,8 +252,8 @@ def app_smart_reconfigure(new_config_template, **kwargs):
"""
try:
ctx.logger.info("Reconfiguring CDAP application via smart interface")
- discovery.reconfigure_in_broker(cdap_broker_name = ctx.instance.runtime_properties[SELECTED_BROKER],
- service_component_name = ctx.instance.runtime_properties[SERVICE_COMPONENT_NAME],
+ discovery.reconfigure_in_broker(cdap_broker_name = ctx.instance.runtime_properties[SELECTED_BROKER],
+ service_component_name = ctx.instance.runtime_properties[SERVICE_COMPONENT_NAME],
config = new_config_template, #This keyname will likely change per policy handler
reconfiguration_type = "program-flowlet-smart",
logger = ctx.logger)
@@ -229,6 +261,18 @@ def app_smart_reconfigure(new_config_template, **kwargs):
raise NonRecoverableError("CDAP Reconfigure error: {0}".format(e))
@operation
+@Policies.update_policies_on_node(configs_only=True)
+def policy_update(updated_policies, **kwargs):
+    #it's already delivered through policy
+    ctx.logger.info("Policy update received. updated policies: {0}".format(updated_policies))
+ try:
+ #TODO! In the future, if we really have many different policies, would be more efficient to do a single merge here.
+ #However all use cases today are a single policy so OK with this for loop for now.
+ _trigger_update(updated_policies)
+ except Exception as e:
+ raise NonRecoverableError("CDAP Reconfigure error: {0}".format(e))
+
+@operation
def delete_all_registered_apps(connected_broker_dns_name, **kwargs):
"""
Used in the cdap broker deleter node.
diff --git a/cdap/cdapplugin/requirements.txt b/cdap/cdapplugin/requirements.txt
index 1128300..48ff425 100644
--- a/cdap/cdapplugin/requirements.txt
+++ b/cdap/cdapplugin/requirements.txt
@@ -1 +1,3 @@
+--extra-index-url {{ TODO }}/simple
+dcaepolicy==0.0.3
uuid==1.30
diff --git a/cdap/cdapplugin/setup.py b/cdap/cdapplugin/setup.py
index 53a9a2b..8498473 100644
--- a/cdap/cdapplugin/setup.py
+++ b/cdap/cdapplugin/setup.py
@@ -5,9 +5,9 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -22,7 +22,7 @@ from setuptools import setup, find_packages
setup(
name = "cdapcloudify",
- version = "14.1.0",
+ version = "14.2.0",
packages=find_packages(),
author = "Tommy Carpenter",
author_email = "tommy at research dot eh tee tee dot com",
@@ -32,6 +32,7 @@ setup(
url = "https://gerrit.onap.org/r/#/admin/projects/dcaegen2/platform/plugins",
zip_safe=False,
install_requires = [
+ "dcaepolicy==0.0.3",
"uuid==1.30"
]
)
diff --git a/cdap/demo_blueprints/cdap_hello_world.yaml b/cdap/demo_blueprints/cdap_hello_world.yaml
index 1b7ff90..197603f 100644
--- a/cdap/demo_blueprints/cdap_hello_world.yaml
+++ b/cdap/demo_blueprints/cdap_hello_world.yaml
@@ -2,7 +2,9 @@ tosca_definitions_version: cloudify_dsl_1_3
imports:
- http://www.getcloudify.org/spec/cloudify/3.4/types.yaml
- - {{ ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2 }}/type_files/cdap/14.0.2/cdap_types.yaml
+ #- {{ ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2 }}/type_files/cdap/14.0.2/cdap_types.yaml
+ - {{ ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2 }}/type_files/cdap/14.2.0/cdap_types.yaml
+ #TODO: IMPORT ALEX'S TYPE FILE
inputs:
hello_world_jar_url:
@@ -11,7 +13,30 @@ inputs:
type: string
default : "cdap_broker"
+#TODO: THIS WILL GO AWAY ONCE TYPE FILE IS IMPORTED
+node_types:
+ dcae.nodes.policy:
+ derived_from: cloudify.nodes.Root
+ properties:
+ policy_id:
+ description: PK to policy
+ type: string
+ default: DCAE_alex.Config_empty-policy
+ policy_apply_mode:
+ description: choice of how to apply the policy update - none or script
+ type: string
+ default: none
+ interfaces:
+ cloudify.interfaces.lifecycle:
+ create:
+ implementation: cdap_deploy.cdapcloudify.cdap_plugin.policy_get
+#TODO: THIS WILL GO AWAY ONCE TYPE FILE IS IMPORTED
+
node_templates:
+ hw_app_policy_test:
+ type: dcae.nodes.policy
+ properties:
+ policy_id : DCAE_alex.Config_test_cdap_policy
hw_cdap_app:
type: dcae.nodes.MicroService.cdap
@@ -28,11 +53,17 @@ node_templates:
'who'
service_endpoints:
[{"service_name" : "Greeting", "service_endpoint" : "greet", "endpoint_method" : "GET"}]
+ app_config: {"foo" : "foo-test"}
+ app_preferences: {"foo_updated" : "foo-pref-test"}
+
interfaces:
cloudify.interfaces.lifecycle:
create:
inputs:
connected_broker_dns_name: { get_input: connected_broker_dns_name }
+ relationships:
+ - target: hw_app_policy_test
+ type: cloudify.relationships.depends_on
outputs:
hw_cdap_app_name:
diff --git a/docker/ChangeLog.md b/docker/ChangeLog.md
index 673e672..5094816 100644
--- a/docker/ChangeLog.md
+++ b/docker/ChangeLog.md
@@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
+## [2.3.0+t.0.3]
+
+* Enhance `SelectedDockerHost` node type with `name_search` and add default to `docker_host_override`
+* Implement the functionality in the `select_docker_host` task to query Consul given location id and name search
+* Deprecate `location_id` on the `DockerContainerForComponents*` node types
+* Change `service_id` to be optional for `DockerContainerForComponents*` node types
+* Add deployment id as a tag for registration on the component
+
## [2.3.0]
* Rip out dockering and use common python-dockering library
diff --git a/docker/docker-node-type.yaml b/docker/docker-node-type.yaml
index 5fb0e27..b1bf64c 100644
--- a/docker/docker-node-type.yaml
+++ b/docker/docker-node-type.yaml
@@ -7,7 +7,7 @@ plugins:
docker:
executor: 'central_deployment_agent'
package_name: dockerplugin
- package_version: 2.3.0
+ package_version: 2.3.0+t.0.3
node_types:
# The DockerContainerForComponents node type is to be used for DCAE service components that
@@ -29,11 +29,18 @@ node_types:
service_id:
type: string
- description: Unique id for this DCAE service instance this component belongs to
+ description: >
+ Unique id for this DCAE service instance this component belongs to. This value
+ will be applied as a tag in the registration of this component with Consul.
+ default: Null
location_id:
type: string
- description: Location id of where to run the container
+ description: >
+ Location id of where to run the container.
+ DEPRECATED - No longer used. Infer the location from the docker host service
+ and/or node.
+ default: Null
service_component_name_override:
type: string
@@ -228,12 +235,18 @@ node_types:
type: string
description: Location id of the Docker host to use
+ name_search:
+ type: string
+ description: String to use when matching for names
+ default: component-dockerhost
+
# REVIEW: This field should really be optional but because there's no functionality
# that provides the dynamic solution sought after yet, it has been promoted to be
# required.
docker_host_override:
type: string
description: Docker hostname here is used as a manual override
+ default: Null
interfaces:
cloudify.interfaces.lifecycle:
diff --git a/docker/dockerplugin/discovery.py b/docker/dockerplugin/discovery.py
index 32a8cd0..03a51f6 100644
--- a/docker/dockerplugin/discovery.py
+++ b/docker/dockerplugin/discovery.py
@@ -47,19 +47,17 @@ def _wrap_consul_call(consul_func, *args, **kwargs):
raise DiscoveryConnectionError(e)
-def generate_service_component_name(service_component_type, service_id, location_id):
+def generate_service_component_name(service_component_type):
"""Generate service component id used to pass into the service component
instance and used as the key to the service component configuration.
Format:
- <service component id>.<service component type>.<service id>.<location id>.dcae.com
-
- TODO: The format will evolve.
+ <service component id>_<service component type>
"""
# Random generated
- service_component_id = str(uuid.uuid4())
- return "{0}.{1}.{2}.{3}.dcae.com".format(
- service_component_id, service_component_type, service_id, location_id)
+ # Copied from cdap plugin
+ return "{0}_{1}".format(str(uuid.uuid4()).replace("-",""),
+ service_component_type)
def create_kv_conn(host):
@@ -204,3 +202,41 @@ def add_to_entry(conn, key, add_name, add_value):
updated = conn.kv.put(key, new_vstring, cas=mod_index) # if the key has changed since retrieval, this will return false
if updated:
return v
+
+
+def _find_matching_services(services, name_search, tags):
+ """Find matching services given search criteria"""
+ def is_match(service):
+ srv_name, srv_tags = service
+ return name_search in srv_name and \
+ all(map(lambda tag: tag in srv_tags, tags))
+
+ return [ srv[0] for srv in services.items() if is_match(srv) ]
+
+def search_services(conn, name_search, tags):
+ """Search for services that match criteria
+
+ Args:
+ -----
+ name_search: (string) Name to search for as a substring
+ tags: (list) List of strings that are tags. A service must match **all** the
+ tags in the list.
+
+    Returns:
+ --------
+ List of names of services that matched
+ """
+ # srvs is dict where key is service name and value is list of tags
+ catalog_get_services_func = partial(_wrap_consul_call, conn.catalog.services)
+ index, srvs = catalog_get_services_func()
+
+ if srvs:
+ matches = _find_matching_services(srvs, name_search, tags)
+
+ if matches:
+ return matches
+
+ raise DiscoveryServiceNotFoundError(
+ "No matches found: {0}, {1}".format(name_search, tags))
+ else:
+ raise DiscoveryServiceNotFoundError("No services found")
diff --git a/docker/dockerplugin/tasks.py b/docker/dockerplugin/tasks.py
index a41f143..837c1e9 100644
--- a/docker/dockerplugin/tasks.py
+++ b/docker/dockerplugin/tasks.py
@@ -20,7 +20,7 @@
# Lifecycle interface calls for DockerContainer
-import json, time, copy
+import json, time, copy, random
from cloudify import ctx
from cloudify.decorators import operation
from cloudify.exceptions import NonRecoverableError, RecoverableError
@@ -71,14 +71,10 @@ def _setup_for_discovery(**kwargs):
def _generate_component_name(**kwargs):
"""Generate component name"""
service_component_type = kwargs['service_component_type']
- service_id = kwargs['service_id']
- location_id = kwargs['location_id']
-
name_override = kwargs['service_component_name_override']
kwargs['name'] = name_override if name_override \
- else dis.generate_service_component_name(service_component_type,
- service_id, location_id)
+ else dis.generate_service_component_name(service_component_type)
return kwargs
def _done_for_create(**kwargs):
@@ -297,6 +293,14 @@ def _create_and_start_container(container_name, image, docker_host,
raise DockerPluginDependencyNotReadyError(e)
+def _parse_cloudify_context(**kwargs):
+ """Parse Cloudify context
+
+ Extract what is needed. This is impure function because it requires ctx.
+ """
+ kwargs["deployment_id"] = ctx.deployment.id
+ return kwargs
+
def _enhance_docker_params(**kwargs):
"""Setup Docker envs"""
docker_config = kwargs.get("docker_config", {})
@@ -307,6 +311,14 @@ def _enhance_docker_params(**kwargs):
envs_healthcheck = doc.create_envs_healthcheck(docker_config) \
if "healthcheck" in docker_config else {}
envs.update(envs_healthcheck)
+
+ # Set tags on this component for its Consul registration as a service
+ tags = [kwargs.get("deployment_id", None), kwargs["service_id"]]
+ tags = [ str(tag) for tag in tags if tag is not None ]
+ # Registrator will use this to register this component with tags. Must be
+ # comma delimited.
+ envs["SERVICE_TAGS"] = ",".join(tags)
+
kwargs["envs"] = envs
def combine_params(key, docker_config, kwargs):
@@ -384,7 +396,8 @@ def create_and_start_container_for_components(**start_inputs):
_done_for_start(
**_verify_component(
**_create_and_start_component(
- **_enhance_docker_params(**start_inputs))))
+ **_enhance_docker_params(
+ **_parse_cloudify_context(**start_inputs)))))
def _update_delivery_url(**kwargs):
@@ -423,7 +436,8 @@ def create_and_start_container_for_components_with_streams(**start_inputs):
**_update_delivery_url(
**_verify_component(
**_create_and_start_component(
- **_enhance_docker_params(**start_inputs)))))
+ **_enhance_docker_params(
+ **_parse_cloudify_context(**start_inputs))))))
@wrap_error_handling_start
@@ -546,15 +560,28 @@ def cleanup_discovery(**kwargs):
# Lifecycle interface calls for dcae.nodes.DockerHost
+
+@monkeypatch_loggers
@operation
def select_docker_host(**kwargs):
selected_docker_host = ctx.node.properties['docker_host_override']
+ name_search = ctx.node.properties['name_search']
+ location_id = ctx.node.properties['location_id']
if selected_docker_host:
ctx.instance.runtime_properties[SERVICE_COMPONENT_NAME] = selected_docker_host
ctx.logger.info("Selected Docker host: {0}".format(selected_docker_host))
else:
- raise NonRecoverableError("Failed to find a suitable Docker host")
+ try:
+ conn = dis.create_kv_conn(CONSUL_HOST)
+ names = dis.search_services(conn, name_search, [location_id])
+ ctx.logger.info("Docker hosts found: {0}".format(names))
+ # Randomly choose one
+ ctx.instance.runtime_properties[SERVICE_COMPONENT_NAME] = random.choice(names)
+ except (dis.DiscoveryConnectionError, dis.DiscoveryServiceNotFoundError) as e:
+ raise RecoverableError(e)
+ except Exception as e:
+ raise NonRecoverableError(e)
@operation
def unselect_docker_host(**kwargs):
diff --git a/docker/examples/blueprint-laika.yaml b/docker/examples/blueprint-laika.yaml
index 9a8dc46..98d27af 100644
--- a/docker/examples/blueprint-laika.yaml
+++ b/docker/examples/blueprint-laika.yaml
@@ -9,14 +9,9 @@ imports:
- {{ ONAPTEMPLATE_RAWREPOURL_org_onap_dcaegen2 }}/type_files/relationship/1.0.0/node-type.yaml
inputs:
-
- service_id:
- description: Unique id used for an instance of this DCAE service. Use deployment id
- default: 'foobar'
laika_image:
type: string
-
node_templates:
laika-zero:
@@ -24,10 +19,10 @@ node_templates:
properties:
service_component_type:
'laika'
- service_id:
- { get_input: service_id }
location_id:
'rework-central'
+ service_id:
+ 'foo-service'
application_config:
some-param: "Lorem ipsum dolor sit amet"
downstream-laika: "{{ laika }}"
@@ -62,10 +57,6 @@ node_templates:
properties:
service_component_type:
'laika'
- service_id:
- { get_input: service_id }
- location_id:
- 'rework-central'
application_config:
some-param: "Lorem ipsum dolor sit amet"
image: { get_input : laika_image }
@@ -85,5 +76,5 @@ node_templates:
properties:
location_id:
'rework-central'
- docker_host_override:
+ name_search:
'platform_dockerhost'
diff --git a/docker/setup.py b/docker/setup.py
index 9cdef0e..65ac0e9 100644
--- a/docker/setup.py
+++ b/docker/setup.py
@@ -24,7 +24,7 @@ from setuptools import setup
setup(
name='dockerplugin',
description='Cloudify plugin for applications run in Docker containers',
- version="2.3.0",
+ version="2.3.0+t.0.3",
author='Michael Hwang, Tommy Carpenter',
packages=['dockerplugin'],
zip_safe=False,
diff --git a/docker/tests/test_discovery.py b/docker/tests/test_discovery.py
index 9a18519..cee75b1 100644
--- a/docker/tests/test_discovery.py
+++ b/docker/tests/test_discovery.py
@@ -38,3 +38,18 @@ def test_wrap_consul_call():
wrapped_foo = partial(dis._wrap_consul_call, foo_connection_error)
with pytest.raises(dis.DiscoveryConnectionError):
wrapped_foo("a", "b", "c")
+
+
+def test_find_matching_services():
+ services = { "component_dockerhost_1": ["foo", "bar"],
+ "platform_dockerhost": [], "component_dockerhost_2": ["baz"] }
+ assert sorted(["component_dockerhost_1", "component_dockerhost_2"]) \
+ == sorted(dis._find_matching_services(services, "component_dockerhost", []))
+
+ assert ["component_dockerhost_1"] == dis._find_matching_services(services, \
+ "component_dockerhost", ["foo", "bar"])
+
+ assert ["component_dockerhost_1"] == dis._find_matching_services(services, \
+ "component_dockerhost", ["foo"])
+
+ assert [] == dis._find_matching_services(services, "unknown", ["foo"])
diff --git a/docker/tests/test_tasks.py b/docker/tests/test_tasks.py
index 74482c6..6661532 100644
--- a/docker/tests/test_tasks.py
+++ b/docker/tests/test_tasks.py
@@ -25,6 +25,17 @@ import dockerplugin
from dockerplugin import tasks
+def test_generate_component_name():
+ kwargs = { "service_component_type": "doodle",
+ "service_component_name_override": None }
+
+ assert "doodle" in tasks._generate_component_name(**kwargs)["name"]
+
+ kwargs["service_component_name_override"] = "yankee"
+
+ assert "yankee" == tasks._generate_component_name(**kwargs)["name"]
+
+
def test_parse_streams(monkeypatch):
# Good case for streams_publishes
test_input = { "streams_publishes": [{"name": "topic00", "type": "message_router"},
@@ -166,33 +177,42 @@ def test_update_delivery_url(monkeypatch):
def test_enhance_docker_params():
# Good - Test empty docker config
- test_kwargs = { "docker_config": {} }
+ test_kwargs = { "docker_config": {}, "service_id": None }
actual = tasks._enhance_docker_params(**test_kwargs)
- assert actual == {'envs': {}, 'docker_config': {}}
+ assert actual == {'envs': {"SERVICE_TAGS": ""}, 'docker_config': {}, "service_id": None }
# Good - Test just docker config ports and volumes
test_kwargs = { "docker_config": { "ports": ["1:1", "2:2"],
- "volumes": [{"container": "somewhere", "host": "somewhere else"}] } }
+ "volumes": [{"container": "somewhere", "host": "somewhere else"}] },
+ "service_id": None }
actual = tasks._enhance_docker_params(**test_kwargs)
- assert actual == {'envs': {}, 'docker_config': {'ports': ['1:1', '2:2'],
+ assert actual == {'envs': {"SERVICE_TAGS": ""}, 'docker_config': {'ports': ['1:1', '2:2'],
'volumes': [{'host': 'somewhere else', 'container': 'somewhere'}]},
'ports': ['1:1', '2:2'], 'volumes': [{'host': 'somewhere else',
- 'container': 'somewhere'}]}
+ 'container': 'somewhere'}], "service_id": None}
# Good - Test just docker config ports and volumes with overrrides
test_kwargs = { "docker_config": { "ports": ["1:1", "2:2"],
"volumes": [{"container": "somewhere", "host": "somewhere else"}] },
"ports": ["3:3", "4:4"], "volumes": [{"container": "nowhere", "host":
- "nowhere else"}]}
+ "nowhere else"}],
+ "service_id": None }
actual = tasks._enhance_docker_params(**test_kwargs)
- assert actual == {'envs': {}, 'docker_config': {'ports': ['1:1', '2:2'],
+ assert actual == {'envs': {"SERVICE_TAGS": ""}, 'docker_config': {'ports': ['1:1', '2:2'],
'volumes': [{'host': 'somewhere else', 'container': 'somewhere'}]},
'ports': ['1:1', '2:2', '3:3', '4:4'], 'volumes': [{'host': 'somewhere else',
'container': 'somewhere'}, {'host': 'nowhere else', 'container':
- 'nowhere'}]}
+ 'nowhere'}], "service_id": None}
+
+ # Good
+
+ test_kwargs = { "docker_config": {}, "service_id": "zed",
+ "deployment_id": "abc" }
+ actual = tasks._enhance_docker_params(**test_kwargs)
+ assert actual["envs"] == {"SERVICE_TAGS": "abc,zed"}