aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--README.md21
-rw-r--r--operations/dcae/README.md56
-rw-r--r--operations/dcae/blueprints/README.md33
-rw-r--r--operations/dcae/blueprints/k8s-datacollector.yaml80
-rw-r--r--operations/dcae/blueprints/k8s-sleepingcelldetector.yaml58
-rw-r--r--operations/dcae/dcae-cli.py544
-rw-r--r--operations/dcae/dcae.sh127
-rw-r--r--operations/dcae/inputs_database_password.sh8
-rw-r--r--operations/dcae/rapps.sh103
-rw-r--r--operations/scripts/k8s_get_node_ip.sh11
10 files changed, 1041 insertions, 0 deletions
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..8405100
--- /dev/null
+++ b/README.md
@@ -0,0 +1,21 @@
+# A1 Policy Enforcement use-case repository
+
+Goal of this use case is to present that:
+
+- A1-PE-Simulator can send VES events based on which a future sleeping cell can be detected
+- R-APP can predict a sleeping cell
+- R-APP can enforce an A1 policy for critical devices to avoid the cell for which a failure has been predicted
+- R-APPs can be deployed in the DCAE Framework
+
+Assumptions:
+
+- R-APPs are DCAE Applications
+- R-APP will provide a REST API to the event store
+- ONAP Policy Management Service and SDNR/A1 Adapter are used to enforce the A1 Policy
+- Configuration of topology and user equipment are configured in A1-PE Simulator json file
+- A1-PE Simulator is a CNF
+
+The repository contains:
+
+- operations/dcae - DCAE CLI that deploys RAPPs in the DCAE framework
+- operations/scripts - helper scripts to support the use case
diff --git a/operations/dcae/README.md b/operations/dcae/README.md
new file mode 100644
index 0000000..a3c8339
--- /dev/null
+++ b/operations/dcae/README.md
@@ -0,0 +1,56 @@
+# Install R-APPs as DCAE Microservices
+
+DCAE NBI Dashboard API is used to deploy R-APPs.
+
+## Install
+
+Install all R-APPs using cli:
+
+```
+./rapps.sh deploy
+```
+
+## Uninstall
+
+To uninstall all R-APPs using cli, issue command:
+
+```
+./rapps.sh undeploy
+```
+
+## Environment variables to Override
+
+There are some environment variables that can be overridden when using shell scripts in this directory.
+
+- NODE_IP
+
+Kubernetes node ip address or hostname. By default this is resolved with another script (../scripts/k8s_get_node_ip.sh)
+and that script is using kubectl for that. If you don't have kubectl installed and configured towards this particular
+Kubernetes deployment, exporting this variable with your wanted value can be helpful.
+
+- DCAE_DASHBOARD_NODEPORT
+
+By default port 30418 is used for dcae-dashboard.
+
+- DATABASE_PASSWORD
+
+By default kubectl is used to resolve the value for this from a certain ONAP database in the script inputs_database_password.sh.
+
+- BLUEPRINTS_DIR
+
+By default the 'blueprints' dir is used under this script directory. It can be replaced with an absolute/relative path for the directory with the blueprint files.
+
+Also, the list of the blueprint file name is hardcoded in the rapps.sh script:
+
+```shell script
+declare -a rapp_blueprint_files=(
+ ${BLUEPRINTS_DIR}/k8s-datacollector.yaml
+ ${BLUEPRINTS_DIR}/k8s-sleepingcelldetector.yaml
+)
+```
+
+If a new blueprint file is added, this list should be adjusted.
+
+- DEPLOYMENT_ID_PREFIX
+
+By default the "samsung" prefix is used.
diff --git a/operations/dcae/blueprints/README.md b/operations/dcae/blueprints/README.md
new file mode 100644
index 0000000..ecbb3b8
--- /dev/null
+++ b/operations/dcae/blueprints/README.md
@@ -0,0 +1,33 @@
+# Blueprint files for Cloudify
+
+Cloudify node types supported by ONAP Guilin are here:
+https://gerrit.onap.org/r/gitweb?p=dcaegen2/platform/plugins.git;a=blob;f=k8s/k8s-node-type.yaml;h=c14623aaf528db68f6aa960a18c54c603a1f943d;hb=refs/heads/guilin
+
+R-APP blueprints are based on node type: `dcae.nodes.ContainerizedServiceComponent`
+
+The following properties have these meanings:
+
+- service_component_type
+  This becomes the name of the POD in Kubernetes.
+- service_id
+ Unique id for this DCAE service instance this component belongs to.
+ This value will be applied as a tag in the registration of this component with Consul.
+ It will be visible in POD ENV as SERVICE_TAGS value.
+
+POD ENV
+
+Environment:
+
+- DCAE_CA_CERTPATH: /opt/dcae/cacert/cacert.pem
+- CONSUL_HOST: consul-server.onap
+- SERVICE_TAGS: rapp-service_id
+- CONFIG_BINDING_SERVICE: config-binding-service
+- CBS_CONFIG_URL: https://config-binding-service:10443/service_component_all/s8def4b1fc2ad4c05ba635289452860ee-componenttype-rapp
+
+POD Labels:
+app=s8def4b1fc2ad4c05ba635289452860ee-componenttype-rapp --> name of the POD without prefix
+cfydeployment=samsung_samsung-rapp-1 --> Service ID/Deployment Ref. / DeploymentId given to API when creating deployment
+cfynode=rapp-cloudify-node-template --> Blueprint node-template definition
+cfynodeinstance=rapp-cloudify-node-template_zbhke6
+k8sdeployment=dep-s8def4b1fc2ad4c05ba635289452860ee-componenttype-rapp --> Complete POD name
+pod-template-hash=6cdcd77994
diff --git a/operations/dcae/blueprints/k8s-datacollector.yaml b/operations/dcae/blueprints/k8s-datacollector.yaml
new file mode 100644
index 0000000..7cc28a4
--- /dev/null
+++ b/operations/dcae/blueprints/k8s-datacollector.yaml
@@ -0,0 +1,80 @@
+#
+# ============LICENSE_START====================================================
+# =============================================================================
+# Copyright (c) 2020 © Samsung Electronics Co., Ltd.
+# =============================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=====================================================
+
+tosca_definitions_version: cloudify_dsl_1_3
+imports:
+ - https://www.getcloudify.org/spec/cloudify/4.5.5/types.yaml
+ - plugin:k8splugin?version=3.4.2
+inputs:
+ service_id:
+ type: string
+ description: service id
+ default: "rapp-datacollector-service"
+ image:
+ type: string
+ description: docker image name and version
+ default: "nexus3.onap.org:10001/onap/datacollector/datacollector:0.0.1-SNAPSHOT"
+ database_host:
+ type: string
+ description: Database host.
+ default: "mariadb-galera"
+ database_username:
+ type: string
+ description: Database username.
+ default: "root"
+ database_password:
+ type: string
+ description: Database password.
+ # Detect: kubectl get secret `kubectl get secrets | grep mariadb-galera-db-root-password | awk '{print $1}'` -o jsonpath="{.data.password}" | base64 --decode
+ default: "DepdDuza6%Venu"
+ dmaap_host:
+ type: string
+ description: DMaaP host.
+ default: "message-router"
+ ves_measurements_topic_name:
+ type: string
+ default: "unauthenticated.VES_MEASUREMENT_OUTPUT"
+
+
+node_templates:
+ rapp-datacollector:
+ type: dcae.nodes.ContainerizedServiceComponent
+ interfaces:
+ cloudify.interfaces.lifecycle:
+ start:
+ inputs:
+ envs:
+ DMAAP_HOST: { get_input: dmaap_host }
+ DMAAP_PORT: "3904"
+ DMAAP_PROTOCOL: "http"
+ # "events/unauthenticated.VES_MEASUREMENT_OUTPUT/mygroup/mycus"
+ DMAAP_MEASUREMENTS_TOPIC: {concat: ["events/", { get_input: ves_measurements_topic_name }, "/mygroup/mycus"]}
+ DATABASE_HOST: { get_input: database_host }
+ DATABASE_PORT: "3306"
+ DATABASE_NAME: "ves"
+ DATABASE_USERNAME: { get_input: database_username }
+ DATABASE_PASSWORD: { get_input: database_password }
+ ports:
+ - "8087:30980"
+ properties:
+ service_component_type: "rapp-datacollector"
+ service_component_name_override: "rapp-datacollector"
+ service_id:
+ get_input: service_id
+ image:
+ get_input: image
diff --git a/operations/dcae/blueprints/k8s-sleepingcelldetector.yaml b/operations/dcae/blueprints/k8s-sleepingcelldetector.yaml
new file mode 100644
index 0000000..e06b508
--- /dev/null
+++ b/operations/dcae/blueprints/k8s-sleepingcelldetector.yaml
@@ -0,0 +1,58 @@
+#
+# ============LICENSE_START====================================================
+# =============================================================================
+# Copyright (c) 2020 © Samsung Electronics Co., Ltd.
+# =============================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=====================================================
+
+tosca_definitions_version: cloudify_dsl_1_3
+imports:
+ - https://www.getcloudify.org/spec/cloudify/4.5.5/types.yaml
+ - plugin:k8splugin?version=3.4.2
+inputs:
+ service_id:
+ type: string
+ description: service id
+ default: "rapp-sleepingcelldetector-service"
+ image:
+ type: string
+ description: docker image name and version
+ default: "nexus3.onap.org:10001/onap/sleepingcelldetector/sleepingcelldetector:0.0.1-SNAPSHOT"
+
+node_templates:
+ rapp-sleepingcelldetector:
+ type: dcae.nodes.ContainerizedServiceComponent
+ interfaces:
+ cloudify.interfaces.lifecycle:
+ start:
+ inputs:
+ envs:
+ A1_PROTOCOL: "http"
+ A1_HOST: "a1policymanagement"
+ A1_PORT: "8081"
+ A1_URL: "policy"
+ DATACOLLECTOR_PROTOCOL: "http"
+ DATACOLLECTOR_HOST: "rapp-datacollector"
+ DATACOLLECTOR_PORT: "8087"
+ DATACOLLECTOR_VERSION: "v1"
+ SLEEPINGCELLDETECTOR_PREFIX: "emergency"
+ SLEEPINGCELLDETECTOR_SLOT: "10"
+ SLEEPINGCELLDETECTOR_COUNT: "12"
+ properties:
+ service_component_type: "rapp-sleepingcelldetector"
+ service_component_name_override: "rapp-sleepingcelldetector"
+ service_id:
+ get_input: service_id
+ image:
+ get_input: image
diff --git a/operations/dcae/dcae-cli.py b/operations/dcae/dcae-cli.py
new file mode 100644
index 0000000..520037e
--- /dev/null
+++ b/operations/dcae/dcae-cli.py
@@ -0,0 +1,544 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2020 by Samsung Electronics Co., Ltd.
+#
+# This software is the confidential and proprietary information of Samsung Electronics co., Ltd.
+# ("Confidential Information"). You shall not disclose such Confidential Information and shall use
+# it only in accordance with the terms of the license agreement you entered into with Samsung.
+
+"""Cli application for ONAP DCAE Dashboard for managing DCAE Microservices.
+
+Implements core parts of the API defined here:
+https://git.onap.org/ccsdk/dashboard/tree/ccsdk-app-os/src/main/resources/swagger.json
+"""
+import argparse
+import base64
+
+import requests
+import json
+import yaml
+import time
+import os
+import sys
+import re
+
+try:
+ from urllib.parse import quote
+except ImportError:
+ from urllib import pathname2url as quote
+
# Suppress https ignoring warning to be printed on screen
# InsecureRequestWarning: Unverified HTTPS request is being made...
requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)

# REST paths of the DCAE dashboard north-bound API.
ROOT_PATH = "/ccsdk-app/nb-api/v2"
BLUEPRINTS_URL = ROOT_PATH + "/blueprints"
DEPLOYMENTS_URL = ROOT_PATH + "/deployments"
# Placeholder printed in listings for missing optional fields.
EMPTY_CHAR = "-"

# Deployment operations (used in executions)
DEPLOYMENT_INSTALL = "install"
DEPLOYMENT_UNINSTALL = "uninstall"
DEPLOYMENT_UPDATE = "update"

# Default dashboard credentials used for HTTP Basic auth.
# NOTE(review): hardcoded credentials in the repository — consider reading
# these from the environment instead.
USER_LOGIN = "su1234"
USER_PASSWORD = "fusion"
+
+
def get_url(postfix):
    """Join the CLI's --base_url (global args) with an API path suffix."""
    trimmed_base = args.base_url.strip('/')
    return trimmed_base + postfix
+
def read_json_file(file_path):
    """Load and return the JSON document stored at file_path."""
    with open(file_path) as json_file:
        return json.load(json_file)
+
def read_yaml_file(file_path):
    """Parse a YAML file (safe loader) and return the resulting object."""
    with open(file_path) as yaml_file:
        return yaml.safe_load(yaml_file)
+
def print_rows_formatted(matrix):
    """Prints 2 dimensional array data (matrix) formatted to screen.

    Each cell is left-justified to the widest cell plus two spaces of
    padding; a blank separator line is printed after the table.
    """
    if not matrix:
        # Guard: max() over an empty sequence raises ValueError.
        return
    col_width = max(len(word) for row in matrix for word in row) + 2  # padding
    for row in matrix:
        print("".join(word.ljust(col_width) for word in row))
    # The original bare `print` statement was a no-op expression under
    # Python 3; emit the intended blank separator line explicitly.
    print("")
+
+def epoch_2_date(epoch):
+ if len(epoch) > 10:
+ epoch = epoch[:10]
+ return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(float(epoch)))
+
+
def create_filter(_filter):
    """Serialize a dict to JSON and URL-encode it for a query parameter."""
    serialized = json.dumps(_filter)
    return quote(serialized)
+
+
def http(verb, url, params=None, body=None):
    """Issue an HTTP request against the dashboard API and return the response.

    Uses the module-level requests session `s` and the precomputed
    `base64_authorization_value` for HTTP Basic auth.  Server TLS
    certificates are NOT verified (lab deployment).  Any status other
    than 200/202 raises RuntimeError after dumping the request details.

    Fix: the original contained bare `print` statements, which are no-op
    expressions under Python 3; they are replaced with print("") so the
    intended blank separator lines are emitted on both Python 2 and 3.
    """
    print(verb + " to " + url)
    if args.verbose:
        if params:
            print("PARAMS: ")
            print(params)
            print("")
        if body:
            print("BODY: ")
            print(body)
            print("")

    headers = {'Authorization': "Basic " + base64_authorization_value,
               'Content-Type': 'application/json',
               'Accept': 'application/json'}

    r = s.request(verb,
                  url,
                  headers=headers,
                  params=params,
                  # accept all server TLS certs
                  verify=False,
                  json=body)

    if r.status_code != 200 and r.status_code != 202:
        print("Request params:")
        print("Headers: " + str(r.request.headers))
        print("")
        print("Body: " + str(r.request.body))
        print("")
        raise RuntimeError('Response status code: {} with message: {}'
                           .format(r.status_code, str(r.content)))
    if args.verbose:
        print("RESPONSE: ")
        print(r.json())
        print("")
    print("SUCCESSFUL " + verb)
    print("")
    return r
+
+
def list_blueprints():
    """GET all blueprints, print them as a formatted table, return the response."""
    response = http('GET', get_url(BLUEPRINTS_URL))
    payload = response.json()
    total_items = payload['totalItems']
    rows = [['Blueprint Id', 'Blueprint Name', 'Blueprint version', 'Application/Component/Owner']]
    for bp in payload['items']:
        owner_info = '/'.join([bp.get("application", EMPTY_CHAR),
                               bp.get("component", EMPTY_CHAR),
                               bp['owner']])
        rows.append([bp['typeId'], bp['typeName'], str(bp['typeVersion']), owner_info])
    # Print it
    print_rows_formatted(rows)
    print("Total " + str(total_items) + " blueprints.")
    return response
+
def create_blueprint(body):
    """POST a new blueprint; exits the process if the API reports an error."""
    r = http('POST', get_url(BLUEPRINTS_URL), body=body)
    # The API reports failures inside a JSON "error" field (with HTTP 200),
    # and that field is itself a JSON-encoded string carrying a "message".
    if "error" in r.json():
        err_msg = json.loads(r.json()['error'])["message"]
        # Friendlier message for the common "blueprint still deployed" case.
        if re.match('^DCAE services of type.*are still running:.*', err_msg):
            print("Blueprint already exists and cannot update it as there are deployments related to that running. First delete deployments for this blueprint!")
        else:
            print(r.json()['error'])
        sys.exit(1)
    print("typeId: " + str(r.json()['typeId']))
    return r
+
+
def get_blueprint(blueprint_name):
    """Look up blueprints whose name matches blueprint_name; exits on API error."""
    name_filter = create_filter({"name": blueprint_name})
    return check_error(http('GET', get_url(BLUEPRINTS_URL) + "/?filters=" + name_filter))
+
+
def delete_blueprint(blueprint_id):
    """DELETE a blueprint by its typeId; exits on API error payloads."""
    url = get_url(BLUEPRINTS_URL) + "/" + blueprint_id
    return check_error(http('DELETE', url))
+
def list_deployments():
    """GET all deployments, print them as a formatted table, return the response."""
    response = http('GET', get_url(DEPLOYMENTS_URL))
    payload = response.json()
    total_items = payload['totalItems']
    rows = [['Service Id', 'Created', 'Modified']]
    # "items" is absent when there are no deployments.
    for dep in payload.get("items", []):
        rows.append([dep['service_id'],
                     epoch_2_date(dep['created']),
                     epoch_2_date(dep['modified'])])
    print_rows_formatted(rows)
    print("Total " + str(total_items) + " deployments.")
    return response
+
def get_deployment(deployment_id):
    """GET a deployment by id; exits on API error payloads."""
    url = get_url(DEPLOYMENTS_URL) + "/" + deployment_id
    return check_error(http('GET', url))
+
def get_deployment_inputs(deployment_id, tenant):
    """GET the inputs of a deployment within the given Cloudify tenant."""
    url = get_url(DEPLOYMENTS_URL) + "/" + deployment_id + "/inputs?tenant=" + tenant
    return check_error(http('GET', url))
+
def create_deployment(body):
    """POST a new deployment request; exits on API error payloads."""
    url = get_url(DEPLOYMENTS_URL)
    return check_error(http('POST', url, body=body))
+
def update_deployment(deployment_id, body):
    """PUT an update for an existing deployment; exits on API error payloads."""
    url = get_url(DEPLOYMENTS_URL) + "/" + deployment_id + "/update"
    return check_error(http('PUT', url, body=body))
+
def delete_deployment(deployment_id, tenant):
    """DELETE a deployment within the given tenant; exits on API error payloads."""
    url = get_url(DEPLOYMENTS_URL) + "/" + deployment_id + "?tenant=" + tenant
    return check_error(http('DELETE', url),
                       fail_msg="Cannot delete deployment if install still ongoing.")
+
def executions_status(deployment_id, tenant):
    """GET the Cloudify execution history of a deployment."""
    url = get_url(DEPLOYMENTS_URL) + "/" + deployment_id + "/executions?tenant=" + tenant
    return check_error(http('GET', url))
+
def deployment_exists(deployment_id, print_non_existence=True, fail_it=True):
    """Checks if deployment with given deployment-id exists.

    print_non_existence selects which state is reported (and optionally
    fatal): True warns when the deployment is missing, False warns when
    it unexpectedly already/still exists.  Returns the existence flag.
    """
    r = get_deployment(deployment_id)
    exists = check_deployment_exists(deployment_id, r.json())
    msg = "Given deployment '" + deployment_id + "' " + ("does not exist!" if print_non_existence else "already/still exists!")
    # XOR: report only when the observed state differs from the expected one
    # (missing while expected present, or present while expected absent).
    if bool(print_non_existence) != bool(exists):
        # Separate checking of deployment existence is needed as API DELETE operation is success even if deployment does not exist.
        print(msg)
        if fail_it:
            sys.exit(1)
    return exists
+
+
def check_deployment_exists(deployment_id, deployments):
    """deployments is the json [{deployment}, ...] payload of get_deployment method"""
    if not deployments:
        return False
    # Present when any entry carries a matching "id".
    return any(entry.get("id") == deployment_id for entry in deployments)
+
+
def check_error(response, fail_it=True, fail_msg=""):
    """Inspect a dashboard API response payload for an "error" field.

    The API often reports failures with HTTP 200 plus an "error" entry in
    the JSON body; print it (and the optional fail_msg hint) and, when
    fail_it is set, terminate the process.  Returns the response unchanged
    otherwise.

    Fixes: parse the body once instead of twice, and only print fail_msg
    when one was supplied (the original printed a spurious empty line).
    """
    payload = response.json()
    if "error" in payload:
        print(payload['error'])
        if fail_msg:
            print(fail_msg)
        if fail_it:
            sys.exit(1)
    return response
+
def print_get_payload(payload):
    """Pretty-print the 'items' array of a GET response as indented JSON."""
    items = payload.json()['items']
    print(json.dumps(items, indent=2))
+
def get_executions_items(payload, key, value=None):
    """Filter the executions array of a response payload.

    Returns the executions (payload.json()['items']) that contain `key`;
    when a truthy `value` is given, only those whose `key` equals it.

    A typical item looks like:
        {"status": "terminated", "tenant_name": "default_tenant",
         "created_at": "...", "workflow_id": "install",
         "deployment_id": "dcae_k8s-datacollector", "id": "..."}
    """
    executions = payload.json()['items']
    if value:
        return [item for item in executions if key in item and item[key] == value]
    return [item for item in executions if key in item]
+
class Timeout:
    """Polling helper: each expired() call sleeps `sleep_time` seconds and
    accumulates the slept time until more than `timeout` seconds passed."""

    def __init__(self, timeout, timeout_msg, sleep_time=2):
        self.counter = 0
        self.timeout = timeout
        self.timeout_msg = timeout_msg
        self.sleep_time = sleep_time

    def expired(self):
        # Inverted guard: keep waiting while within budget.
        if self.counter <= self.timeout:
            time.sleep(int(self.sleep_time))
            self.counter += int(self.sleep_time)
            return False
        print("Timeout " + str(self.timeout) + " seconds expired while waiting " + self.timeout_msg)
        return True
+
def wait_deployment(deployment_id, operation, timeout=240):
    """Poll Cloudify executions until `operation` (install/uninstall/update)
    on `deployment_id` reaches a terminal state or `timeout` seconds pass.

    Exits the process with status 1 on failure or timeout.
    """

    def get_workflow_id(deployment_id, operation):
        # Fetch the current executions and keep only this workflow's entries.
        r = executions_status(deployment_id, args.tenant)
        print_get_payload(r)
        return get_executions_items(r, "workflow_id", operation)

    failed = False
    result = ""
    op_timeout = Timeout(timeout, "deployment " + deployment_id + " operation " + operation)
    while True:
        wf = get_workflow_id(deployment_id, operation)
        # BUG FIX: the original tested `if not wf:` and then read wf[0],
        # which raises IndexError on an empty list and never inspected the
        # status when executions WERE present (so it always timed out).
        if wf:
            status = wf[0]["status"]
            if status in ["terminated", "failed"]:
                result = "SUCCESS" if status == "terminated" else "FAILED"
                if status == "failed":
                    failed = True
                break
        if op_timeout.expired():
            failed = True
            result = "FAILED"
            break

    # For uninstall wait executions to be removed by Cloudify as it will bother re-creation of same deployment.
    # There would also be a "workflow_id": "delete_deployment_environment" state we could follow, but it can
    # disappear so fast that it is better to just wait until all executions are removed.
    if operation == DEPLOYMENT_UNINSTALL and not failed:
        ex_timeout = Timeout(timeout, "deployment " + deployment_id + " operation " + operation + " executions to be removed.")
        while True:
            if not get_workflow_id(deployment_id, operation):
                # FIX: use the deployment_id parameter (the original read the
                # global args.deployment_id, which only coincidentally matched).
                if not deployment_exists(deployment_id, print_non_existence=False, fail_it=False):
                    # Still wait a moment as deployment-handler may still have the deployment and
                    # would return "409 Conflict" in case of creating again deployment with same name.
                    time.sleep(7)
                break
            if ex_timeout.expired():
                failed = True
                result = "FAILED"
                break

    print("Deployment " + deployment_id + " operation " + operation + " was " + result)
    if failed:
        sys.exit(1)
+
def append_deployment_inputs_key_values(key_values, inputs):
    """Parse 'k=v,k2=v2' pairs and merge them into the inputs dict (returned)."""
    for pair in key_values.split(","):
        name, value = pair.split("=", 1)
        inputs[name] = value
    return inputs
+
def parse_deployment_inputs(deployment_inputs, deployment_inputs_key_value):
    """Merge blueprint inputs from a JSON file and/or a 'k=v,...' CLI string."""
    inputs = read_json_file(deployment_inputs) if deployment_inputs else {}
    if deployment_inputs_key_value:
        inputs = append_deployment_inputs_key_values(deployment_inputs_key_value, inputs)
    return inputs
+
+
def parse_args():
    """Build the argument parser, validate operation-specific requirements
    and return the parsed namespace.

    Fix: the original ended with `return parser.parse_args()`, parsing
    sys.argv a second time after already parsing it into `args`; return
    the already-parsed namespace instead.
    """
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=argparse.RawTextHelpFormatter,
                                     epilog='''
Example commands:
 python dcae-cli.py --base_url https://infra:30983 --operation create_blueprint --blueprint_file my-blueprint.yaml
 python dcae-cli.py --base_url https://infra:30983 --operation create_blueprint --body_file create_blueprint.json --blueprint_file my-blueprint.yaml
 python dcae-cli.py --base_url https://infra:30983 --operation list_blueprints
 python dcae-cli.py --base_url https://infra:30983 --operation get_blueprint --blueprint_name my-blueprint
 python dcae-cli.py --base_url https://infra:30983 --operation delete_blueprint --blueprint_id bf31992b-8643-44ed-b9d1-f6f5a806e505
 python dcae-cli.py --base_url https://infra:30983 --operation create_deployment --blueprint_id bf31992b-8643-44ed-b9d1-f6f5a806e505 --deployment_id samuli-testi --deployment_inputs inputs.yaml
 python dcae-cli.py --base_url https://infra:30983 --operation list_deployments
 python dcae-cli.py --base_url https://infra:30983 --operation delete_deployment --deployment_id dcae_samuli-testi
 python dcae-cli.py --base_url https://infra:30983 --operation executions_status --deployment_id dcae_samuli-testi
 ''')
    parser.add_argument('-u', '--base_url', required=True, help='Base url of the DCAE Dashboard API (e.g. http://127.0.0.1:30228)')
    parser.add_argument('-o', '--operation', required=True,
                        choices=['list_blueprints',
                                 'create_blueprint',
                                 'get_blueprint',
                                 'delete_blueprint',
                                 'list_deployments',
                                 'get_deployment',
                                 'get_deployment_inputs',
                                 'get_deployment_input',
                                 'create_deployment',
                                 'update_deployment',
                                 'delete_deployment',
                                 'executions_status'], help='Operation to execute towards DCAE Dashboard')
    parser.add_argument('-b', '--body_file', help="""File path for the body of the DCAE Dashboard operation. Json format file
 Given as file path to a file having the main body parameters for blueprint creation as json format.

 Example:
 {
 "typeName": "my-blueprint", # this is the blueprint name
 "typeVersion": 12345, # this is blueprint version
 "application": "DCAE-app", # OPTIONAL
 "component": "dcae-comp", # OPTIONAL
 "owner": "Samsung Guy" # Blueprint owner
 }
 Used for create_blueprint operation.
 Parameter is optional and by default following values are used:

 {
 "typeName": Filename of --blueprint_file parameter
 "typeVersion": 1,
 "application": "DCAE",
 "component": "dcae",
 "owner": "Samsung"
 }
 """)
    parser.add_argument('-bp', '--blueprint_file', help='File path for the Cloudify Blueprint file used as payload in DCAE Dashboard operation. Yaml format file')
    parser.add_argument('-id', '--blueprint_id', help='Blueprint Id (typeId) string parameter e.g. for delete_blueprint and create_deployment operations')
    parser.add_argument('-name', '--blueprint_name', help='Blueprint name string parameter given as "typeName" in --body_file when creating Blueprint. If --body_file is not given blueprint name is by default the name of the given blueprint file.')
    parser.add_argument('-tag', '--deployment_id', help='''Deployment tag / Service Id / Deployment Ref / Deployment Id.
 Many names for the identification of the deployment started from the blueprint.
 Used for create_deployment operation and for delete_deployment.
 Needs to uniquely identify deployment.
 This identification is labeled to Kubernetes resources e.g. PODs with key cfydeployment''')
    parser.add_argument('-prefix', '--deployment_id_prefix', default="samsung", help='Deployment Id Prefix is the optional component name prefixed to cfydeployment with underscore. If not given string "samsung" used by default.')
    parser.add_argument('-i', '--deployment_inputs', help='''Deployment input parameters for the Coudify blueprint.
 Given as file path to a file having input parameters as json format.
 Parameters given depends on the blueprint definition.

 Example:
 {
 "host_port": "30243",
 "service_id_name": "samsung-ves-rapp",
 "component_type_name": "samsung-rapp-service"
 }

 Used for create_deployment and update_deployment operations.
 Parameter is optional and by default no input parameters given for the blueprint.''')
    parser.add_argument('-kv', '--deployment_inputs_key_value', help='''Deployment input parameters for the Coudify blueprint.
 Same as --deployment_inputs but given on command line parameter with format of key value.

 key=value,key2=value2

 Parameters given depends on the blueprint definition.

 Example:
 "host_port=30243,service_id_name=samsung-ves-rapp,component_type_name=samsung-rapp-service"

 Used for create_deployment and update_deployment operations.
 Parameter is optional and by default no input parameters given for the blueprint.''')
    parser.add_argument('-k', '--deployment_input_key', help='''Deployment input parameter key for the Coudify blueprint.
 Key string used for the deployment input. Used in get_deployment_input operation to identify what input parameter is wanted.
 ''')
    parser.add_argument('-t', '--tenant', default='default_tenant', help='''Tenant used for Cloudify.
 Optional, if not given default value "default_tenant" is used.
 Used for create_deployment and delete_deployment operations.''')
    parser.add_argument('-v', '--verbose', action='store_true', help='Output more')
    args = parser.parse_args()

    # Per-operation required-argument validation; parser.error() exits.
    if args.blueprint_id is None and args.operation in ['create_deployment',
                                                        'delete_blueprint']:
        parser.error("--operation " + args.operation + " requires --blueprint_id argument.")
    if args.blueprint_name is None and args.operation in ['get_blueprint', 'update_deployment']:
        parser.error("--operation " + args.operation + " requires --blueprint_name argument.")
    if args.operation == 'create_blueprint' and args.blueprint_file is None:
        parser.error("--operation create_blueprint requires --blueprint_file arguments. Note also optional --body_file can be given.")
    if args.deployment_id is None and args.operation in ['create_deployment',
                                                         'get_deployment',
                                                         'get_deployment_inputs',
                                                         'get_deployment_input',
                                                         'update_deployment',
                                                         'delete_deployment',
                                                         'executions_status']:
        parser.error("--operation " + args.operation + " requires --deployment_id argument.")
    if (args.deployment_inputs is None and args.deployment_inputs_key_value is None) and args.operation in ['update_deployment']:
        parser.error("--operation " + args.operation + " requires --deployment_inputs or --deployment_inputs_key_value argument.")
    if args.deployment_input_key is None and args.operation in ['get_deployment_input']:
        parser.error("--operation " + args.operation + " requires --deployment_input_key argument.")
    # FIX: return the namespace already parsed above instead of re-parsing argv.
    return args
+
+
def get_authorization_value(user_login=USER_LOGIN, user_password=USER_PASSWORD):
    """Return base64-encoded 'user:password' for an HTTP Basic auth header."""
    credentials = user_login + ":" + user_password
    return base64.b64encode(credentials.encode('ascii')).decode('ascii')
+
+
def main():
    """Entry point: parse CLI arguments and dispatch the selected operation."""

    # Shared state used by the module-level helper functions.
    global args, s, base64_authorization_value
    args = parse_args()
    s = requests.Session()
    base64_authorization_value = get_authorization_value()

    if args.operation == "list_blueprints":
        list_blueprints()
    elif args.operation == "create_blueprint":
        # Default blueprint metadata is derived from the blueprint file name;
        # a JSON --body_file replaces all of it when supplied.
        bp_name = os.path.splitext(os.path.basename(args.blueprint_file))[0]
        body = {
            "typeName": bp_name,
            "typeVersion": 1,
            "application": "DCAE",
            "component": "dcae",
            "owner": USER_LOGIN
        }
        if args.body_file:
            body = read_json_file(args.body_file)
        blueprint = read_yaml_file(args.blueprint_file)
        # create/replace blueprint part in body
        body['blueprintTemplate'] = yaml.dump(blueprint)
        create_blueprint(body)
    elif args.operation == "get_blueprint":
        print_get_payload(get_blueprint(args.blueprint_name))
    elif args.operation == "delete_blueprint":
        delete_blueprint(args.blueprint_id)
    elif args.operation == "create_deployment":
        # The dashboard labels resources as <prefix>_<deployment_id>;
        # fail early when such a deployment already/still exists.
        full_deployment_id = args.deployment_id_prefix + "_" + args.deployment_id
        deployment_exists(full_deployment_id, print_non_existence=False)
        inputs = parse_deployment_inputs(args.deployment_inputs, args.deployment_inputs_key_value)
        body = {
            # component (deployment_id_prefix) will be prefixed to Kubernetes resources
            # label key cfydeployment with underscore.
            # E.g. cfydeployment=samsung_<deployment_id>
            # where <deployment_id> is the given args.deployment_id.
            "component": args.deployment_id_prefix,
            "tag": args.deployment_id,
            "blueprintId": args.blueprint_id,
            "tenant": args.tenant,
            "inputs": inputs
        }
        create_deployment(body)
        print("DeploymentId: " + full_deployment_id)
        wait_deployment(full_deployment_id, DEPLOYMENT_INSTALL)
    elif args.operation == "list_deployments":
        list_deployments()
    elif args.operation == "get_deployment":
        deployment_exists(args.deployment_id)
        print_get_payload(get_deployment(args.deployment_id))
    elif args.operation == "get_deployment_inputs":
        deployment_exists(args.deployment_id)
        print_get_payload(get_deployment_inputs(args.deployment_id, args.tenant))
    elif args.operation == "get_deployment_input":
        deployment_exists(args.deployment_id)
        r = get_deployment_inputs(args.deployment_id, args.tenant)
        # Print only the single requested input value.
        print(r.json()[0]["inputs"][args.deployment_input_key])
    elif args.operation == "update_deployment":
        deployment_exists(args.deployment_id)
        inputs = parse_deployment_inputs(args.deployment_inputs, args.deployment_inputs_key_value)
        body = {
            "component": args.deployment_id_prefix,
            "tag": args.deployment_id,
            "blueprintName": args.blueprint_name,
            "blueprintVersion": 1,
            "tenant": args.tenant,
            "inputs": inputs
        }
        update_deployment(args.deployment_id, body)
        wait_deployment(args.deployment_id, DEPLOYMENT_UPDATE)
    elif args.operation == "delete_deployment":
        # When the deployment is missing a message is printed and deletion
        # is skipped (fail_it=False keeps the process alive).
        if deployment_exists(args.deployment_id, fail_it=False):
            delete_deployment(args.deployment_id, args.tenant)
            wait_deployment(args.deployment_id, DEPLOYMENT_UNINSTALL)
    elif args.operation == "executions_status":
        print_get_payload(executions_status(args.deployment_id, args.tenant))
    else:
        print("No operation selected.")
        sys.exit(1)
diff --git a/operations/dcae/dcae.sh b/operations/dcae/dcae.sh
new file mode 100644
index 0000000..31bb334
--- /dev/null
+++ b/operations/dcae/dcae.sh
@@ -0,0 +1,127 @@
+#!/bin/bash
+# Copyright (C) 2019 by Samsung Electronics Co., Ltd.
+#
+# This software is the confidential and proprietary information of Samsung Electronics co., Ltd.
+# ("Confidential Information"). You shall not disclose such Confidential Information and shall use
+# it only in accordance with the terms of the license agreement you entered into with Samsung.
+
+#
+# Onboard R-APP blueprint and deploy R-APP as DCAE Microservice.
+#
+set -e
+
+NODE_IP=${NODE_IP:-$(../scripts/k8s_get_node_ip.sh)}
+DCAE_DASHBOARD_NODEPORT=${DCAE_DASHBOARD_NODEPORT:-30418}
+BASE_URL=https://${NODE_IP}:${DCAE_DASHBOARD_NODEPORT}
+
+# Invoke the python CLI against the dashboard.
+# All arguments are forwarded: the first becomes the --operation value,
+# the rest its options. -u keeps python output unbuffered so callers
+# can parse it as it is produced.
+dcae_cli(){
+  local parameters=("$@")
+  python -u dcae-cli.py --base_url "${BASE_URL}" --operation "${parameters[@]}"
+}
+
+# Onboard a blueprint file through the dashboard API.
+# $1 - path to the blueprint yaml file
+create_blueprint() {
+  local blueprint_file_path=$1
+  dcae_cli create_blueprint --blueprint_file "${blueprint_file_path}"
+}
+
+delete_blueprint() {
+ local blueprint_name=$1
+ local blueprint_id=$(dcae_cli get_blueprint --blueprint_name ${blueprint_name} | grep typeId | cut -d '"' -f 4)
+ if [[ "$blueprint_id" != "" ]]; then
+ dcae_cli delete_blueprint --blueprint_id ${blueprint_id}
+ else
+ echo "Given blueprint '$blueprint_name' does not exist!"
+ fi
+}
+
+create_deployment() {
+ local blueprint_id=$1
+ local deployment_id=$2
+ local deployment_id_prefix=$3
+ local deployment_inputs_key_value=$4
+ local deployment_inputs=""
+ if [[ "$deployment_inputs_key_value" != "" ]]; then
+ deployment_inputs="--deployment_inputs_key_value ${deployment_inputs_key_value}"
+ fi
+ dcae_cli create_deployment --blueprint_id ${blueprint_id} --deployment_id ${deployment_id} --deployment_id_prefix ${deployment_id_prefix} ${deployment_inputs}
+}
+
+deploy() {
+ local blueprint_file_path=$1
+ local deployment_id=$2
+ local deployment_id_prefix=$3
+ local deployment_inputs_key_value=$4
+ out=$(create_blueprint ${blueprint_file_path})
+ echo "$out"
+ blueprint_id=$(echo "$out" | grep typeId | cut -d ' ' -f 2)
+ create_deployment ${blueprint_id} ${deployment_id} ${deployment_id_prefix} ${deployment_inputs_key_value}
+}
+
+undeploy() {
+ local deployment_id=$1
+ dcae_cli delete_deployment --deployment_id ${deployment_id}
+}
+
+# Command dispatch. First argument selects the operation.
+operation=$1
+case "$operation" in
+  # '\?' matches a literal question mark; the previous unescaped '?' was a
+  # glob that made ANY single-character argument print the help text.
+  -h|--help|help|\?|"")
+    echo "Script usage:"
+    echo "$0 deploy - Deploy Blueprint"
+    echo "$0 undeploy - Undeploy deployment instantiated from blueprint"
+    echo "$0 redeploy - Redeploy deployment"
+    echo "$0 delete - Delete blueprint"
+    echo "$0 list - List blueprints and deployments"
+    echo "$0 list_blueprints - List blueprints"
+    echo "$0 list_deployments - List deployments"
+    echo "$0 get_deployment_inputs - List all deployment input parameters given to a deployment"
+    echo "$0 get_deployment_input - List only single deployment input value with given key."
+    ;;
+  deploy)
+    blueprint_file_path=$2
+    deployment_id=$3
+    deployment_id_prefix=$4
+    deployment_inputs_key_value=$5
+    deploy "${blueprint_file_path}" "${deployment_id}" "${deployment_id_prefix}" "${deployment_inputs_key_value}"
+    ;;
+  undeploy)
+    undeploy "$2"
+    ;;
+  redeploy)
+    blueprint_file_path=$2
+    deployment_id=$3
+    deployment_inputs_key_value=$4
+    undeploy "${deployment_id}"
+    # Note that deployment_id in creation does not have yet the prefix.
+    # Split full deployment id "myprefix_mydeploymentid" into prefix and id.
+    deployment_id_prefix="${deployment_id%%_*}"
+    deployment_id_for_create="${deployment_id#*_}"
+    deploy "${blueprint_file_path}" "${deployment_id_for_create}" "${deployment_id_prefix}" "${deployment_inputs_key_value}"
+    ;;
+  delete)
+    blueprint_name=$2
+    delete_blueprint "${blueprint_name}"
+    ;;
+  list)
+    dcae_cli list_blueprints
+    dcae_cli list_deployments
+    ;;
+  list_blueprints)
+    dcae_cli list_blueprints
+    ;;
+  list_deployments)
+    dcae_cli list_deployments
+    ;;
+  get_deployment_inputs)
+    deployment_id=$2
+    dcae_cli get_deployment_inputs --deployment_id "${deployment_id}"
+    ;;
+  get_deployment_input)
+    deployment_id=$2
+    input_key=$3
+    # Keep only the last line of CLI output: the bare input value.
+    dcae_cli get_deployment_input --deployment_id "${deployment_id}" --deployment_input_key "${input_key}" | tail -1
+    ;;
+  *)
+    echo "Wrong usage, check '$0 -h'" >&2
+    exit 1
+    ;;
+esac
diff --git a/operations/dcae/inputs_database_password.sh b/operations/dcae/inputs_database_password.sh
new file mode 100644
index 0000000..9e4f883
--- /dev/null
+++ b/operations/dcae/inputs_database_password.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+# Copyright (C) 2019 by Samsung Electronics Co., Ltd.
+#
+# This software is the confidential and proprietary information of Samsung Electronics co., Ltd.
+# ("Confidential Information"). You shall not disclose such Confidential Information and shall use
+# it only in accordance with the terms of the license agreement you entered into with Samsung.
+
+# Resolve the MariaDB Galera root password; a pre-set DATABASE_PASSWORD wins.
+# Sourced by rapps.sh. Backticks replaced with $( ), and the inner secret-name
+# substitution is quoted so an unexpected name cannot be word-split.
+export DATABASE_PASSWORD=${DATABASE_PASSWORD:-$(kubectl get secret "$(kubectl get secrets | grep mariadb-galera-db-root-password | awk '{print $1}')" -o jsonpath="{.data.password}" | base64 --decode)}
diff --git a/operations/dcae/rapps.sh b/operations/dcae/rapps.sh
new file mode 100644
index 0000000..9af8284
--- /dev/null
+++ b/operations/dcae/rapps.sh
@@ -0,0 +1,103 @@
+#!/bin/bash
+# Copyright (C) 2019 by Samsung Electronics Co., Ltd.
+#
+# This software is the confidential and proprietary information of Samsung Electronics co., Ltd.
+# ("Confidential Information"). You shall not disclose such Confidential Information and shall use
+# it only in accordance with the terms of the license agreement you entered into with Samsung.
+
+set -e
+
+# Defaults; both can be overridden from the environment (see -h output).
+BLUEPRINTS_DIR=${BLUEPRINTS_DIR:-blueprints}
+DEPLOYMENT_ID_PREFIX=${DEPLOYMENT_ID_PREFIX:-"samsung"}
+
+# Blueprint files of the r-apps; MUST stay index-aligned with
+# rapp_deployment_ids below (exec_over_rapps pairs them by index).
+declare -a rapp_blueprint_files=(
+  ${BLUEPRINTS_DIR}/k8s-datacollector.yaml
+  ${BLUEPRINTS_DIR}/k8s-sleepingcelldetector.yaml
+)
+
+# Define deployment id names for rapps
+declare -a rapp_deployment_ids=(
+  rapp-datacollector
+  rapp-sleepingcelldetector
+)
+
+exec_over_rapps() {
+ local action_func=$1
+ local rapp_filter=$2
+ for i in "${!rapp_blueprint_files[@]}"
+ do
+ if [[ "${DEPLOYMENT_ID_PREFIX}_${rapp_deployment_ids[$i]}" == *"$rapp_filter"* ]]; then
+ $action_func ${rapp_blueprint_files[$i]} ${rapp_deployment_ids[$i]}
+ fi
+ done
+}
+
+# Command dispatch. First argument selects the operation.
+operation=$1
+case "$operation" in
+  # '\?' matches a literal question mark; the previous unescaped '?' was a
+  # glob that made ANY single-character argument print the help text.
+  -h|--help|help|\?|"")
+    echo "Script usage:"
+    echo "$0 deploy - Deploy rapp(s)"
+    echo "$0 undeploy - Undeploy rapp(s)"
+    echo "$0 redeploy - Redeploy rapp(s)"
+    echo "$0 list - List rapps properties"
+    echo
+    echo "BLUEPRINTS_DIR and DEPLOYMENT_ID_PREFIX variables can be exported to override default value."
+    echo "BLUEPRINTS_DIR default value is 'blueprints'."
+    echo "DEPLOYMENT_ID_PREFIX is a string prefixed to given deployment_id in the deploy operation."
+    echo "In other operations prefixed form is used. DEPLOYMENT_ID_PREFIX default value is 'samsung'."
+    ;;
+  deploy)
+    rapp_filter=$2
+    # Create inputs. Currently the only input to be provided is database password and that is only
+    # applicable for datacollector r-app at the moment;
+    . ./inputs_database_password.sh
+    deployment_inputs="database_password=${DATABASE_PASSWORD}"
+    do_deploy() {
+      local blueprint_file=$1
+      local deployment_id=$2
+      # Use a function-local copy: the previous code blanked the shared
+      # deployment_inputs global for every iteration after the first
+      # non-datacollector r-app, making deployment order significant.
+      local inputs=""
+      if [[ "${deployment_id}" == "rapp-datacollector" ]]; then
+        inputs=${deployment_inputs}
+      fi
+      ./dcae.sh deploy "${blueprint_file}" "${deployment_id}" "${DEPLOYMENT_ID_PREFIX}" "${inputs}"
+    }
+    exec_over_rapps do_deploy "${rapp_filter}"
+    ./dcae.sh list
+    ;;
+  undeploy)
+    rapp_filter=$2
+    do_undeploy() {
+      local blueprint_file=$1
+      local deployment_id=$2
+      ./dcae.sh undeploy "${DEPLOYMENT_ID_PREFIX}_${deployment_id}"
+      # Blueprint name is the blueprint file's basename without extension.
+      ./dcae.sh delete "$(basename "${blueprint_file}" | cut -d'.' -f1)"
+    }
+    exec_over_rapps do_undeploy "${rapp_filter}"
+    ./dcae.sh list
+    ;;
+  redeploy)
+    rapp_filter=$2
+    deployment_inputs_key_value=$3
+    do_redeploy() {
+      local blueprint_file=$1
+      local deployment_id=$2
+      ./dcae.sh redeploy "${blueprint_file}" "${DEPLOYMENT_ID_PREFIX}_${deployment_id}" "${deployment_inputs_key_value}"
+    }
+    exec_over_rapps do_redeploy "${rapp_filter}"
+    ./dcae.sh list
+    ;;
+  get_deployment_input)
+    property=$2
+    rapp_filter=$3
+    do_input() {
+      local blueprint_file=$1
+      local deployment_id=$2
+      local full_id=${DEPLOYMENT_ID_PREFIX}_${deployment_id}
+      echo "${full_id}" "$(./dcae.sh get_deployment_input "${full_id}" "${property}")"
+    }
+    exec_over_rapps do_input "${rapp_filter}"
+    ;;
+  *)
+    echo "Wrong usage, check '$0 -h'" >&2
+    exit 1
+    ;;
+esac
diff --git a/operations/scripts/k8s_get_node_ip.sh b/operations/scripts/k8s_get_node_ip.sh
new file mode 100644
index 0000000..4f423e8
--- /dev/null
+++ b/operations/scripts/k8s_get_node_ip.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+# Copyright (C) 2018 by Samsung Electronics Co., Ltd.
+#
+# This software is the confidential and proprietary information of Samsung Electronics co., Ltd.
+# ("Confidential Information"). You shall not disclose such Confidential Information and shall use
+# it only in accordance with the terms of the license agreement you entered into with Samsung.
+
+#
+# Echo Kubernetes cluster first node's internal IP address
+#
+# The jsonpath prints every node's InternalIP separated by spaces;
+# cut keeps only the first one.
+kubectl get nodes -o jsonpath='{ $.items[*].status.addresses[?(@.type=="InternalIP")].address }' | cut -d ' ' -f 1