Diffstat (limited to 'mod')
96 files changed, 13555 insertions, 0 deletions
diff --git a/mod/component-json-schemas/README.md b/mod/component-json-schemas/README.md new file mode 100644 index 0000000..f1dfcd5 --- /dev/null +++ b/mod/component-json-schemas/README.md @@ -0,0 +1,36 @@ +# Component JSON Schemas + +This repository contains the custom JSON schemas that support the onboarding of components: + +* Component specification schema +* Auxiliary component specification schema for Docker +* Auxiliary component specification schema for CDAP +* Data format schema + +## Testing changes + +Use the Python `jsonschema` command-line tool to run validation checks: + +Example: + +``` +$ jsonschema -i tests/component-spec-docker.json component-spec-schema.json +``` + +## Uploading to Nexus + +For the component specification schema: + +``` +curl -v --user <user>:<password> https://<your file server host>/schemas/component-specification/<tag>/component-spec-schema.json --upload-file component-spec-schema.json +``` + +For the data format schema: + +``` +curl -v --user <user>:<password> https://<your file server host>/schemas/data-format/<tag>/data-format-schema.json --upload-file data-format-schema.json +``` + +### `dcae-cli` + +The `dcae-cli` looks for these schemas under a tag of the form `dcae-cli-v<major version>`, where `<major version>` is the integer major portion of the schema's semantic version. For breaking schema changes, you must bump `<major version>`; otherwise, simply replace the existing schema by uploading it under the same tag. diff --git a/mod/component-json-schemas/component-specification/dcae-cli-v1/component-spec-schema.json b/mod/component-json-schemas/component-specification/dcae-cli-v1/component-spec-schema.json new file mode 100644 index 0000000..46c2a2a --- /dev/null +++ b/mod/component-json-schemas/component-specification/dcae-cli-v1/component-spec-schema.json @@ -0,0 +1,732 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Component specification schema", + "type": "object", + "properties": { + "self": { + "type": "object", + "properties": { + "version": { + "$ref": "#/definitions/version" + }, + "description": { + "type": "string" + }, + "component_type": { + "type": "string", + "enum": [ + "docker", + "cdap" + ] + }, + "name": { + "$ref": "#/definitions/name" + } + }, + "required": [ + "version", + "name", + "description", + "component_type" + ] + }, + "streams": { + "type": "object", + "properties": { + "publishes": { + "type": "array", + "uniqueItems": true, + "items": { + "oneOf": [ + { "$ref": "#/definitions/publisher_http" }, + { "$ref": "#/definitions/publisher_message_router" }, + { "$ref": "#/definitions/publisher_data_router" } + ] + } + }, + "subscribes": { + "type": "array", + "uniqueItems": true, + "items": { + "oneOf": [ + { "$ref": "#/definitions/subscriber_http" }, + { "$ref": "#/definitions/subscriber_message_router" }, + { "$ref": "#/definitions/subscriber_data_router" } + ] + } + } + }, + "required": [ + "publishes", + "subscribes" + ] + }, + "services": { + "type": "object", + "properties": { + "calls": { + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/caller" + } + }, + "provides": { + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/provider" + } + } + }, + "required": [ + "calls", + "provides" + ] + }, + "parameters" : { + "anyOf" : [ + {"$ref": "#/definitions/docker-parameters"}, + {"$ref": "#/definitions/cdap-parameters"} + ] + }, + "auxilary": { + "oneOf" : [ + {"$ref": "#/definitions/auxilary_cdap"}, + {"$ref": 
"#/definitions/auxilary_docker"} + ] + }, + "artifacts": { + "type": "array", + "description": "List of component artifacts", + "items": { + "$ref": "#/definitions/artifact" + } + } + }, + "required": [ + "self", + "streams", + "services", + "parameters", + "auxilary", + "artifacts" + ], + "additionalProperties": false, + "definitions": { + "cdap-parameters": { + "description" : "There are three seperate ways to pass parameters to CDAP: app config, app preferences, program preferences. These are all treated as optional.", + "type": "object", + "properties" : { + "program_preferences": { + "description" : "A list of {program_id, program_type, program_preference} objects where program_preference is an object passed into program_id of type program_type", + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/program_preference" + } + }, + "app_preferences" : { + "description" : "Parameters Passed down to the CDAP preference API", + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/parameter" + } + }, + "app_config" : { + "description" : "Parameters Passed down to the CDAP App Config", + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/parameter" + } + } + } + }, + "program_preference": { + "type": "object", + "properties": { + "program_type": { + "$ref": "#/definitions/program_type" + }, + "program_id": { + "type": "string" + }, + "program_pref":{ + "description" : "Parameters that the CDAP developer wants pushed to this program's preferences API. Optional", + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/parameter" + } + } + }, + "required": ["program_type", "program_id", "program_pref"] + }, + "program_type": { + "type": "string", + "enum": ["flows","mapreduce","schedules","spark","workflows","workers","services"] + }, + "docker-parameters": { + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/parameter" + } + }, + "parameter": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "value": { + "description": "Default value for the parameter" + }, + "description": { + "description": "Description for the parameter.", + "type": "string" + }, + "type": { + "description": "The required data type for the parameter.", + "type": "string", + "enum": [ "string", "number", "boolean", "datetime" ] + }, + "required": { + "description": "An optional key that declares a parameter as required (true) or not (false). Default is true.", + "type": "boolean", + "default": true + }, + "constraints": { + "description": "The optional list of sequenced constraint clauses for the parameter.", + "type": "array", + "items": { + "$ref": "#/definitions/parameter-constraints" + } + }, + "entry_schema": { + "description": "used for complex data type in the future. 'type' must be map or array for entry_schema to kick_in. ", + "type": "string" + }, + "designer_editable": { + "description": "An optional key that declares a parameter to be editable by designer (true) or not (false). Default is true.", + "type": "boolean", + "default": true + }, + "policy_editable": { + "description": "An optional key that declares a parameter to be editable by policy (true) or not (false). Default is true.", + "type": "boolean", + "default": false + }, + "sourced_at_deployment": { + "description": "An optional key that declares a parameter's value to be assigned at deployment time (true). 
Default is false.", + "type": "boolean", + "default": false + }, + "policy_schema" :{ + "type": "array", + "uniqueItems": true, + "items": {"$ref": "#/definitions/policy_schema_parameter"} + } + }, + "required": [ + "name", + "value", + "description" + ], + "additionalProperties": false, + "dependencies": { "policy_schema": ["policy_editable"]} + }, + "policy_schema_parameter": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "value": { + "description": "Default value for the parameter" + }, + "description": { + "description": "Description for the parameter.", + "type": "string" + }, + "type": { + "description": "The required data type for the parameter.", + "type": "string", + "enum": [ "string", "number", "boolean", "datetime", "list", "map" ] + }, + "required": { + "description": "An optional key that declares a parameter as required (true) or not (false). Default is true.", + "type": "boolean", + "default": true + }, + "constraints": { + "description": "The optional list of sequenced constraint clauses for the parameter.", + "type": "array", + "items": { + "$ref": "#/definitions/parameter-constraints" + } + }, + "entry_schema": { + "description": "The optional key that is used to declare the name of the Datatype definition for entries of certain types. entry_schema must be defined when the type is either list or map. If the type is list and the entry type is a simple type (string, number, boolean, datetime), follow with a simple string to describe the entry type. If the type is list and the entry type is a map, follow with an array to describe the keys for the entry map. If the type is list and the entry type is also list, this is not currently supported here. If the type is map, then follow with an array to describe the keys for this map. ", + "type": "array", "uniqueItems": true, "items": {"$ref": "#/definitions/policy_schema_parameter"} + } + }, + "required": [ + "name", + "type" + ], + "additionalProperties": false + }, + "parameter-constraints": { + "type": "object", + "additionalProperties": false, + "properties": { + "equal": { + "description": "Constrains a property or parameter to a value equal to (‘=’) the value declared." 
+ }, + "greater_than": { + "description": "Constrains a property or parameter to a value greater than (‘>’) the value declared.", + "type": "number" + }, + "greater_or_equal": { + "description": "Constrains a property or parameter to a value greater than or equal to (‘>=’) the value declared.", + "type": "number" + }, + "less_than": { + "description": "Constrains a property or parameter to a value less than (‘<’) the value declared.", + "type": "number" + }, + "less_or_equal": { + "description": "Constrains a property or parameter to a value less than or equal to (‘<=’) the value declared.", + "type": "number" + }, + "valid_values": { + "description": "Constrains a property or parameter to a value that is in the list of declared values.", + "type": "array" + }, + "length": { + "description": "Constrains the property or parameter to a value of a given length.", + "type": "number" + }, + "min_length": { + "description": "Constrains the property or parameter to a value to a minimum length.", + "type": "number" + }, + "max_length": { + "description": "Constrains the property or parameter to a value to a maximum length.", + "type": "number" + } + } + }, + "stream_message_router": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "config_key": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "message router", "message_router" + ] + } + }, + "required": [ + "format", + "version", + "config_key", + "type" + ] + }, + "publisher_http": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "config_key": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "http", + "https" + ] + } + }, + "required": [ + "format", + "version", + "config_key", + "type" + ] + }, + "publisher_message_router": { + "$ref": "#/definitions/stream_message_router" + }, + "publisher_data_router": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "config_key": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "data router", "data_router" + ] + } + }, + "required": [ + "format", + "version", + "config_key", + "type" + ] + }, + "subscriber_http": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "route": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "http", + "https" + ] + } + }, + "required": [ + "format", + "version", + "route", + "type" + ] + }, + "subscriber_message_router": { + "$ref": "#/definitions/stream_message_router" + }, + "subscriber_data_router": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "route": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "data router", "data_router" + ] + }, + "config_key": { + "description": "Data router subscribers require config info to setup their endpoints to handle requests. 
For example, needs username and password", + "type": "string" + } + }, + "required": [ + "format", + "version", + "route", + "type", + "config_key" + ] + }, + "provider" : { + "oneOf" : [ + {"$ref": "#/definitions/docker-provider"}, + {"$ref": "#/definitions/cdap-provider"} + ] + }, + "cdap-provider" : { + "type": "object", + "properties" : { + "request": { + "$ref": "#/definitions/formatPair" + }, + "response": { + "$ref": "#/definitions/formatPair" + }, + "service_name" : { + "type" : "string" + }, + "service_endpoint" : { + "type" : "string" + }, + "verb" : { + "type": "string", + "enum": ["GET", "PUT", "POST", "DELETE"] + } + }, + "required" : [ + "request", + "response", + "service_name", + "service_endpoint", + "verb" + ] + }, + "docker-provider": { + "type": "object", + "properties": { + "request": { + "$ref": "#/definitions/formatPair" + }, + "response": { + "$ref": "#/definitions/formatPair" + }, + "route": { + "type": "string" + }, + "verb": { + "type": "string", + "enum": ["GET", "PUT", "POST", "DELETE"] + } + }, + "required": [ + "request", + "response", + "route" + ] + }, + "caller": { + "type": "object", + "properties": { + "request": { + "$ref": "#/definitions/formatPair" + }, + "response": { + "$ref": "#/definitions/formatPair" + }, + "config_key": { + "type": "string" + } + }, + "required": [ + "request", + "response", + "config_key" + ] + }, + "formatPair": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + } + } + }, + "name": { + "type": "string" + }, + "version": { + "type": "string", + "pattern": "^(\\d+\\.)(\\d+\\.)(\\*|\\d+)$" + }, + "artifact": { + "type": "object", + "description": "Component artifact object", + "properties": { + "uri": { + "type": "string", + "description": "Uri to artifact" + }, + "type": { + "type": "string", + "enum": ["jar", "docker image"] + } + }, + "required": ["uri", "type"] + }, + + "auxilary_cdap": { + "title": "cdap component specification schema", + "type": "object", + "properties": { + "streamname": { + "type": "string" + }, + "artifact_name" : { + "type": "string" + }, + "artifact_version" : { + "type": "string", + "pattern": "^(\\d+\\.)(\\d+\\.)(\\*|\\d+)$" + }, + "namespace":{ + "type": "string", + "description" : "optional" + }, + "programs": { + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/cdap_program" + } + } + }, + "required": [ + "streamname", + "programs", + "artifact_name", + "artifact_version" + ] + }, + "cdap_program_type": { + "type": "string", + "enum": ["flows","mapreduce","schedules","spark","workflows","workers","services"] + }, + "cdap_program": { + "type": "object", + "properties": { + "program_type": { + "$ref": "#/definitions/cdap_program_type" + }, + "program_id": { + "type": "string" + } + }, + "required": ["program_type", "program_id"] + }, + + "auxilary_docker": { + "title": "Docker component specification schema", + "type": "object", + "properties": { + "healthcheck": { + "description": "Define the health check that Consul should perfom for this component", + "type": "object", + "oneOf": [ + { "$ref": "#/definitions/docker_healthcheck_http" }, + { "$ref": "#/definitions/docker_healthcheck_script" } + ] + }, + "ports": { + "description": "Port mapping to be used for Docker containers. 
Each entry is of the format <container port>:<host port>.", + "type": "array", + "items": { + "type": "string" + } + }, + "logging": { + "description": "Component specific details for logging", + "type": "object", + "properties": { + "log_directory": { + "description": "The path in the container where the component writes its logs. If the component is following the EELF requirements, this would be the directory where the four EELF files are being written. (Other logs can be placed in the directory--if their names in '.log', they'll also be sent into ELK.)", + "type": "string" + }, + "alternate_fb_path": { + "description": "By default, the log volume is mounted at /var/log/onap/<component_type> in the sidecar container's file system. 'alternate_fb_path' allows overriding the default. Will affect how the log data can be found in the ELK system.", + "type": "string" + } + }, + "additionalProperties": false + } + }, + "required": [ + "healthcheck" + ], + "additionalProperties": false + }, + "docker_healthcheck_http": { + "properties": { + "type": { + "description": "Consul health check type", + "type": "string", + "enum": [ + "http", + "https" + ] + }, + "interval": { + "description": "Interval duration in seconds i.e. 10s", + "default": "15s", + "type": "string" + }, + "timeout": { + "description": "Timeout in seconds i.e. 10s", + "default": "1s", + "type": "string" + }, + "endpoint": { + "description": "Relative endpoint used by Consul to check health by making periodic HTTP GET calls", + "type": "string" + } + }, + "required": [ + "type", + "endpoint" + ] + }, + "docker_healthcheck_script": { + "properties": { + "type": { + "description": "Consul health check type", + "type": "string", + "enum": [ + "script", + "docker" + ] + }, + "interval": { + "description": "Interval duration in seconds i.e. 10s", + "default": "15s", + "type": "string" + }, + "timeout": { + "description": "Timeout in seconds i.e. 
10s", + "default": "1s", + "type": "string" + }, + "script": { + "description": "Script command that will be executed by Consul to check health", + "type": "string" + } + }, + "required": [ + "type", + "script" + ] + } + } +} diff --git a/mod/component-json-schemas/component-specification/dcae-cli-v2/component-spec-schema.json b/mod/component-json-schemas/component-specification/dcae-cli-v2/component-spec-schema.json new file mode 100644 index 0000000..1f1f75e --- /dev/null +++ b/mod/component-json-schemas/component-specification/dcae-cli-v2/component-spec-schema.json @@ -0,0 +1,860 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Component specification schema", + "type": "object", + "properties": { + "self": { + "type": "object", + "properties": { + "version": { + "$ref": "#/definitions/version" + }, + "description": { + "type": "string" + }, + "component_type": { + "type": "string", + "enum": [ + "docker", + "cdap" + ] + }, + "name": { + "$ref": "#/definitions/name" + } + }, + "required": [ + "version", + "name", + "description", + "component_type" + ] + }, + "streams": { + "type": "object", + "properties": { + "publishes": { + "type": "array", + "uniqueItems": true, + "items": { + "oneOf": [ + { "$ref": "#/definitions/publisher_http" }, + { "$ref": "#/definitions/publisher_message_router" }, + { "$ref": "#/definitions/publisher_data_router" } + ] + } + }, + "subscribes": { + "type": "array", + "uniqueItems": true, + "items": { + "oneOf": [ + { "$ref": "#/definitions/subscriber_http" }, + { "$ref": "#/definitions/subscriber_message_router" }, + { "$ref": "#/definitions/subscriber_data_router" } + ] + } + } + }, + "required": [ + "publishes", + "subscribes" + ] + }, + "services": { + "type": "object", + "properties": { + "calls": { + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/caller" + } + }, + "provides": { + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/provider" + } + } + }, + "required": [ + "calls", + "provides" + ] + }, + "parameters" : { + "anyOf" : [ + {"$ref": "#/definitions/docker-parameters"}, + {"$ref": "#/definitions/cdap-parameters"} + ] + }, + "auxilary": { + "oneOf" : [ + {"$ref": "#/definitions/auxilary_cdap"}, + {"$ref": "#/definitions/auxilary_docker"} + ] + }, + "artifacts": { + "type": "array", + "description": "List of component artifacts", + "items": { + "$ref": "#/definitions/artifact" + } + } + }, + "required": [ + "self", + "streams", + "services", + "parameters", + "auxilary", + "artifacts" + ], + "additionalProperties": false, + "definitions": { + "cdap-parameters": { + "description" : "There are three seperate ways to pass parameters to CDAP: app config, app preferences, program preferences. 
These are all treated as optional.", + "type": "object", + "properties" : { + "program_preferences": { + "description" : "A list of {program_id, program_type, program_preference} objects where program_preference is an object passed into program_id of type program_type", + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/program_preference" + } + }, + "app_preferences" : { + "description" : "Parameters Passed down to the CDAP preference API", + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/parameter" + } + }, + "app_config" : { + "description" : "Parameters Passed down to the CDAP App Config", + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/parameter" + } + } + } + }, + "program_preference": { + "type": "object", + "properties": { + "program_type": { + "$ref": "#/definitions/program_type" + }, + "program_id": { + "type": "string" + }, + "program_pref":{ + "description" : "Parameters that the CDAP developer wants pushed to this program's preferences API. Optional", + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/parameter" + } + } + }, + "required": ["program_type", "program_id", "program_pref"] + }, + "program_type": { + "type": "string", + "enum": ["flows","mapreduce","schedules","spark","workflows","workers","services"] + }, + "docker-parameters": { + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/parameter" + } + }, + "parameter": { + "oneOf": [ + {"$ref": "#/definitions/parameter-list"}, + {"$ref": "#/definitions/parameter-other"} + ] + }, + "parameter-list": { + "properties": { + "name": { + "type": "string" + }, + "value": { + "description": "Default value for the parameter" + }, + "description": { + "description": "Description for the parameter.", + "type": "string" + }, + "type": { + "description": "Only valid type is list, the entry_schema is required - which contains the type of the list element. All properties set for the parameter apply to all elements in the list at this time", + "type": "string", + "enum": ["list"] + }, + "required": { + "description": "An optional key that declares a parameter as required (true) or not (false). Default is true.", + "type": "boolean", + "default": true + }, + "constraints": { + "description": "The optional list of sequenced constraint clauses for the parameter.", + "type": "array", + "items": { + "$ref": "#/definitions/parameter-constraints" + } + }, + "entry_schema": { + "description": "The optional property used to declare the name of the Datatype definition for entries of certain types. entry_schema must be defined when the type is list. This is the only type it is currently supported for.", + "type": "object", + "uniqueItems": true, + "items": {"$ref": "#/definitions/list-parameter"} + }, + "designer_editable": { + "description": "A required property that declares a parameter as editable by designer in SDC Tool (true) or not (false).", + "type": "boolean" + }, + "sourced_at_deployment": { + "description": "A required property that declares that a parameter is assigned at deployment time (true) or not (false).", + "type": "boolean" + }, + "policy_editable": { + "description": "A required property that declares a parameter as editable by DevOps in Policy UI (true) or not (false).", + "type": "boolean" + }, + "policy_group": { + "description": "An optional property used to group policy_editable parameters into groups. Each group will become it's own policy model. 
Any parameters without this property will be grouped together to form their own policy model", + "type": "string" + }, + "policy_schema" :{ + "type": "array", + "uniqueItems": true, + "items": {"$ref": "#/definitions/policy_schema_parameter"} + } + }, + "required": [ + "name", + "value", + "description", + "designer_editable", + "policy_editable", + "sourced_at_deployment", + "entry_schema" + ], + "additionalProperties": false, + "dependencies": { + "policy_schema": ["policy_editable"] + } + }, + "parameter-other": { + "properties": { + "name": { + "type": "string" + }, + "value": { + "description": "Default value for the parameter" + }, + "description": { + "description": "Description for the parameter.", + "type": "string" + }, + "type": { + "description": "The required data type for the parameter.", + "type": "string", + "enum": [ "string", "number", "boolean", "datetime" ] + }, + "required": { + "description": "An optional key that declares a parameter as required (true) or not (false). Default is true.", + "type": "boolean", + "default": true + }, + "constraints": { + "description": "The optional list of sequenced constraint clauses for the parameter.", + "type": "array", + "items": { + "$ref": "#/definitions/parameter-constraints" + } + }, + "designer_editable": { + "description": "A required property that declares a parameter as editable by designer in SDC Tool (true) or not (false).", + "type": "boolean" + }, + "sourced_at_deployment": { + "description": "A required property that declares that a parameter is assigned at deployment time (true) or not (false).", + "type": "boolean" + }, + "policy_editable": { + "description": "A required property that declares a parameter as editable in Policy UI (true) or not (false).", + "type": "boolean" + }, + "policy_group": { + "description": "An optional property used to group policy_editable parameters into groups. Each group will become it's own policy model. Any parameters without this property will be grouped together to form their own policy model", + "type": "string" + }, + "policy_schema" :{ + "description": "An optional property used to define policy_editable parameters as lists or maps", + "type": "array", + "uniqueItems": true, + "items": {"$ref": "#/definitions/policy_schema_parameter"} + } + }, + "required": [ + "name", + "value", + "description", + "designer_editable", + "sourced_at_deployment", + "policy_editable" + ], + "additionalProperties": false, + "dependencies": { + "policy_schema": ["policy_editable"] + } + }, + "list-parameter": { + "type": "object", + "properties": { + "type": { + "description": "The required data type for each parameter in the list.", + "type": "string", + "enum": ["string", "number"] + } + }, + "required": [ + "type" + ], + "additionalProperties": false + }, + "policy_schema_parameter": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "value": { + "description": "Default value for the parameter" + }, + "description": { + "description": "Description for the parameter.", + "type": "string" + }, + "type": { + "description": "The required data type for the parameter.", + "type": "string", + "enum": [ "string", "number", "boolean", "datetime", "list", "map" ] + }, + "required": { + "description": "An optional key that declares a parameter as required (true) or not (false). 
Default is true.", + "type": "boolean", + "default": true + }, + "constraints": { + "description": "The optional list of sequenced constraint clauses for the parameter.", + "type": "array", + "items": { + "$ref": "#/definitions/parameter-constraints" + } + }, + "entry_schema": { + "description": "The optional key that is used to declare the name of the Datatype definition for entries of certain types. entry_schema must be defined when the type is either list or map. If the type is list and the entry type is a simple type (string, number, boolean, datetime), follow with a simple string to describe the entry type. If the type is list and the entry type is a map, follow with an array to describe the keys for the entry map. If the type is list and the entry type is also list, this is not currently supported here. If the type is map, then follow with an array to describe the keys for this map. ", + "type": "array", "uniqueItems": true, "items": {"$ref": "#/definitions/policy_schema_parameter"} + } + }, + "required": [ + "name", + "type" + ], + "additionalProperties": false + }, + "parameter-constraints": { + "type": "object", + "additionalProperties": false, + "properties": { + "equal": { + "description": "Constrains a property or parameter to a value equal to (‘=’) the value declared." + }, + "greater_than": { + "description": "Constrains a property or parameter to a value greater than (‘>’) the value declared.", + "type": "number" + }, + "greater_or_equal": { + "description": "Constrains a property or parameter to a value greater than or equal to (‘>=’) the value declared.", + "type": "number" + }, + "less_than": { + "description": "Constrains a property or parameter to a value less than (‘<’) the value declared.", + "type": "number" + }, + "less_or_equal": { + "description": "Constrains a property or parameter to a value less than or equal to (‘<=’) the value declared.", + "type": "number" + }, + "valid_values": { + "description": "Constrains a property or parameter to a value that is in the list of declared values.", + "type": "array" + }, + "length": { + "description": "Constrains the property or parameter to a value of a given length.", + "type": "number" + }, + "min_length": { + "description": "Constrains the property or parameter to a value to a minimum length.", + "type": "number" + }, + "max_length": { + "description": "Constrains the property or parameter to a value to a maximum length.", + "type": "number" + } + } + }, + "stream_message_router": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "config_key": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "message router", "message_router" + ] + } + }, + "required": [ + "format", + "version", + "config_key", + "type" + ] + }, + "publisher_http": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "config_key": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "http", + "https" + ] + } + }, + "required": [ + "format", + "version", + "config_key", + "type" + ] + }, + "publisher_message_router": { + "$ref": "#/definitions/stream_message_router" + }, + "publisher_data_router": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + 
}, + "config_key": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "data router", "data_router" + ] + } + }, + "required": [ + "format", + "version", + "config_key", + "type" + ] + }, + "subscriber_http": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "route": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "http", + "https" + ] + } + }, + "required": [ + "format", + "version", + "route", + "type" + ] + }, + "subscriber_message_router": { + "$ref": "#/definitions/stream_message_router" + }, + "subscriber_data_router": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "route": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "data router", "data_router" + ] + }, + "config_key": { + "description": "Data router subscribers require config info to setup their endpoints to handle requests. For example, needs username and password", + "type": "string" + } + }, + "required": [ + "format", + "version", + "route", + "type", + "config_key" + ] + }, + "provider" : { + "oneOf" : [ + {"$ref": "#/definitions/docker-provider"}, + {"$ref": "#/definitions/cdap-provider"} + ] + }, + "cdap-provider" : { + "type": "object", + "properties" : { + "request": { + "$ref": "#/definitions/formatPair" + }, + "response": { + "$ref": "#/definitions/formatPair" + }, + "service_name" : { + "type" : "string" + }, + "service_endpoint" : { + "type" : "string" + }, + "verb" : { + "type": "string", + "enum": ["GET", "PUT", "POST", "DELETE"] + } + }, + "required" : [ + "request", + "response", + "service_name", + "service_endpoint", + "verb" + ] + }, + "docker-provider": { + "type": "object", + "properties": { + "request": { + "$ref": "#/definitions/formatPair" + }, + "response": { + "$ref": "#/definitions/formatPair" + }, + "route": { + "type": "string" + }, + "verb": { + "type": "string", + "enum": ["GET", "PUT", "POST", "DELETE"] + } + }, + "required": [ + "request", + "response", + "route" + ] + }, + "caller": { + "type": "object", + "properties": { + "request": { + "$ref": "#/definitions/formatPair" + }, + "response": { + "$ref": "#/definitions/formatPair" + }, + "config_key": { + "type": "string" + } + }, + "required": [ + "request", + "response", + "config_key" + ] + }, + "formatPair": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + } + } + }, + "name": { + "type": "string" + }, + "version": { + "type": "string", + "pattern": "^(\\d+\\.)(\\d+\\.)(\\*|\\d+)$" + }, + "artifact": { + "type": "object", + "description": "Component artifact object", + "properties": { + "uri": { + "type": "string", + "description": "Uri to artifact" + }, + "type": { + "type": "string", + "enum": ["jar", "docker image"] + } + }, + "required": ["uri", "type"] + }, + + "auxilary_cdap": { + "title": "cdap component specification schema", + "type": "object", + "properties": { + "streamname": { + "type": "string" + }, + "artifact_name" : { + "type": "string" + }, + "artifact_version" : { + "type": "string", + "pattern": "^(\\d+\\.)(\\d+\\.)(\\*|\\d+)$" + }, + "namespace":{ + "type": "string", + "description" : "optional" + }, + "programs": { + "type": "array", + 
"uniqueItems": true, + "items": { + "$ref": "#/definitions/cdap_program" + } + } + }, + "required": [ + "streamname", + "programs", + "artifact_name", + "artifact_version" + ] + }, + "cdap_program_type": { + "type": "string", + "enum": ["flows","mapreduce","schedules","spark","workflows","workers","services"] + }, + "cdap_program": { + "type": "object", + "properties": { + "program_type": { + "$ref": "#/definitions/cdap_program_type" + }, + "program_id": { + "type": "string" + } + }, + "required": ["program_type", "program_id"] + }, + + "auxilary_docker": { + "title": "Docker component specification schema", + "type": "object", + "properties": { + "healthcheck": { + "description": "Define the health check that Consul should perfom for this component", + "type": "object", + "oneOf": [ + { "$ref": "#/definitions/docker_healthcheck_http" }, + { "$ref": "#/definitions/docker_healthcheck_script" } + ] + }, + "ports": { + "description": "Port mapping to be used for Docker containers. Each entry is of the format <container port>:<host port>.", + "type": "array", + "items": { + "type": "string" + } + }, + "logging": { + "description": "Component specific details for logging", + "type": "object", + "properties": { + "log_directory": { + "description": "The path in the container where the component writes its logs. If the component is following the EELF requirements, this would be the directory where the four EELF files are being written. (Other logs can be placed in the directory--if their names in '.log', they'll also be sent into ELK.)", + "type": "string" + }, + "alternate_fb_path": { + "description": "By default, the log volume is mounted at /var/log/onap/<component_type> in the sidecar container's file system. 'alternate_fb_path' allows overriding the default. Will affect how the log data can be found in the ELK system.", + "type": "string" + } + }, + "additionalProperties": false + }, + "policy": { + "properties": { + "trigger_type": { + "description": "Only value of docker is supported at this time.", + "type": "string", + "enum": ["docker"] + }, + "script_path": { + "description": "Script command that will be executed for policy reconfiguration", + "type": "string" + } + }, + "required": [ + "trigger_type","script_path" + ], + "additionalProperties": false + }, + "volumes": { + "description": "Volume mapping to be used for Docker containers. Each entry is of the format below", + "type": "array", + "items": { + "type": "object", + "properties": { + "host":{ + "type":"object", + "path": {"type": "string"} + }, + "container":{ + "type":"object", + "bind": { "type": "string"}, + "mode": { "type": "string"} + } + } + } + } + }, + "required": [ + "healthcheck" + ], + "additionalProperties": false + }, + "docker_healthcheck_http": { + "properties": { + "type": { + "description": "Consul health check type", + "type": "string", + "enum": [ + "http", + "https" + ] + }, + "interval": { + "description": "Interval duration in seconds i.e. 10s", + "default": "15s", + "type": "string" + }, + "timeout": { + "description": "Timeout in seconds i.e. 
10s", + "default": "1s", + "type": "string" + }, + "endpoint": { + "description": "Relative endpoint used by Consul to check health by making periodic HTTP GET calls", + "type": "string" + } + }, + "required": [ + "type", + "endpoint" + ] + }, + "docker_healthcheck_script": { + "properties": { + "type": { + "description": "Consul health check type", + "type": "string", + "enum": [ + "script", + "docker" + ] + }, + "interval": { + "description": "Interval duration in seconds i.e. 10s", + "default": "15s", + "type": "string" + }, + "timeout": { + "description": "Timeout in seconds i.e. 10s", + "default": "1s", + "type": "string" + }, + "script": { + "description": "Script command that will be executed by Consul to check health", + "type": "string" + } + }, + "required": [ + "type", + "script" + ] + } + } +} diff --git a/mod/component-json-schemas/data-format/dcae-cli-v1/data-format-schema.json b/mod/component-json-schemas/data-format/dcae-cli-v1/data-format-schema.json new file mode 100644 index 0000000..66aa2ab --- /dev/null +++ b/mod/component-json-schemas/data-format/dcae-cli-v1/data-format-schema.json @@ -0,0 +1,212 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Data format specification schema Version 1.0.1", + "type": "object", + "oneOf": [{ + "properties": { + "self": { + "$ref": "#/definitions/self" + }, + "dataformatversion": { + "$ref": "#/definitions/dataformatversion" + }, + "reference": { + + "type": "object", + "description": "A reference to an external schema - name/version or url, if specified, is used to access the artifact", + "properties": { + "name": { + "$ref": "#/definitions/name" + }, + "url": { + "$ref": "#/definitions/url" + }, + "version": { + "$ref": "#/definitions/version" + }, + "format": { + "$ref": "#/definitions/format" + } + }, + "required": [ + "name", + "version", + "format" + ], + "additionalProperties": false + } + }, + "required": ["self", "dataformatversion", "reference"], + "additionalProperties": false + }, { + "properties": { + "self": { + "$ref": "#/definitions/self" + }, + "dataformatversion": { + "$ref": "#/definitions/dataformatversion" + }, + "jsonschema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "The actual JSON schema for this data format" + } + + }, + "required": ["self", "dataformatversion", "jsonschema"], + "additionalProperties": false + }, { + "properties": { + "self": { + "$ref": "#/definitions/self" + }, + "dataformatversion": { + "$ref": "#/definitions/dataformatversion" + }, + "delimitedschema": { + "type": "object", + "description": "A JSON schema for delimited files", + "properties": { + "delimiter": { + "enum": [",", "|", "\t"] + }, + "fields": { + "type": "array", + "description": "Array of field descriptions", + "items": { + "$ref": "#/definitions/field" + } + } + }, + "additionalProperties": false + } + }, + "required": ["self", "dataformatversion", "delimitedschema"], + "additionalProperties": false + }, { + "properties": { + "self": { + "$ref": "#/definitions/self" + }, + "dataformatversion": { + "$ref": "#/definitions/dataformatversion" + }, + "unstructured": { + "type": "object", + "description": "A JSON schema for unstructured text", + "properties": { + "encoding": { + "type": "string", + "enum": ["ASCII", "UTF-8", "UTF-16", "UTF-32"] + } + }, + "additionalProperties": false + + } + }, + "required": ["self", "dataformatversion", "unstructured"], + "additionalProperties": false + }], + "definitions": { + "url": { + "format": "uri" + }, + "name": { + "type": 
"string" + }, + "version": { + "type": "string", + "pattern": "^(\\d+\\.)(\\d+\\.)(\\*|\\d+)$" + }, + "self": { + "description": "Identifying Information for the Data Format - name/version can be used to access the artifact", + "type": "object", + "properties": { + "name": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "description": { + "type": "string" + } + }, + "required": [ + "name", + "version" + ], + "additionalProperties": false + }, + "format": { + "description": "Reference schema type", + "type": "string", + "enum": [ + "JSON", + "Delimited Format", + "XML", + "Protocol Buffer", + "Unstructured" + ] + }, + "field": { + "description": "A field definition for the delimited schema", + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "fieldtype": { + "description": "the field type - from the XML schema types", + "type": "string", + "enum": ["string", "boolean", + "decimal", "float", "double", + "duration", "dateTime", "time", + "date", "gYearMonth", "gYear", + "gMonthDay", "gDay", "gMonth", + "hexBinary", "base64Binary", + "anyURI", "QName", "NOTATION", + "normalizedString", "token", + "language", "IDREFS", "ENTITIES", + "NMTOKEN", "NMTOKENS", "Name", + "NCName", "ID", "IDREF", "ENTITY", + "integer", "nonPositiveInteger", + "negativeInteger", "long", "int", + "short", "byte", + "nonNegativeInteger", "unsignedLong", + "unsignedInt", "unsignedShort", + "unsignedByte", "positiveInteger" + + ] + }, + "fieldPattern": { + "description": "Regular expression that defines the field format", + "type": "integer" + }, + "fieldMaxLength": { + "description": "The maximum length of the field", + "type": "integer" + }, + "fieldMinLength": { + "description": "The minimum length of the field", + "type": "integer" + }, + "fieldMinimum": { + "description": "The minimum numeric value of the field", + "type": "integer" + }, + "fieldMaximum": { + "description": "The maximum numeric value of the field", + "type": "integer" + } + }, + "additionalProperties": false + }, + "dataformatversion": { + "type": "string", + "enum": ["1.0.0", "1.0.1"] + } + } +} diff --git a/mod/component-json-schemas/dcae-cli-v3-draft/component-spec-schema.json b/mod/component-json-schemas/dcae-cli-v3-draft/component-spec-schema.json new file mode 100644 index 0000000..c97d42f --- /dev/null +++ b/mod/component-json-schemas/dcae-cli-v3-draft/component-spec-schema.json @@ -0,0 +1,892 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#",, + "title": "Component specification schema", + "type": "object", + "properties": { + "self": { + "type": "object", + "properties": { + "version": { + "$ref": "#/definitions/version" + }, + "description": { + "type": "string" + }, + "component_type": { + "type": "string", + "enum": [ + "docker", + "cdap" + ] + }, + "name": { + "$ref": "#/definitions/name" + } + }, + "required": [ + "version", + "name", + "description", + "component_type" + ] + }, + "streams": { + "type": "object", + "properties": { + "publishes": { + "type": "array", + "uniqueItems": true, + "items": { + "oneOf": [ + { "$ref": "#/definitions/publisher_http" }, + { "$ref": "#/definitions/publisher_message_router" }, + { "$ref": "#/definitions/publisher_data_router" }, + { "$ref": "#/definitions/publisher_kafka" } + ] + } + }, + "subscribes": { + "type": "array", + "uniqueItems": true, + "items": { + "oneOf": [ + { "$ref": "#/definitions/subscriber_http" }, + { "$ref": "#/definitions/subscriber_message_router" }, + { 
"$ref": "#/definitions/subscriber_data_router" }, + { "$ref": "#/definitions/subscriber_kafka" } + ] + } + } + }, + "required": [ + "publishes", + "subscribes" + ] + }, + "services": { + "type": "object", + "properties": { + "calls": { + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/caller" + } + }, + "provides": { + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/provider" + } + } + }, + "required": [ + "calls", + "provides" + ] + }, + "parameters" : { + "anyOf" : [ + {"$ref": "#/definitions/docker-parameters"}, + {"$ref": "#/definitions/cdap-parameters"} + ] + }, + "auxilary": { + "oneOf" : [ + {"$ref": "#/definitions/auxilary_cdap"}, + {"$ref": "#/definitions/auxilary_docker"} + ] + }, + "artifacts": { + "type": "array", + "description": "List of component artifacts", + "items": { + "$ref": "#/definitions/artifact" + } + } + }, + "required": [ + "self", + "streams", + "services", + "parameters", + "auxilary", + "artifacts" + ], + "additionalProperties": false, + "definitions": { + "cdap-parameters": { + "description" : "There are three seperate ways to pass parameters to CDAP: app config, app preferences, program preferences. These are all treated as optional.", + "type": "object", + "properties" : { + "program_preferences": { + "description" : "A list of {program_id, program_type, program_preference} objects where program_preference is an object passed into program_id of type program_type", + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/program_preference" + } + }, + "app_preferences" : { + "description" : "Parameters Passed down to the CDAP preference API", + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/parameter" + } + }, + "app_config" : { + "description" : "Parameters Passed down to the CDAP App Config", + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/parameter" + } + } + } + }, + "program_preference": { + "type": "object", + "properties": { + "program_type": { + "$ref": "#/definitions/program_type" + }, + "program_id": { + "type": "string" + }, + "program_pref":{ + "description" : "Parameters that the CDAP developer wants pushed to this program's preferences API. Optional", + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/parameter" + } + } + }, + "required": ["program_type", "program_id", "program_pref"] + }, + "program_type": { + "type": "string", + "enum": ["flows","mapreduce","schedules","spark","workflows","workers","services"] + }, + "docker-parameters": { + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/parameter" + } + }, + "parameter": { + "oneOf": [ + {"$ref": "#/definitions/parameter-list"}, + {"$ref": "#/definitions/parameter-other"} + ] + }, + "parameter-list": { + "properties": { + "name": { + "type": "string" + }, + "value": { + "description": "Default value for the parameter" + }, + "description": { + "description": "Description for the parameter.", + "type": "string" + }, + "type": { + "description": "Only valid type is list, the entry_schema is required - which contains the type of the list element. All properties set for the parameter apply to all elements in the list at this time", + "type": "string", + "enum": ["list"] + }, + "required": { + "description": "An optional key that declares a parameter as required (true) or not (false). 
Default is true.", + "type": "boolean", + "default": true + }, + "constraints": { + "description": "The optional list of sequenced constraint clauses for the parameter.", + "type": "array", + "items": { + "$ref": "#/definitions/parameter-constraints" + } + }, + "entry_schema": { + "description": "The optional property used to declare the name of the Datatype definition for entries of certain types. entry_schema must be defined when the type is list. This is the only type it is currently supported for.", + "type": "object", + "uniqueItems": true, + "items": {"$ref": "#/definitions/list-parameter"} + }, + "designer_editable": { + "description": "A required property that declares a parameter as editable by designer in SDC Tool (true) or not (false).", + "type": "boolean" + }, + "sourced_at_deployment": { + "description": "A required property that declares that a parameter is assigned at deployment time (true) or not (false).", + "type": "boolean" + }, + "policy_editable": { + "description": "A required property that declares a parameter as editable by DevOps in Policy UI (true) or not (false).", + "type": "boolean" + }, + "policy_group": { + "description": "An optional property used to group policy_editable parameters into groups. Each group will become it's own policy model. Any parameters without this property will be grouped together to form their own policy model", + "type": "string" + }, + "policy_schema" :{ + "type": "array", + "uniqueItems": true, + "items": {"$ref": "#/definitions/policy_schema_parameter"} + } + }, + "required": [ + "name", + "value", + "description", + "designer_editable", + "policy_editable", + "sourced_at_deployment", + "entry_schema" + ], + "additionalProperties": false, + "dependencies": { + "policy_schema": ["policy_editable"] + } + }, + "parameter-other": { + "properties": { + "name": { + "type": "string" + }, + "value": { + "description": "Default value for the parameter" + }, + "description": { + "description": "Description for the parameter.", + "type": "string" + }, + "type": { + "description": "The required data type for the parameter.", + "type": "string", + "enum": [ "string", "number", "boolean", "datetime" ] + }, + "required": { + "description": "An optional key that declares a parameter as required (true) or not (false). Default is true.", + "type": "boolean", + "default": true + }, + "constraints": { + "description": "The optional list of sequenced constraint clauses for the parameter.", + "type": "array", + "items": { + "$ref": "#/definitions/parameter-constraints" + } + }, + "designer_editable": { + "description": "A required property that declares a parameter as editable by designer in SDC Tool (true) or not (false).", + "type": "boolean" + }, + "sourced_at_deployment": { + "description": "A required property that declares that a parameter is assigned at deployment time (true) or not (false).", + "type": "boolean" + }, + "policy_editable": { + "description": "A required property that declares a parameter as editable in Policy UI (true) or not (false).", + "type": "boolean" + }, + "policy_group": { + "description": "An optional property used to group policy_editable parameters into groups. Each group will become it's own policy model. 
Any parameters without this property will be grouped together to form their own policy model", + "type": "string" + }, + "policy_schema" :{ + "description": "An optional property used to define policy_editable parameters as lists or maps", + "type": "array", + "uniqueItems": true, + "items": {"$ref": "#/definitions/policy_schema_parameter"} + } + }, + "required": [ + "name", + "value", + "description", + "designer_editable", + "sourced_at_deployment", + "policy_editable" + ], + "additionalProperties": false, + "dependencies": { + "policy_schema": ["policy_editable"] + } + }, + "list-parameter": { + "type": "object", + "properties": { + "type": { + "description": "The required data type for each parameter in the list.", + "type": "string", + "enum": ["string", "number"] + } + }, + "required": [ + "type" + ], + "additionalProperties": false + }, + "policy_schema_parameter": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "value": { + "description": "Default value for the parameter" + }, + "description": { + "description": "Description for the parameter.", + "type": "string" + }, + "type": { + "description": "The required data type for the parameter.", + "type": "string", + "enum": [ "string", "number", "boolean", "datetime", "list", "map" ] + }, + "required": { + "description": "An optional key that declares a parameter as required (true) or not (false). Default is true.", + "type": "boolean", + "default": true + }, + "constraints": { + "description": "The optional list of sequenced constraint clauses for the parameter.", + "type": "array", + "items": { + "$ref": "#/definitions/parameter-constraints" + } + }, + "entry_schema": { + "description": "The optional key that is used to declare the name of the Datatype definition for entries of certain types. entry_schema must be defined when the type is either list or map. If the type is list and the entry type is a simple type (string, number, boolean, datetime), follow with a simple string to describe the entry type. If the type is list and the entry type is a map, follow with an array to describe the keys for the entry map. If the type is list and the entry type is also list, this is not currently supported here. If the type is map, then follow with an array to describe the keys for this map. ", + "type": "array", "uniqueItems": true, "items": {"$ref": "#/definitions/policy_schema_parameter"} + } + }, + "required": [ + "name", + "type" + ], + "additionalProperties": false + }, + "parameter-constraints": { + "type": "object", + "additionalProperties": false, + "properties": { + "equal": { + "description": "Constrains a property or parameter to a value equal to (‘=’) the value declared." 
+ }, + "greater_than": { + "description": "Constrains a property or parameter to a value greater than (‘>’) the value declared.", + "type": "number" + }, + "greater_or_equal": { + "description": "Constrains a property or parameter to a value greater than or equal to (‘>=’) the value declared.", + "type": "number" + }, + "less_than": { + "description": "Constrains a property or parameter to a value less than (‘<’) the value declared.", + "type": "number" + }, + "less_or_equal": { + "description": "Constrains a property or parameter to a value less than or equal to (‘<=’) the value declared.", + "type": "number" + }, + "valid_values": { + "description": "Constrains a property or parameter to a value that is in the list of declared values.", + "type": "array" + }, + "length": { + "description": "Constrains the property or parameter to a value of a given length.", + "type": "number" + }, + "min_length": { + "description": "Constrains the property or parameter to a value to a minimum length.", + "type": "number" + }, + "max_length": { + "description": "Constrains the property or parameter to a value to a maximum length.", + "type": "number" + } + } + }, + "stream_message_router": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "config_key": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "message router", "message_router" + ] + } + }, + "required": [ + "format", + "version", + "config_key", + "type" + ] + }, + "stream_kafka": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "kafka" + ] + } + }, + "required": [ + "format", + "version", + "config_key", + "type" + ] + }, + "publisher_http": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "config_key": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "http", + "https" + ] + } + }, + "required": [ + "format", + "version", + "config_key", + "type" + ] + }, + "publisher_message_router": { + "$ref": "#/definitions/stream_message_router" + }, + "publisher_kafka": { + "$ref": "#/definitions/stream_kafka" + }, + "publisher_data_router": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "config_key": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "data router", "data_router" + ] + } + }, + "required": [ + "format", + "version", + "config_key", + "type" + ] + }, + "subscriber_http": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "route": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "http", + "https" + ] + } + }, + "required": [ + "format", + "version", + "route", + "type" + ] + }, + "subscriber_message_router": { + "$ref": "#/definitions/stream_message_router" + }, + "subscriber_kafka": { + "$ref": "#/definitions/stream_kafka" + }, + "subscriber_data_router": { + "type": "object", + "properties": { + "format": { 
+ "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + }, + "route": { + "type": "string" + }, + "type": { + "description": "Type of stream to be used", + "type": "string", + "enum": [ + "data router", "data_router" + ] + }, + "config_key": { + "description": "Data router subscribers require config info to setup their endpoints to handle requests. For example, needs username and password", + "type": "string" + } + }, + "required": [ + "format", + "version", + "route", + "type", + "config_key" + ] + }, + "provider" : { + "oneOf" : [ + {"$ref": "#/definitions/docker-provider"}, + {"$ref": "#/definitions/cdap-provider"} + ] + }, + "cdap-provider" : { + "type": "object", + "properties" : { + "request": { + "$ref": "#/definitions/formatPair" + }, + "response": { + "$ref": "#/definitions/formatPair" + }, + "service_name" : { + "type" : "string" + }, + "service_endpoint" : { + "type" : "string" + }, + "verb" : { + "type": "string", + "enum": ["GET", "PUT", "POST", "DELETE"] + } + }, + "required" : [ + "request", + "response", + "service_name", + "service_endpoint", + "verb" + ] + }, + "docker-provider": { + "type": "object", + "properties": { + "request": { + "$ref": "#/definitions/formatPair" + }, + "response": { + "$ref": "#/definitions/formatPair" + }, + "route": { + "type": "string" + }, + "verb": { + "type": "string", + "enum": ["GET", "PUT", "POST", "DELETE"] + } + }, + "required": [ + "request", + "response", + "route" + ] + }, + "caller": { + "type": "object", + "properties": { + "request": { + "$ref": "#/definitions/formatPair" + }, + "response": { + "$ref": "#/definitions/formatPair" + }, + "config_key": { + "type": "string" + } + }, + "required": [ + "request", + "response", + "config_key" + ] + }, + "formatPair": { + "type": "object", + "properties": { + "format": { + "$ref": "#/definitions/name" + }, + "version": { + "$ref": "#/definitions/version" + } + } + }, + "name": { + "type": "string" + }, + "version": { + "type": "string", + "pattern": "^(\\d+\\.)(\\d+\\.)(\\*|\\d+)$" + }, + "artifact": { + "type": "object", + "description": "Component artifact object", + "properties": { + "uri": { + "type": "string", + "description": "Uri to artifact" + }, + "type": { + "type": "string", + "enum": ["jar", "docker image"] + } + }, + "required": ["uri", "type"] + }, + + "auxilary_cdap": { + "title": "cdap component specification schema", + "type": "object", + "properties": { + "streamname": { + "type": "string" + }, + "artifact_name" : { + "type": "string" + }, + "artifact_version" : { + "type": "string", + "pattern": "^(\\d+\\.)(\\d+\\.)(\\*|\\d+)$" + }, + "namespace":{ + "type": "string", + "description" : "optional" + }, + "programs": { + "type": "array", + "uniqueItems": true, + "items": { + "$ref": "#/definitions/cdap_program" + } + } + }, + "required": [ + "streamname", + "programs", + "artifact_name", + "artifact_version" + ] + }, + "cdap_program_type": { + "type": "string", + "enum": ["flows","mapreduce","schedules","spark","workflows","workers","services"] + }, + "cdap_program": { + "type": "object", + "properties": { + "program_type": { + "$ref": "#/definitions/cdap_program_type" + }, + "program_id": { + "type": "string" + } + }, + "required": ["program_type", "program_id"] + }, + + "auxilary_docker": { + "title": "Docker component specification schema", + "type": "object", + "properties": { + "healthcheck": { + "description": "Define the health check that Consul should perfom for this component", + "type": "object", + "oneOf": [ + { "$ref": 
"#/definitions/docker_healthcheck_http" }, + { "$ref": "#/definitions/docker_healthcheck_script" } + ] + }, + "ports": { + "description": "Port mapping to be used for Docker containers. Each entry is of the format <container port>:<host port>.", + "type": "array", + "items": { + "type": "string" + } + }, + "logging": { + "description": "Component specific details for logging", + "type": "object", + "properties": { + "log_directory": { + "description": "The path in the container where the component writes its logs. If the component is following the EELF requirements, this would be the directory where the four EELF files are being written. (Other logs can be placed in the directory--if their names in '.log', they'll also be sent into ELK.)", + "type": "string" + }, + "alternate_fb_path": { + "description": "By default, the log volume is mounted at /var/log/onap/<component_type> in the sidecar container's file system. 'alternate_fb_path' allows overriding the default. Will affect how the log data can be found in the ELK system.", + "type": "string" + } + }, + "additionalProperties": false + }, + "policy": { + "properties": { + "trigger_type": { + "description": "Only value of docker is supported at this time.", + "type": "string", + "enum": ["docker"] + }, + "script_path": { + "description": "Script command that will be executed for policy reconfiguration", + "type": "string" + } + }, + "required": [ + "trigger_type","script_path" + ], + "additionalProperties": false + }, + "volumes": { + "description": "Volume mapping to be used for Docker containers. Each entry is of the format below", + "type": "array", + "items": { + "type": "object", + "properties": { + "host":{ + "type":"object", + "path": {"type": "string"} + }, + "container":{ + "type":"object", + "bind": { "type": "string"}, + "mode": { "type": "string"} + } + } + } + } + }, + "required": [ + "healthcheck" + ], + "additionalProperties": false + }, + "docker_healthcheck_http": { + "properties": { + "type": { + "description": "Consul health check type", + "type": "string", + "enum": [ + "http", + "https" + ] + }, + "interval": { + "description": "Interval duration in seconds i.e. 10s", + "default": "15s", + "type": "string" + }, + "timeout": { + "description": "Timeout in seconds i.e. 10s", + "default": "1s", + "type": "string" + }, + "endpoint": { + "description": "Relative endpoint used by Consul to check health by making periodic HTTP GET calls", + "type": "string" + } + }, + "required": [ + "type", + "endpoint" + ] + }, + "docker_healthcheck_script": { + "properties": { + "type": { + "description": "Consul health check type", + "type": "string", + "enum": [ + "script", + "docker" + ] + }, + "interval": { + "description": "Interval duration in seconds i.e. 10s", + "default": "15s", + "type": "string" + }, + "timeout": { + "description": "Timeout in seconds i.e. 
10s", + "default": "1s", + "type": "string" + }, + "script": { + "description": "Script command that will be executed by Consul to check health", + "type": "string" + } + }, + "required": [ + "type", + "script" + ] + } + } +} diff --git a/mod/component-json-schemas/dcae-cli-v3-draft/dmaap-schema.json b/mod/component-json-schemas/dcae-cli-v3-draft/dmaap-schema.json new file mode 100644 index 0000000..1c36972 --- /dev/null +++ b/mod/component-json-schemas/dcae-cli-v3-draft/dmaap-schema.json @@ -0,0 +1,149 @@ +{ + "$id": "http://onap.org/dcaegen2/platform/dmaap", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Schema for dmaap inputs", + "type": "object", + + "oneOf": [ + { "$ref": "#/definitions/message_router" }, + { "$ref": "#/definitions/data_router" }, + { "$ref": "#/definitions/kafka" } + ], + + "definitions": { + + "message_router": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": ["message_router"] + }, + "aaf_credentials": { + "$ref": "#/definitions/aaf_credentials" + }, + "aaf_username": { + "description": "Used for backward compatibility. Use aaf_credentials object in new code.", + "type": "string" + }, + "aaf_password": { + "description": "Used for backward compatibility. Use aaf_credentials object in new code.", + "type": "string" + }, + "dmaap_info": { + "$ref":"#/definitions/dmaap_info" + } + }, + "required": [ + "type", + "dmaap_info" + ], + "additionalProperties": false + }, + + "data_router": { + "type": "object", + "$comment": "This is currently not specified in schema. However, it is specified in RTD documentation. TODO: specify schema for DMaaP DR", + "properties": { + "type": { + "type": "string", + "enum": ["data_router"] + } + }, + "required": [ + "type" + ], + "additionalProperties": true + }, + + "dmaap_info": { + "type": "object", + "properties": { + "client_role": { + "type": "string" + }, + "client_id": { + "type": "string" + }, + "location": { + "type": "string" + }, + "topic_url": { + "type": "string" + } + }, + "required": [ + "topic_url" + ], + "additionalProperties": false + }, + + "kafka": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": ["kafka"] + }, + "aaf_credentials": { + "$ref": "#/definitions/aaf_credentials" + }, + "kafka_info": { + "$ref": "#/definitions/kafka_info" + } + }, + "required": [ + "type", + "kafka_info" + ], + "additionalProperties": false + }, + + "kafka_info": { + "type": "object", + "description": "Defines Kafka-specific topid properties", + "properties": { + "client_role": { + "type": "string" + }, + "client_id": { + "type": "string" + }, + "bootstrap_servers": { + "description": "Comma separated list of Kafka bootstrap servers as defined in Kafka documentation", + "type": "string" + }, + "topic_name": { + "description": "Name of the topic", + "type": "string" + }, + "consumer_group_id": { + "description": "Can be specifed only for subscribers. More details in Kafka documentation on group.id property", + "type": "string" + }, + "max_payload_size_bytes": { + "description": "Maximum payload size in bytes. 
diff --git a/mod/component-json-schemas/dcae-cli-v3-draft/samples/application-config-kafka.json b/mod/component-json-schemas/dcae-cli-v3-draft/samples/application-config-kafka.json
new file mode 100644
index 0000000..d343c70
--- /dev/null
+++ b/mod/component-json-schemas/dcae-cli-v3-draft/samples/application-config-kafka.json
@@ -0,0 +1,26 @@
+{
+  "streams_publishes": {
+    "perf3gpp_regional": {
+      "type": "kafka",
+      "aaf_credentials": {
+        "username": "client",
+        "password": "very secure password"
+      },
+      "kafka_info": {
+        "bootstrap_servers": "dmaap-mr-kafka-0.regional:6060,dmaap-mr-kafka-1.regional:6060",
+        "topic_name": "REG_HVVES_PERF3GPP"
+      }
+    },
+    "perf3gpp_central": {
+      "type": "kafka",
+      "aaf_credentials": {
+        "username": "other_client",
+        "password": "another very secure password"
+      },
+      "kafka_info": {
+        "bootstrap_servers": "dmaap-mr-kafka-0.central:6060,dmaap-mr-kafka-1.central:6060",
+        "topic_name": "CEN_HVVES_PERF3GPP"
+      }
+    }
+  }
+}
diff --git a/mod/component-json-schemas/dcae-cli-v3-draft/samples/stream-kafka.json b/mod/component-json-schemas/dcae-cli-v3-draft/samples/stream-kafka.json
new file mode 100644
index 0000000..27ac2c9
--- /dev/null
+++ b/mod/component-json-schemas/dcae-cli-v3-draft/samples/stream-kafka.json
@@ -0,0 +1,13 @@
+{
+  "type": "kafka",
+  "aaf_credentials": {
+    "username": "client",
+    "password": "very secure password"
+  },
+  "kafka_info": {
+    "client_role": "com.dcae.member",
+    "client_id": "1500462518108",
+    "bootstrap_servers": "dmaap-mr-kafka-0:6060,dmaap-mr-kafka-1:6060",
+    "topic_name": "HVVES_PERF3GPP"
+  }
+}
diff --git a/mod/component-json-schemas/dmaap/dcae-cli-v1/dmaap-schema.json b/mod/component-json-schemas/dmaap/dcae-cli-v1/dmaap-schema.json
new file mode 100644
index 0000000..e6745f7
--- /dev/null
+++ b/mod/component-json-schemas/dmaap/dcae-cli-v1/dmaap-schema.json
@@ -0,0 +1,51 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "title": "Schema for dmaap inputs",
+  "type": "object",
+  "oneOf": [
+    { "$ref": "#/definitions/message_router" }
+  ],
+  "definitions": {
+    "message_router": {
+      "type": "object",
+      "properties": {
+        "type": {
+          "type": "string",
+          "enum": ["message_router"]
+        },
+        "aaf_username": {
+          "type": "string"
+        },
+        "aaf_password": {
+          "type": "string"
+        },
+        "dmaap_info": {
+          "type": "object",
+          "properties": {
+            "client_role": {
+              "type": "string"
+            },
+            "client_id": {
+              "type": "string"
+            },
+            "location": {
+              "type": "string"
+            },
+            "topic_url": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "topic_url"
+          ],
+          "additionalProperties": false
+        }
+      },
+      "required": [
+        "type",
+        "dmaap_info"
+      ],
+      "additionalProperties": false
+    }
+  }
+}
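The `tests/` files added further down in this change give one passing and two failing inputs for this v1 schema. A minimal sketch of exercising them from Python (paths relative to `mod/component-json-schemas`; the assertions reflect the intent of the test files):

```
import json
from jsonschema import Draft4Validator

with open("dmaap/dcae-cli-v1/dmaap-schema.json") as f:
    validator = Draft4Validator(json.load(f))

with open("tests/dmaap-mr-good.json") as f:
    assert validator.is_valid(json.load(f))

# "something_else" trips "additionalProperties": false on message_router
with open("tests/dmaap-mr-bad-extra.json") as f:
    assert not validator.is_valid(json.load(f))
```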
diff --git a/mod/component-json-schemas/pom.xml b/mod/component-json-schemas/pom.xml
new file mode 100644
index 0000000..24f92b6
--- /dev/null
+++ b/mod/component-json-schemas/pom.xml
@@ -0,0 +1,245 @@
+<?xml version="1.0"?>
+<!--
+================================================================================
+Copyright (c) 2017-2019 AT&T Intellectual Property. All rights reserved.
+================================================================================
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+============LICENSE_END=========================================================
+
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.onap.dcaegen2.platform.cli</groupId>
+    <artifactId>cli</artifactId>
+    <version>1.2.0-SNAPSHOT</version>
+  </parent>
+  <groupId>org.onap.dcaegen2.platform.cli</groupId>
+  <artifactId>component-json-schemas</artifactId>
+  <name>dcaegen2-platform-cli-component-json-schemas</name>
+  <version>1.1.0-SNAPSHOT</version>
+  <url>http://maven.apache.org</url>
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <sonar.skip>true</sonar.skip>
+    <sonar.sources>.</sonar.sources>
+    <!-- customize the SONARQUBE URL -->
+    <!-- sonar.host.url>http://localhost:9000</sonar.host.url -->
+    <!-- below are language dependent -->
+    <!-- for Python -->
+    <sonar.language>py</sonar.language>
+    <sonar.pluginName>Python</sonar.pluginName>
+    <sonar.inclusions>**/*.py</sonar.inclusions>
+    <!-- for JavaScript -->
+    <!--
+    <sonar.language>js</sonar.language>
+    <sonar.pluginName>JS</sonar.pluginName>
+    <sonar.inclusions>**/*.js</sonar.inclusions>
+    -->
+  </properties>
+  <build>
+    <finalName>${project.artifactId}-${project.version}</finalName>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.sonatype.plugins</groupId>
+          <artifactId>nexus-staging-maven-plugin</artifactId>
+          <version>1.6.7</version>
+          <configuration>
+            <skipNexusStagingDeployMojo>true</skipNexusStagingDeployMojo>
+          </configuration>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-deploy-plugin</artifactId>
+          <!-- This version supports the "deployAtEnd" parameter -->
+          <version>2.8</version>
+          <configuration>
+            <skip>true</skip>
+          </configuration>
+        </plugin>
+        <!-- first disable the default Java plugins at various stages -->
+        <!-- maven-resources-plugin is called during "*resource" phases by default behavior. it prepares
+             the resources dir. we do not need it -->
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-resources-plugin</artifactId>
+          <version>2.6</version>
+          <configuration>
+            <skip>true</skip>
+          </configuration>
+        </plugin>
+        <!-- maven-compiler-plugin is called during "compile" phases by default behavior. we do not need it -->
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-compiler-plugin</artifactId>
+          <version>3.1</version>
+          <configuration>
+            <skip>true</skip>
+          </configuration>
+        </plugin>
+        <!-- maven-jar-plugin is called during "compile" phase by default behavior.
we do not need it --> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <version>2.4</version> + <executions> + <execution> + <id>default-jar</id> + <phase/> + </execution> + </executions> + </plugin> + <!-- maven-install-plugin is called during "install" phase by default behavior. it tries to copy stuff under + target dir to ~/.m2. we do not need it --> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-install-plugin</artifactId> + <version>2.4</version> + <configuration> + <skip>true</skip> + </configuration> + </plugin> + <!-- maven-surefire-plugin is called during "test" phase by default behavior. it triggers junit test. + we do not need it --> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-surefire-plugin</artifactId> + <version>2.12.4</version> + <configuration> + <skipTests>true</skipTests> + </configuration> + </plugin> + </plugins> + </pluginManagement> + <plugins> + <!-- plugin> + <artifactId>maven-assembly-plugin</artifactId> + <version>2.4.1</version> + <configuration> + <descriptors> + <descriptor>assembly/dep.xml</descriptor> + </descriptors> + </configuration> + <executions> + <execution> + <id>make-assembly</id> + <phase>package</phase> + <goals> + <goal>single</goal> + </goals> + </execution> + </executions> + </plugin --> + <!-- now we configure custom action (calling a script) at various lifecycle phases --> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>exec-maven-plugin</artifactId> + <version>1.2.1</version> + <executions> + <execution> + <id>clean phase script</id> + <phase>clean</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>clean</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>generate-sources script</id> + <phase>generate-sources</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>generate-sources</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>compile script</id> + <phase>compile</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>compile</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>package script</id> + <phase>package</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>package</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>test script</id> + <phase>test</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>test</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>install script</id> + <phase>install</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>install</argument> + </arguments> + </configuration> + </execution> + <execution> + <id>deploy script</id> + <phase>deploy</phase> + <goals> + <goal>exec</goal> + </goals> + <configuration> + <arguments> + <argument>${project.artifactId}</argument> + <argument>deploy</argument> + </arguments> + </configuration> + </execution> + </executions> + </plugin> + </plugins> + </build> +</project> diff --git 
a/mod/component-json-schemas/tests/component-spec-cdap.json b/mod/component-json-schemas/tests/component-spec-cdap.json new file mode 100644 index 0000000..a8e3544 --- /dev/null +++ b/mod/component-json-schemas/tests/component-spec-cdap.json @@ -0,0 +1,78 @@ +{ + "self":{ + "name":"cdap.helloworld.endnode", + "version":"0.8.0", + "description":"cdap test component", + "component_type":"cdap" + }, + "streams":{ + "subscribes": [ + { + "format": "some.format", + "version": "5.0.0", + "route": "/yay", + "type": "http" + }, + { + "format": "some.format", + "version": "5.0.0", + "config_key": "foo-sub", + "type": "message router" + }], + "publishes": [{ + "format": "std.empty", + "version": "1.0.6", + "config_key": "stream_publish_example", + "type": "message_router" + }] + }, + "services":{ + "calls": [], + "provides":[ + { + "request":{ + "format":"std.empty", + "version":"1.0.6" + }, + "response":{ + "format":"std.empty", + "version":"1.0.6" + }, + "service_name":"Greeting", + "service_endpoint":"greet", + "verb":"GET" + } + ] + }, + "parameters": { + "app_config" : [ + {"name" : "some_param", + "description" : "some desc", + "value" : "some_value", + "type": "string"} + ], + "app_preferences" : [ + {"name" : "some_param2", + "description" : "some desc2", + "value" : true, + "type": "boolean"} + ], + "program_preferences" : [{"program_type" : "flows", "program_id" : "WhoFlow", "program_pref" : [{"name" : "some_param3","description" : "some desc3", "value" : "some_value3", "type": "number"}]}] + }, + "auxilary": { + "streamname":"who", + "artifact_name" : "HelloWorld", + "artifact_version" : "3.4.3", + "programs" : [ + {"program_type" : "flows", "program_id" : "WhoFlow"}, + {"program_type" : "services", "program_id" : "Greeting"} + ], + "namespace" : "hw" + }, + "artifacts": [ + { + "uri": "some jar url", + "type": "jar" + } + ] +} diff --git a/mod/component-json-schemas/tests/component-spec-docker.json b/mod/component-json-schemas/tests/component-spec-docker.json new file mode 100644 index 0000000..34458d5 --- /dev/null +++ b/mod/component-json-schemas/tests/component-spec-docker.json @@ -0,0 +1,110 @@ +{ + "self": { + "version": "0.5.0", + "name": "sandbox.platform.laika", + "description": "Web service used as a stand-alone test DCAE service component", + "component_type": "docker" + }, + "streams": { + "subscribes": [], + "publishes": [ + { + "format": "some.format", + "version": "1.0.0", + "config_key": "pub-foo", + "type": "message router" + }, + { + "format": "some.format", + "version": "1.0.0", + "config_key": "pub-foo", + "type": "http" + } + ] + }, + "services": { + "calls": [], + "provides": [ + { + "route": "/rollcall", + "verb": "GET", + "request": { + "format": "sandbox.platform.any", + "version": "0.1.0" + }, + "response": { + "format": "sandbox.platform.laika.rollcall.response", + "version": "0.1.0" + } + }, + { + "route": "/identity", + "verb": "POST", + "request": { + "format": "sandbox.platform.laika.identity.request", + "version": "0.1.0" + }, + "response": { + "format": "sandbox.platform.laika.identity.response", + "version": "0.1.0" + } + }, + { + "route": "/health", + "verb": "GET", + "request": { + "format": "sandbox.platform.any", + "version": "0.1.0" + }, + "response": { + "format": "sandbox.platform.laika.health", + "version": "0.1.0" + } + } + ] + }, + "parameters": [ + { + "name": "threshold", + "description": "Some fake threshold", + "type": "number", + "value": 2000 + }, + { + "name": "some-target-number", + "description": "Some fake target", + "type": "number", 
+ "value": 10000, + "constraints": [ + { + "greater_or_equal": 1 + }, + { + "less_than": 100000 + }] + }, + { + "name": "magic-word", + "description": "Some magic word", + "type": "string", + "value": "requirements", + "constraints": [ + { + "valid_values": ["rally", "user story"] + }] + } + ], + "auxilary": { + "healthcheck": { + "type": "http", + "endpoint": "/foo" + }, + "ports": ["8080:8080"] + }, + "artifacts": [ + { + "uri": "some docker image path", + "type": "docker image" + } + ] +} diff --git a/mod/component-json-schemas/tests/component-spec-logging.json b/mod/component-json-schemas/tests/component-spec-logging.json new file mode 100644 index 0000000..6b74050 --- /dev/null +++ b/mod/component-json-schemas/tests/component-spec-logging.json @@ -0,0 +1,114 @@ +{ + "self": { + "version": "0.5.0", + "name": "sandbox.platform.laika", + "description": "Web service used as a stand-alone test DCAE service component", + "component_type": "docker" + }, + "streams": { + "subscribes": [], + "publishes": [ + { + "format": "some.format", + "version": "1.0.0", + "config_key": "pub-foo", + "type": "message router" + }, + { + "format": "some.format", + "version": "1.0.0", + "config_key": "pub-foo", + "type": "http" + } + ] + }, + "services": { + "calls": [], + "provides": [ + { + "route": "/rollcall", + "verb": "GET", + "request": { + "format": "sandbox.platform.any", + "version": "0.1.0" + }, + "response": { + "format": "sandbox.platform.laika.rollcall.response", + "version": "0.1.0" + } + }, + { + "route": "/identity", + "verb": "POST", + "request": { + "format": "sandbox.platform.laika.identity.request", + "version": "0.1.0" + }, + "response": { + "format": "sandbox.platform.laika.identity.response", + "version": "0.1.0" + } + }, + { + "route": "/health", + "verb": "GET", + "request": { + "format": "sandbox.platform.any", + "version": "0.1.0" + }, + "response": { + "format": "sandbox.platform.laika.health", + "version": "0.1.0" + } + } + ] + }, + "parameters": [ + { + "name": "threshold", + "description": "Some fake threshold", + "type": "number", + "value": 2000 + }, + { + "name": "some-target-number", + "description": "Some fake target", + "type": "number", + "value": 10000, + "constraints": [ + { + "greater_or_equal": 1 + }, + { + "less_than": 100000 + }] + }, + { + "name": "magic-word", + "description": "Some magic word", + "type": "string", + "value": "requirements", + "constraints": [ + { + "valid_values": ["rally", "user story"] + }] + } + ], + "auxilary": { + "healthcheck": { + "type": "http", + "endpoint": "/foo" + }, + "ports": ["8080:8080"], + "logging": { + "log_directory": "/tmp/yo", + "alternate_fb_path": "/tmp/path" + } + }, + "artifacts": [ + { + "uri": "some docker image path", + "type": "docker image" + } + ] +} diff --git a/mod/component-json-schemas/tests/dmaap-mr-bad-extra.json b/mod/component-json-schemas/tests/dmaap-mr-bad-extra.json new file mode 100644 index 0000000..e1821d5 --- /dev/null +++ b/mod/component-json-schemas/tests/dmaap-mr-bad-extra.json @@ -0,0 +1,12 @@ +{ + "type": "message_router", + "aaf_username": "foo3", + "aaf_password": "bar3", + "something_else": "boo", + "dmaap_info":{ + "client_role":"some.dcae.member", + "client_id":"123456", + "location":"mtc5", + "topic_url":"https://message-router-url/some-topic" + } +} diff --git a/mod/component-json-schemas/tests/dmaap-mr-bad-missing.json b/mod/component-json-schemas/tests/dmaap-mr-bad-missing.json new file mode 100644 index 0000000..9151032 --- /dev/null +++ 
b/mod/component-json-schemas/tests/dmaap-mr-bad-missing.json
@@ -0,0 +1,10 @@
+{
+  "type": "message_router",
+  "aaf_username": "foo3",
+  "aaf_password": "bar3",
+  "dmaap_info":{
+    "client_role":"some.dcae.member",
+    "client_id":"123456",
+    "location":"mtc5"
+  }
+}
diff --git a/mod/component-json-schemas/tests/dmaap-mr-good.json b/mod/component-json-schemas/tests/dmaap-mr-good.json
new file mode 100644
index 0000000..d3e8dda
--- /dev/null
+++ b/mod/component-json-schemas/tests/dmaap-mr-good.json
@@ -0,0 +1,11 @@
+{
+  "type": "message_router",
+  "aaf_username": "foo3",
+  "aaf_password": "bar3",
+  "dmaap_info":{
+    "client_role":"some.dcae.member",
+    "client_id":"123456",
+    "location":"mtc5",
+    "topic_url":"https://message-router-url/some-topic"
+  }
+}
diff --git a/mod/onboardingapi/.coveragerc b/mod/onboardingapi/.coveragerc
new file mode 100644
index 0000000..088c2da
--- /dev/null
+++ b/mod/onboardingapi/.coveragerc
@@ -0,0 +1,21 @@
+# .coveragerc to control coverage.py
+[run]
+branch = True
+
+[report]
+# Regexes for lines to exclude from consideration
+exclude_lines =
+    # Have to re-enable the standard pragma
+    pragma: no cover
+
+    # Don't complain about missing debug-only code:
+    def __repr__
+    if self\.debug
+
+    # Don't complain if tests don't hit defensive assertion code:
+    raise AssertionError
+    raise NotImplementedError
+
+    # Don't complain if non-runnable code isn't run:
+    if 0:
+    if __name__ == .__main__.:
diff --git a/mod/onboardingapi/ChangeLog.md b/mod/onboardingapi/ChangeLog.md
new file mode 100644
index 0000000..675184d
--- /dev/null
+++ b/mod/onboardingapi/ChangeLog.md
@@ -0,0 +1,159 @@
+# Change Log
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](http://keepachangelog.com/)
+and this project adheres to [Semantic Versioning](http://semver.org/).
+
+
+## [2.12.0]
+
+* Add HTTP support using Flask/flask-restplus
+* Add APIs for component (list, show, add, update, status change)
+* Add APIs for data format (list, show, add, update, status change)
+* Change the default value for `enforce_image` from true to false
+* Enhance Docker image creation to generate the dcae-cli configuration at runtime rather than at build time
+* Fix format_description to handle the no description case
+* Add filtering by name, version for GET /components
+
+## [2.11.3]
+
+* Fix Format class definition of "description" to match JSON schema
+
+## [2.11.2]
+
+* Upgrade to use docker 4.x.x from docker-py 1.x.x
+
+## [2.11.1]
+
+* Change policy reconfiguration functions to pass consul_host as a parameter
+
+## [2.11.0]
+
+* Add Policy Configuration Support.
+* Update and improve the Help Text that is displayed to the user.
+* Component Spec schema additions for parameters section (policy, volumes).
+* Component Spec schema updates to make the following required: (designer_editable, sourced_at_deployment, policy_editable).
+
+## [2.10.2]
+
+* Fix dependency conflict with python-consul
+* Fix psycopg2 warning by requiring psycopg2-binary
+* Use Docker client from env when checking images
+
+## [2.10.1]
+
+* Fix DCAEGEN2-402
+
+## [2.10.0]
+
+* Make server url (url to webserver that has static artifacts like json schemas) a configuration parameter
+* Seeding configuration is no longer a fatal issue
+* Set up the database connection via manual user inputs if the seed config is not there
+* Seeding profiles is no longer a fatal issue
+* Dynamically fetch Docker login credentials from Consul and use them to authenticate when creating the Docker client.
+* Make docker login key into a configuration param
+* Clean up the hard coupling to the user configuration, particularly in the discovery module
+
+## [2.9.0]
+
+* Add data format generate command
+* Fix issue with data router config keys
+
+## [2.8.1]
+
+* Improve the error message shown when the inputs map is missing an item. Show the specific parameters that are causing issues.
+
+## [2.8.0]
+
+* Enhance to support parameters that are sourced at deployment
+* Provide new command line arg --inputs-file
+* Use inputs file to bind values to generated configuration for parameters that have been specified to be `sourced_at_deployment` true.
+
+## [2.7.0]
+
+* Rip out Docker related code and use common python-dockering library
+  - Using 1.2.0 of python-dockering supports Docker exec based health checks
+* Add support for volumes
+* Add support for special DNS configuration in Docker containers to enable use of the Consul DNS interface
+
+## [2.6.0]
+
+* Use port mappings from component spec when running Docker containers
+
+## [2.5.0]
+
+* Define the data structure for the input dmaap map items for data router that are passed in `--dmaap-file`. Enhance the json schema.
+* Create the appropriate delivery url
+* Enhance spec validation for cdap. Throw an error when cdap specs have data router subscribes.
+* Verify the container is up in order to construct and display data router subscriber delivery urls
+
+## [2.4.0]
+
+* Define the data structure for the input dmaap map items that are passed in `--dmaap-file`. Create and use a json schema for validation and applying defaults.
+* Group config keys by `streams_publishes`, `streams_subscribes`, and `services_calls` in generating the application config for both Docker and CDAP
+
+## [2.3.2]
+
+* Fix issue where components with dashes can't be found when running components that depend upon them. This one addressed the issue in the catalog and in the config creation part of discovery.
+* Fix misleading "missing downstream component" warning that should be an error.
+
+## [2.3.1]
+
+* Fix issue where components with dashes can't be found when running components that depend upon them.
+EDIT: This one addressed the issue in the catalog
+
+## [2.3.0]
+
+* Enhance the `component dev` command to print all the environment variables needed to work with the platform for development
+* Display the component type in the `catalog list` view
+
+## [2.2.0]
+
+* Add fields `cli_version` and `schema_path` to both the components and data formats tables to be used as metadata which can be used for debugging.
+
+## [2.1.0]
+
+* (Re)Initialize both config and profiles by first grabbing files from Nexus
+* Change `--reinit` to be eager and to be used to reinit config and profiles
+* Remove *default* profile
+* Replace the use of backports.tempfile with a combo of pytest `tmpdir` and `monkeypatch`
+
+## [2.0.0]
+
+* Update sqlalchemy and catalog to support postgres and remove mysql support. Still compatible with sqlite.
+* Add the `catalog` command used to tap into the shared catalog
+* Change the `component` and the `data_format` command to be for the particular user of the dcae-cli
+* Changes to support component spec v3: folding of the auxiliary specs into the component spec and adding of the property artifacts
+* Add the ability to publish components and data formats
+
+## [1.6.0]
+
+* Enhance `component run` to take in dmaap json using the `--dmaap-file` option. This is used to generate configuration that will provide client-side dmaap configuration.
+
+## [1.5.0]
+
+* Enhance `component dev` to take in dmaap json using the `--dmaap-file` option. This is used to generate configuration that will provide client-side dmaap configuration.
+* Make json schema remote file paths configurable.
+
+## [1.4.0]
+
+* Enhance component list view to show running instances. The standard view shows the number of deployments associated with a component, and an expanded view shows details of those deployments.
+
+## [1.3.0]
+
+* Fix queries to find unhealthy and defective instances to force dashes to be dots to ensure proper matching
+
+## [1.2.0]
+
+* Expand the undeploy command to include undeploying defective instances
+* Remove suffix from name to fix mis-naming
+
+## [0.12.0]
+
+* Go back to setting of uid, expose setting of db url
+* Add ability to *reinit* configuration via `--reinit`
+
+## [0.11.0]
+
+* Make CDAP Parameters follow parameters definitions
diff --git a/mod/onboardingapi/Dockerfile b/mod/onboardingapi/Dockerfile
new file mode 100644
index 0000000..606ca5b
--- /dev/null
+++ b/mod/onboardingapi/Dockerfile
@@ -0,0 +1,12 @@
+FROM python:3.7-alpine
+
+RUN apk update && \
+    apk add --virtual build-deps gcc python-dev musl-dev && \
+    apk add postgresql-dev bash
+COPY . /code
+WORKDIR /code
+RUN pip install . \
+    && mkdir -p ~/.config/dcae-cli
+EXPOSE 80
+
+CMD /code/start.sh
diff --git a/mod/onboardingapi/README.md b/mod/onboardingapi/README.md
new file mode 100644
index 0000000..c96e5e2
--- /dev/null
+++ b/mod/onboardingapi/README.md
@@ -0,0 +1,29 @@
+# dcae-cli
+
+The `dcae-cli` is a Python command-line tool used to manage and test components and their data formats in onboarding.
+
+## Documentation
+
+Please review the [DCAE platform documentation](http://onap.readthedocs.io/en/latest/submodules/dcaegen2.git/docs/index.html), which has a detailed [`dcae-cli` walkthrough](http://onap.readthedocs.io/en/latest/submodules/dcaegen2.git/docs/sections/components/dcae-cli/walkthrough.html).
+
+## Usage
+
+You will be prompted to initialize the `dcae-cli` the first time you run the tool. You also have the option of [re-initializing using the `--reinit` flag](http://onap.readthedocs.io/en/latest/submodules/dcaegen2.git/docs/sections/components/dcae-cli/quickstart.html#reinit).
+
+You will be prompted to provide a remote server url. The remote server is expected to host several artifacts that the `dcae-cli` requires, such as the JSON schemas used for validation. Use the following to track the bleeding edge:
+
+```
+Please enter the remote server url: https://git.onap.org/dcaegen2/platform/cli/plain
+```
+
+You will also be prompted for details on the postgres database to connect with. Follow the instructions below to run a local instance and provide the connection details in the initialization.
+
+### Local use
+
+The dcae-cli requires access to an onboarding catalog, which is a postgres database. If there is no shared instance for your team or organization, then a workaround is to run a local instance of postgres on your machine. One quick way is to run a postgres Docker container:
+
+```
+docker run -e POSTGRES_PASSWORD=<your password> -e PGDATA=/var/lib/postgresql/data/pgdata -v <local directory>:/var/lib/postgresql/data/pgdata -p 5432:5432 -d postgres:9.5.2
+```
+
+Use your favorite sql client to log into this local instance and create a database named `dcae_onboarding_db`.
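If a SQL client is not handy, the database can also be created from Python with `sqlalchemy-utils`, which the onboarding API already uses internally. A minimal sketch (the connection URL mirrors the local container above; the password is a placeholder):

```
# Create the onboarding database against the local Postgres container.
from sqlalchemy_utils import database_exists, create_database

url = "postgresql://postgres:<your password>@localhost:5432/dcae_onboarding_db"
if not database_exists(url):
    create_database(url)
```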
diff --git a/mod/onboardingapi/dcae_cli/__init__.py b/mod/onboardingapi/dcae_cli/__init__.py new file mode 100644 index 0000000..dc24da7 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/__init__.py @@ -0,0 +1,22 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +from ._version import __version__ diff --git a/mod/onboardingapi/dcae_cli/_version.py b/mod/onboardingapi/dcae_cli/_version.py new file mode 100644 index 0000000..e1ceca3 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/_version.py @@ -0,0 +1,22 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +__version__ = "2.12.0" diff --git a/mod/onboardingapi/dcae_cli/catalog/__init__.py b/mod/onboardingapi/dcae_cli/catalog/__init__.py new file mode 100644 index 0000000..d9b09f5 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/catalog/__init__.py @@ -0,0 +1,36 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +''' +Provides catalog utilities. +''' +from dcae_cli.catalog.mock.catalog import MockCatalog + +from dcae_cli.util.exc import DcaeException + + +def get_catalog(**config): + '''Returns a catalog object''' + ctype = config.get('ctype', 'local') + if ctype == 'local': + return MockCatalog() + else: + raise DcaeException("Unsupported catalog type: {:}".format(ctype)) diff --git a/mod/onboardingapi/dcae_cli/catalog/exc.py b/mod/onboardingapi/dcae_cli/catalog/exc.py new file mode 100644 index 0000000..5d65a41 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/catalog/exc.py @@ -0,0 +1,45 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +''' +Provides catalog classes +''' +from dcae_cli.util.exc import DcaeException + + +class CatalogError(DcaeException): + '''Base Catalog exception''' + + +class DuplicateEntry(CatalogError): + pass + + +class MissingEntry(CatalogError): + pass + + +class FrozenEntry(CatalogError): + pass + + +class ForbiddenRequest(CatalogError): + pass
\ No newline at end of file diff --git a/mod/onboardingapi/dcae_cli/catalog/mock/__init__.py b/mod/onboardingapi/dcae_cli/catalog/mock/__init__.py new file mode 100644 index 0000000..ceddbb9 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/catalog/mock/__init__.py @@ -0,0 +1,21 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- diff --git a/mod/onboardingapi/dcae_cli/catalog/mock/catalog.py b/mod/onboardingapi/dcae_cli/catalog/mock/catalog.py new file mode 100644 index 0000000..dcebdca --- /dev/null +++ b/mod/onboardingapi/dcae_cli/catalog/mock/catalog.py @@ -0,0 +1,834 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ +# -*- coding: utf-8 -*- +""" +Provides the mock catalog +""" +import os +import json +import contextlib +import itertools +from functools import partial +from datetime import datetime + +import six + +from sqlalchemy import create_engine as create_engine_, event, and_, or_ +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import sessionmaker +from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy_utils import database_exists, create_database, drop_database + +from dcae_cli import _version as cli_version +from dcae_cli.catalog.mock.tables import Component, Format, FormatPair, Base +from dcae_cli.catalog.mock.schema import validate_component, validate_format, apply_defaults_docker_config +from dcae_cli.util import reraise_with_msg, get_app_dir +from dcae_cli.util.config import get_config, get_path_component_spec, \ + get_path_data_format +from dcae_cli.util.logger import get_logger +from dcae_cli.util.docker_util import image_exists +from dcae_cli.catalog.exc import CatalogError, DuplicateEntry, MissingEntry, FrozenEntry, ForbiddenRequest +from dcae_cli.util.cdap_util import normalize_cdap_params + + +logger = get_logger('Catalog') + + +#INTERNAL HELPERS +def _get_component(session, name, version): + '''Returns a single component ORM''' + try: + if not version: + query = session.query(Component).filter(Component.name==name).order_by(Component.version.desc()).limit(1) + else: + query = session.query(Component).filter(Component.name==name, Component.version==version) + return query.one() + except NoResultFound: + comp_msg = "{}:{}".format(name, version) if version else name + raise MissingEntry("Component '{}' was not found in the catalog".format(comp_msg)) + +def _get_component_by_id(session, component_id): + try: + result = session.query(Component).filter(Component.id==component_id).one() + return result.__dict__ + except NoResultFound: + raise MissingEntry("Component '{0}' was not found in the catalog" \ + .format(component_id)) + + +def _get_docker_image_from_spec(spec): + images = [ art["uri"] for art in spec["artifacts"] if art["type"] == "docker image" ] + return images[0] + +def _get_docker_image(session, name, version): + '''Returns the docker image name of a given component''' + comp = _get_component(session, name, version) + return _get_docker_image_from_spec(comp.get_spec_as_dict()) + +def _add_docker_component(session, user, spec, update, enforce_image=True): + '''Adds/updates a docker component to the catalog''' + image = _get_docker_image_from_spec(spec) + + if enforce_image and not image_exists(image): + raise CatalogError("Specified image '{}' does not exist locally.".format(image)) + + comp = build_generic_component(session, user, spec, update) + session.commit() + +def _get_cdap_jar_from_spec(spec): + jars = [ art["uri"] for art in spec["artifacts"] if art["type"] == "jar" ] + return jars[0] + +def _add_cdap_component(session, user, spec, update): + '''Adds/updates a cdap component to the catalog''' + comp = build_generic_component(session, user, spec, update) + session.commit() + + +#PUBLIC FUNCTIONS +@contextlib.contextmanager +def SessionTransaction(engine): + '''Provides a transactional scope around a series of operations''' + Session = sessionmaker(engine) + try: + session = Session() + yield session + session.commit() + except IntegrityError as e: + session.rollback() + _raise_if_duplicate(str(engine.url), e) + raise + except Exception: + session.rollback() + raise + finally: + session.close() + + +_dup_e = DuplicateEntry('name:version already 
exists. To update: [In CLI: Use the --update flag] [For HTTP request: Use PUT]') + +def _raise_if_duplicate(url, e): + '''Raises if the exception relates to duplicate entries''' + if 'sqlite' in url: + if 'UNIQUE' in e.orig.args[0].upper(): + raise _dup_e + elif 'postgres' in url: + # e.orig is of type psycopg2.IntegrityError that has + # pgcode which uses the following: + # + # https://www.postgresql.org/docs/current/static/errcodes-appendix.html#ERRCODES-TABLE + # + # 23505 means "unique_violation" + if e.orig.pgcode == "23505": + raise _dup_e + +def create_engine(base, db_name=None, purge_existing=False, db_url=None): + '''Returns an initialized database engine''' + if db_url is None: + if db_name is None: + # no url or db name indicates we want to use the tool's configured db + config = get_config() + url = config['db_url'] + else: + # if only a db name is given, interpret as a sqlite db in the app dir. this maintains backwards compat with existing tests. + db_path = os.path.join(get_app_dir(), db_name) + url = ''.join(('sqlite:///', db_path)) + else: + # a full db url is the most explicit input and should be used over other inputs if provided + url = db_url + + if not database_exists(url): + create_database(url) + elif purge_existing: + drop_database(url) + create_database(url) + + engine = create_engine_(url) + _configure_engine(engine) + base.metadata.create_all(engine) + return engine + + +def _configure_engine(engine): + '''Performs additional db-specific configurations''' + str_url = str(engine.url) + if 'sqlite' in str_url: + event.listen(engine, 'connect', lambda conn, record: conn.execute('pragma foreign_keys=ON')) + + +def get_format(session, name, version): + '''Returns a single data format ORM''' + try: + if not version: + query = session.query(Format).filter(Format.name==name).order_by(Format.version.desc()).limit(1) + else: + query = session.query(Format).filter(Format.name==name, Format.version==version) + return query.one() + except NoResultFound: + msg = "{}:{}".format(name, version) if version else name + raise MissingEntry("Data format '{}' was not found in the catalog.".format(msg)) + + +def _get_format_by_id(session, dataformat_id): + try: + result = session.query(Format).filter(Format.id==dataformat_id).one() + return result.__dict__ + except NoResultFound: + raise MissingEntry("Dataformat '{0}' was not found in the catalog" \ + .format(dataformat_id)) + + +def _create_format_tuple(entry): + '''Create tuple to identify format''' + return (entry['format'], entry['version']) + + +def _get_format_pair(session, req_name, req_version, resp_name, resp_version, create=True): + '''Returns a single data format pair ORM''' + req = get_format(session, req_name, req_version) + resp = get_format(session, resp_name, resp_version) + + query = session.query(FormatPair).filter(and_(FormatPair.req == req, FormatPair.resp == resp)) + try: + return query.one() + except NoResultFound: + if not create: + raise MissingEntry("Data format pair with request '{}:{}' and response '{}:{}' was not found in the catalog.".format(req.name, req.version, resp.name, resp.version)) + + pair = FormatPair(req=req, resp=resp) + session.add(pair) + return pair + +def _create_format_pair_tuple(entry): + '''Create tuple to identify format pair''' + req_name, req_version = entry['request']['format'], entry['request']['version'] + resp_name, resp_version = entry['response']['format'], entry['response']['version'] + return (req_name, req_version, resp_name, resp_version) + +def 
_get_unique_format_things(create_tuple, get_func, entries): + '''Get unique format things (formats, format pairs, ..) + + Args + ---- + create_tuple: Function that has the signature dict->tuple + get_func: Function that has the signature *tuple->orm + entries: list of dicts that have data format details that come from + streams.publishes, streams.subscribes, services.calls, services.provides + + Return + ------ + List of unique orms + ''' + src = set(create_tuple(entry) for entry in entries) + return [get_func(*yo) for yo in src] + + +def verify_component(session, name, version): + '''Returns the orm name and version of a given component''' + orm = _get_component(session, name, version) + return orm.name, orm.version + + +def get_component_type(session, name, version): + '''Returns the component_type of a given component''' + return _get_component(session, name, version).component_type + + +def get_component_spec(session, name, version): + '''Returns the spec dict of a given component''' + return json.loads(_get_component(session, name, version).spec) + + +def get_component_id(session, name, version): + '''Returns the id of a given component''' + return _get_component(session, name, version).id + + +def get_format_spec(session, name, version): + '''Returns the spec dict of a given data format''' + return json.loads(get_format(session, name, version).spec) + + +def get_dataformat_id(session, name, version): + '''Returns the id of a given data format''' + return get_format(session, name, version).id + + +def build_generic_component(session, user, spec, update): + '''Builds, adds, and returns a generic component ORM. Does not commit changes.''' + attrs = spec['self'].copy() + attrs['spec'] = json.dumps(spec) + + # TODO: This should really come from the spec too + attrs['owner'] = user + + # grab existing or create a new component + name, version = attrs['name'], attrs['version'] + if update: + comp = _get_component(session, name, version) + if comp.is_published(): + raise FrozenEntry("Component '{}:{}' has been published and cannot be updated".format(name, version)) + else: + comp = Component() + session.add(comp) + + # REVIEW: Inject these parameters as function arguments instead of this + # hidden approach? + # WATCH: This has to be done here before the code below because there is a + # commit somewhere below and since these fields are not nullable, you'll get a + # violation. 
+ comp.cli_version = cli_version.__version__ + comp.schema_path = get_path_component_spec() + + # build the ORM + for attr, val in six.iteritems(attrs): + setattr(comp, attr, val) + + # update relationships + get_format_local = partial(get_format, session) + get_unique_formats = partial(_get_unique_format_things, _create_format_tuple, + get_format_local) + + try: + comp.publishes = get_unique_formats(spec['streams']['publishes']) + except MissingEntry as e: + reraise_with_msg(e, 'Add failed while traversing "publishes"') + + try: + comp.subscribes = get_unique_formats(spec['streams']['subscribes']) + except MissingEntry as e: + reraise_with_msg(e, 'Add failed while traversing "subscribes"') + + get_format_pairs = partial(_get_format_pair, session) + get_unique_format_pairs = partial(_get_unique_format_things, + _create_format_pair_tuple, get_format_pairs) + + try: + comp.provides = get_unique_format_pairs(spec['services']['provides']) + except MissingEntry as e: + reraise_with_msg(e, 'Add failed while traversing "provides"') + + try: + comp.calls = get_unique_format_pairs(spec['services']['calls']) + except MissingEntry as e: + reraise_with_msg(e, 'Add failed while traversing "calls"') + + return comp + + +def add_format(session, spec, user, update): + '''Helper function which adds a data format to the catalog''' + attrs = spec['self'].copy() + attrs['spec'] = json.dumps(spec) + name, version = attrs['name'], attrs['version'] + + # TODO: This should really come from the spec too + attrs['owner'] = user + + if update: + data_format = get_format(session, name, version) + if data_format.is_published(): + raise FrozenEntry("Data format {}:{} has been published and cannot be updated".format(name, version)) + else: + data_format = Format() + session.add(data_format) + + # build the ORM + for attr, val in six.iteritems(attrs): + setattr(data_format, attr, val) + + # REVIEW: Inject these parameters as function arguments instead of this + # hidden approach? 
+    data_format.cli_version = cli_version.__version__
+    data_format.schema_path = get_path_data_format()
+
+    session.commit()
+
+
+def _filter_neighbors(session, neighbors=None):
+    '''Returns a Component query filtered by the available neighbors'''
+    if neighbors is None:
+        query = session.query(Component)
+    else:
+        subfilt = or_(*[and_(Component.name==n, Component.version==v) for n, v in neighbors])
+        query = session.query(Component).filter(subfilt)
+    return query
+
+
+def get_subscribers(session, orm, neighbors=None):
+    '''Returns a list of component ORMs which subscribe to the specified format'''
+    query = _filter_neighbors(session, neighbors)
+    return query.filter(Component.subscribes.contains(orm)).all()
+
+
+def get_providers(session, orm, neighbors=None):
+    '''Returns a list of component ORMs which provide the specified format pair'''
+    query = _filter_neighbors(session, neighbors)
+    return query.filter(Component.provides.contains(orm)).all()
+
+
+def _match_pub(entries, orms):
+    '''Aligns the publishes orms with spec entries to get the config key'''
+    lookup = {(orm.name, orm.version): orm for orm in orms}
+    for entry in entries:
+        # only http-like streams carry a config_key to match on here
+        if "http" not in entry["type"]:
+            continue
+
+        key = (entry['format'], entry['version'])
+        yield entry['config_key'], lookup[key]
+
+
+def _match_call(entries, orms):
+    '''Aligns the calls orms with spec entries to get the config key'''
+    lookup = {(orm.req.name, orm.req.version, orm.resp.name, orm.resp.version): orm for orm in orms}
+    for entry in entries:
+        key = (entry['request']['format'], entry['request']['version'], entry['response']['format'], entry['response']['version'])
+        yield entry['config_key'], lookup[key]
+
+def get_discovery(get_params_func, session, name, version, neighbors=None):
+    '''Returns the parameters and interface map for a given component, considering its neighbors'''
+    comp = _get_component(session, name, version)
+    spec = json.loads(comp.spec)
+    interfaces = dict()
+    for key, orm in _match_pub(spec['streams']['publishes'], comp.publishes):
+        interfaces[key] = [(c.name, c.version) for c in get_subscribers(session, orm, neighbors) if c is not comp]
+
+    for key, orm in _match_call(spec['services']['calls'], comp.calls):
+        interfaces[key] = [(c.name, c.version) for c in get_providers(session, orm, neighbors) if c is not comp]
+
+    params = get_params_func(spec)
+    return params, interfaces
+
+_get_discovery_for_cdap = partial(get_discovery, normalize_cdap_params)
+_get_discovery_for_docker = partial(get_discovery,
+        lambda spec: {param['name']: param['value'] for param in spec['parameters']})
+
+
+def _get_discovery_for_dmaap(get_component_spec_func, name, version):
+    """Get all config keys that are for dmaap streams
+
+    Returns:
+    --------
+    Tuple of (message router config keys list, data router config keys list)
+    """
+    spec = get_component_spec_func(name, version)
+
+    all_streams = spec["streams"].get("publishes", []) \
+            + spec["streams"].get("subscribes", [])
+
+    def is_for_message_router(stream):
+        return stream["type"] == "message router" \
+                or stream["type"] == "message_router"
+
+    mr_keys = [ stream["config_key"] for stream in filter(is_for_message_router, all_streams) ]
+
+    def is_for_data_router(stream):
+        return stream["type"] == "data router" \
+                or stream["type"] == "data_router"
+
+    dr_keys = [ stream["config_key"] for stream in filter(is_for_data_router, all_streams) ]
+    return mr_keys, dr_keys
+
+
+def _filter_latest(orms):
+    '''Filters and yields only (name, version, *) orm tuples with the highest version'''
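+    # e.g. ("std.comp", "1.0.0", ...) and ("std.comp", "2.0.0", ...) collapse to
+    # the ("std.comp", "2.0.0", ...) tuple. Note that max() compares version
+    # strings lexicographically, not by full semver precedence.
+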
get_first_key_func = lambda x: x[0] + # itertools.groupby requires the input to be sorted + sorted_orms = sorted(orms, key=get_first_key_func) + for _, g in itertools.groupby(sorted_orms, get_first_key_func): + yield max(g, key=lambda x: x[1]) + + +def list_components(session, user, only_published, subscribes=None, publishes=None, + provides=None, calls=None, latest=True): + """Get list of components + + Returns: + -------- + List of component orms as dicts + """ + filters = list() + if subscribes: + filters.extend(Component.subscribes.contains(get_format(session, n, v)) for n, v in subscribes) + if publishes: + filters.extend(Component.publishes.contains(get_format(session, n, v)) for n, v in publishes) + if provides: + filters.extend(Component.provides.contains(_get_format_pair(session, reqn, reqv, respn, respv, create=False)) + for (reqn, reqv), (respn, respv) in provides) + if calls: + filters.extend(Component.calls.contains(_get_format_pair(session, reqn, reqv, respn, respv, create=False)) + for (reqn, reqv), (respn, respv) in calls) + if filters: + query = session.query(Component).filter(or_(*filters)) + else: + query = session.query(Component) + + if user: + query = query.filter(Component.owner==user) + if only_published: + query = query.filter(Component.when_published!=None) + + orms = ((orm.name, orm.version, orm.component_type, orm) for orm in query) + + if latest: + orms = _filter_latest(orms) + + return [ orm.__dict__ for _, _, _, orm in orms ] + + +def _list_formats(session, user, only_published, latest=True): + """Get list of data formats + + Returns + ------- + List of data format orms as dicts + """ + query = session.query(Format).order_by(Format.modified.desc()) + + if user: + query = query.filter(Format.owner==user) + if only_published: + query = query.filter(Format.when_published!=None) + + orms = [ (orm.name, orm.version, orm) for orm in query ] + + if latest: + orms = _filter_latest(orms) + return [ orm.__dict__ for _, _, orm in orms ] + + +def build_config_keys_map(spec): + """Build config keys map + + Return + ------ + Dict where each item: + + <config_key>: { "group": <grouping>, "type": <http|message_router|data_router> } + + where grouping includes "streams_publishes", "streams_subscribes", "services_calls" + """ + # subscribing as http doesn't have config key + ss = [ (s["config_key"], { "group": "streams_subscribes", "type": s["type"] }) + for s in spec["streams"]["subscribes"] if "config_key" in s] + sp = [ (s["config_key"], { "group": "streams_publishes", "type": s["type"] }) + for s in spec["streams"]["publishes"] ] + sc = [ (s["config_key"], { "group": "services_calls" }) + for s in spec["services"]["calls"] ] + return dict(ss+sp+sc) + + +def get_data_router_subscriber_route(spec, config_key): + """Get route by config key for data router subscriber + + Utility method that parses the component spec + """ + for s in spec["streams"].get("subscribes", []): + if s["type"] in ["data_router", "data router"] \ + and s["config_key"] == config_key: + return s["route"] + + raise MissingEntry("No data router subscriber for {0}".format(config_key)) + + +class MockCatalog(object): + + def __init__(self, purge_existing=False, enforce_image=False, db_name=None, engine=None, db_url=None): + self.engine = create_engine(Base, db_name=db_name, purge_existing=purge_existing, db_url=db_url) if engine is None else engine + self.enforce_image = enforce_image + + def add_component(self, user, spec, update=False): + '''Validates component specification and adds component to the mock 
catalog''' + validate_component(spec) + + component_type = spec["self"]["component_type"] + + with SessionTransaction(self.engine) as session: + if component_type == "cdap": + _add_cdap_component(session, user, spec, update) + elif component_type == "docker": + _add_docker_component(session, user, spec, update, enforce_image=self.enforce_image) + else: + raise CatalogError("Unknown component type: {0}".format(component_type)) + + def get_docker_image(self, name, version): + '''Returns the docker image name associated with this component''' + with SessionTransaction(self.engine) as session: + return _get_docker_image(session, name, version) + + def get_docker(self, name, version): + with SessionTransaction(self.engine) as session: + comp = _get_component(session, name, version) + spec = comp.get_spec_as_dict() + # NOTE: Defaults are being applied for docker config here at read + # time. Not completely sure that this is the correct approach. The + # benefit is that defaults can be changed without altering the stored + # specs. It's a nice layering. + docker_config = apply_defaults_docker_config(spec["auxilary"]) + return _get_docker_image_from_spec(spec), docker_config, spec + + def get_docker_config(self, name, version): + _, docker_config, _ = self.get_docker(name, version) + return docker_config + + def get_cdap(self, name, version): + '''Returns a tuple representing this cdap component + + Returns + ------- + tuple(jar, config, spec) + jar: string + URL where the CDAP jar is located. + config: dict + A dictionary loaded from the CDAP JSON configuration file. + spec: dict + The dcae-cli component specification file. + ''' + with SessionTransaction(self.engine) as session: + comp = _get_component(session, name, version) + spec = comp.get_spec_as_dict() + cdap_config = spec["auxilary"] + return _get_cdap_jar_from_spec(spec), cdap_config, spec + + def get_component_type(self, name, version): + '''Returns the component type associated with this component''' + with SessionTransaction(self.engine) as session: + return get_component_type(session, name, version) + + def get_component_spec(self, name, version): + '''Returns the spec dict associated with this component''' + with SessionTransaction(self.engine) as session: + return get_component_spec(session, name, version) + + def get_component_id(self, name, version): + '''Returns the id associated with this component''' + with SessionTransaction(self.engine) as session: + return get_component_id(session, name, version) + + def get_component_by_id(self, component_id): + '''Returns the component associated with this id''' + with SessionTransaction(self.engine) as session: + return _get_component_by_id(session, component_id) + + def get_format_spec(self, name, version): + '''Returns the spec dict associated with this data format''' + with SessionTransaction(self.engine) as session: + return get_format_spec(session, name, version) + + def get_dataformat_id(self, name, version): + '''Returns the id associated with this data format''' + with SessionTransaction(self.engine) as session: + return get_dataformat_id(session, name, version) + + def get_dataformat_by_id(self, dataformat_id): + '''Returns the dataformat associated with this id''' + with SessionTransaction(self.engine) as session: + return _get_format_by_id(session, dataformat_id) + + def add_format(self, spec, user, update=False): + '''Validates data format specification and adds data format to the mock catalog''' + validate_format(spec) + with SessionTransaction(self.engine) as session: + 
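            # delegates to the module-level add_format() helper above, which
+            # commits the session itself
+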
add_format(session, spec, user, update) + + def get_discovery_for_cdap(self, name, version, neighbors=None): + '''Returns the parameters and interface map for a given component and considering its neighbors''' + with SessionTransaction(self.engine) as session: + return _get_discovery_for_cdap(session, name, version, neighbors) + + def get_discovery_for_docker(self, name, version, neighbors=None): + '''Returns the parameters and interface map for a given component and considering its neighbors''' + with SessionTransaction(self.engine) as session: + return _get_discovery_for_docker(session, name, version, neighbors) + + def get_discovery_for_dmaap(self, name, version): + with SessionTransaction(self.engine) as session: + get_component_spec_func = partial(get_component_spec, session) + return _get_discovery_for_dmaap(get_component_spec_func, name, version) + + def get_discovery_from_spec(self, user, target_spec, neighbors=None): + '''Get pieces to generate configuration for the given target spec + + This function is used to obtain the pieces needed to generate + the application configuration json: parameters map, interfaces map, dmaap + map. Where the input is a provided specification that hasn't been added to + the catalog - prospective specs - which includes a component that doesn't + exist or a new version of an existing spec. + + Returns + ------- + Tuple of three elements: + + - Dict of parameter name to parameter value + - Dict of "config_key" to list of (component.name, component.version) + known as "interface_map" + - Tuple of lists of "config_key" the first for message router the second + for data router known as "dmaap_map" + ''' + validate_component(target_spec) + + with SessionTransaction(self.engine) as session: + # The following approach was taken in order to: + # 1. Re-use existing functionality e.g. implement fast + # 2. In order to make ORM-specific queries, I need the entire ORM + # in SQLAlchemy meaning I cannot do arbitrary DataFormatPair queries + # without Component. + name = target_spec["self"]["name"] + version = target_spec["self"]["version"] + + try: + # Build a component with update to True first because you may + # want to run this for an existing component + build_generic_component(session, user, target_spec, True) + except MissingEntry: + # Since it doesn't exist already, build a new component + build_generic_component(session, user, target_spec, False) + + # This is needed so that subsequent queries will "see" the component + session.flush() + + ctype = target_spec["self"]["component_type"] + + if ctype == "cdap": + params, interface_map = _get_discovery_for_cdap(session, name, + version, neighbors) + elif ctype == "docker": + params, interface_map = _get_discovery_for_docker(session, name, + version, neighbors) + + # Don't want to commit these changes so rollback. 
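+            # The component built and flushed above was only needed so the
+            # discovery queries could see the prospective spec.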
+ session.rollback() + + # Use the target spec as the source to compile the config keys from + dmaap_config_keys = _get_discovery_for_dmaap( + lambda name, version: target_spec, name, version) + + return params, interface_map, dmaap_config_keys + + def verify_component(self, name, version): + '''Returns the component's name and version if it exists and raises an exception otherwise''' + with SessionTransaction(self.engine) as session: + return verify_component(session, name, version) + + def list_components(self, subscribes=None, publishes=None, provides=None, + calls=None, latest=True, user=None, only_published=False): + '''Returns a list of component names which match the specified filter sequences''' + with SessionTransaction(self.engine) as session: + return list_components(session, user, only_published, subscribes, + publishes, provides, calls, latest) + + def list_formats(self, latest=True, user=None, only_published=False): + """Get list of data formats + + Returns + ------- + List of data formats as dicts + """ + with SessionTransaction(self.engine) as session: + return _list_formats(session, user, only_published, latest) + + def get_format(self, name, version): + """Get data format + + Throws MissingEntry exception if no matches found. + + Returns + ------- + Dict representation of data format + """ + with SessionTransaction(self.engine) as session: + return get_format(session, name, version).__dict__ + + def _publish(self, get_func, user, name, version): + """Publish data format + + Args: + ----- + get_func: Function that takes a session, name, version and outputs a data + object either Component or Format + + Returns: + -------- + True upon success else False + """ + # TODO: To make function composeable, it should take in the data format + # object + with SessionTransaction(self.engine) as session: + obj = get_func(session, name, version) + + if obj: + if obj.owner != user: + errorMsg = "Not authorized to modify {0}:{1}".format(name, version) + logger.error(errorMsg) + raise ForbiddenRequest(errorMsg) + elif obj.when_published: + errorMsg = "{0}:{1} has already been published".format(name, version) + logger.error(errorMsg) + raise CatalogError(errorMsg) + else: + obj.when_published = datetime.utcnow() + session.commit() + else: + errorMsg = "{0}:{1} not found".format(name, version) + logger.error(errorMsg) + raise MissingEntry(errorMsg) + + return True + + def publish_format(self, user, name, version): + """Publish data format + + Returns + ------- + True upon success else False + """ + return self._publish(get_format, user, name, version) + + def get_unpublished_formats(self, comp_name, comp_version): + """Get unpublished formats for given component + + Returns: + -------- + List of unique data format name, version pairs + """ + with SessionTransaction(self.engine) as session: + comp = _get_component(session, comp_name, comp_version) + + dfs = comp.publishes + comp.subscribes + dfs += [ p.req for p in comp.provides] + dfs += [ p.resp for p in comp.provides] + dfs += [ c.req for c in comp.calls] + dfs += [ c.resp for c in comp.calls] + + def is_not_published(orm): + return orm.when_published == None + + formats = [(df.name, df.version) for df in filter(is_not_published, dfs)] + return list(set(formats)) + + def publish_component(self, user, name, version): + """Publish component + + Returns + ------- + True upon success else False + """ + return self._publish(_get_component, user, name, version) diff --git a/mod/onboardingapi/dcae_cli/catalog/mock/schema.py 
b/mod/onboardingapi/dcae_cli/catalog/mock/schema.py
new file mode 100644
index 0000000..640d125
--- /dev/null
+++ b/mod/onboardingapi/dcae_cli/catalog/mock/schema.py
@@ -0,0 +1,191 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+# -*- coding: utf-8 -*-
+"""
+Provides jsonschema validation
+"""
+import json
+from functools import partial, reduce
+
+import six
+from jsonschema import validate, ValidationError
+import requests
+
+from dcae_cli.util import reraise_with_msg, fetch_file_from_web
+from dcae_cli.util import config as cli_config
+from dcae_cli.util.exc import DcaeException
+from dcae_cli.util.logger import get_logger
+
+
+log = get_logger('Schema')
+
+# UPDATE: This message applies to the component spec, which has been moved onto a
+# remote server.
+#
+# WARNING: The component spec schema uses a "oneOf" for service provides, which
+# validates as long as any one of the alternatives matches. This is wrong because
+# what we really want is something like:
+#     if component_type == docker
+#         provides = foo
+#     elif component_type == cdap
+#         provides = bar
+# The unlikely but problematic case: a cdap developer gets hold of the docker
+# documentation, uses that, the spec validates, and then blows up at cdap runtime.
+
+
+# TODO: The next step here is to decide how to manage the links to the schemas. Either:
+#
+# a) Manage the links in the dcae-cli tool itself, which raises the question of
+# whether this belongs in the config, pointing to some remote server or even to
+# the local machine.
+# UPDATE: This item has been mostly completed; at least the path is configurable now.
+
+# b) Read the links to the schemas from the spec - self-describing jsons. Is
+# this even feasible?
+
+# c) Both
+#
+
+class FetchSchemaError(RuntimeError):
+    pass
+
+def _fetch_schema(schema_path):
+    try:
+        server_url = cli_config.get_server_url()
+        return fetch_file_from_web(server_url, schema_path)
+    except requests.HTTPError as e:
+        raise FetchSchemaError("HTTP error from fetching schema", e)
+    except Exception as e:
+        raise FetchSchemaError("Unexpected error from fetching schema", e)
+
+
+def _safe_dict(obj):
+    '''Returns a dict from a dict or json string'''
+    if isinstance(obj, str):
+        return json.loads(obj)
+    else:
+        return obj
+
+def _validate(fetch_schema_func, schema_path, spec):
+    '''Validate the given spec
+
+    Fetch the schema and then validate. Upon an error from fetching or validation,
+    a DcaeException is raised.
+ + Parameters + ---------- + fetch_schema_func: function that takes schema_path -> dict representation of schema + throws a FetchSchemaError upon any failure + schema_path: string - path to schema + spec: dict or string representation of JSON of schema instance + + Returns + ------- + Nothing, silence is golden + ''' + try: + schema = fetch_schema_func(schema_path) + validate(_safe_dict(spec), schema) + except ValidationError as e: + reraise_with_msg(e, as_dcae=True) + except FetchSchemaError as e: + reraise_with_msg(e, as_dcae=True) + +_validate_using_nexus = partial(_validate, _fetch_schema) + + +def apply_defaults(properties_definition, properties): + """Utility method to enforce expected defaults + + This method is used to enforce properties that are *expected* to have at least + the default if not set by a user. Expected properties are not required but + have a default set. jsonschema does not provide this. + + Parameters + ---------- + properties_definition: dict of the schema definition of the properties to use + for verifying and applying defaults + properties: dict of the target properties to verify and apply defaults to + + Return + ------ + dict - a new version of properties that has the expected default values + """ + # Recursively process all inner objects. Look for more properties and not match + # on type + for k,v in six.iteritems(properties_definition): + if "properties" in v: + properties[k] = apply_defaults(v["properties"], properties.get(k, {})) + + # Collect defaults + defaults = [ (k, v["default"]) for k, v in properties_definition.items() if "default" in v ] + + def apply_default(accumulator, default): + k, v = default + if k not in accumulator: + # Not doing data type checking and any casting. Assuming that this + # should have been taken care of in validation + accumulator[k] = v + return accumulator + + return reduce(apply_default, defaults, properties) + +def apply_defaults_docker_config(config): + """Apply expected defaults to Docker config + Parameters + ---------- + config: Docker config dict + Return + ------ + Updated Docker config dict + """ + # Apply health check defaults + healthcheck_type = config["healthcheck"]["type"] + component_spec = _fetch_schema(cli_config.get_path_component_spec()) + + if healthcheck_type in ["http", "https"]: + apply_defaults_func = partial(apply_defaults, + component_spec["definitions"]["docker_healthcheck_http"]["properties"]) + elif healthcheck_type in ["script"]: + apply_defaults_func = partial(apply_defaults, + component_spec["definitions"]["docker_healthcheck_script"]["properties"]) + else: + # You should never get here + apply_defaults_func = lambda x: x + + config["healthcheck"] = apply_defaults_func(config["healthcheck"]) + + return config + +def validate_component(spec): + _validate_using_nexus(cli_config.get_path_component_spec(), spec) + + # REVIEW: Could not determine how to do this nicely in json schema. This is + # not ideal. We want json schema to be the "it" for validation. 
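+    # The check below enforces a cross-field rule (cdap components may not
+    # subscribe via data router) that draft-04 JSON Schema cannot easily
+    # express, hence the code-level validation.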
+ ctype = component_type = spec["self"]["component_type"] + + if ctype == "cdap": + invalid = [s for s in spec["streams"].get("subscribes", []) \ + if s["type"] in ["data_router", "data router"]] + if invalid: + raise DcaeException("Cdap component as data router subscriber is not supported.") + +def validate_format(spec): + path = cli_config.get_path_data_format() + _validate_using_nexus(path, spec) diff --git a/mod/onboardingapi/dcae_cli/catalog/mock/tables.py b/mod/onboardingapi/dcae_cli/catalog/mock/tables.py new file mode 100644 index 0000000..0e10b79 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/catalog/mock/tables.py @@ -0,0 +1,149 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +''' +Provides a local mock catalog +''' +import uuid +import json +from datetime import datetime + +from sqlalchemy import UniqueConstraint, Table, Column, String, DateTime, ForeignKey, Boolean, Enum, Text +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import relationship +from sqlalchemy.schema import PrimaryKeyConstraint + + +datetime_now = datetime.utcnow + +Base = declarative_base() + + +published = Table('published', Base.metadata, + Column('component_id', String, ForeignKey('components.id', ondelete='CASCADE'), nullable=False), + Column('format_id', String, ForeignKey('formats.id', ondelete='CASCADE'), nullable=False), + PrimaryKeyConstraint('component_id', 'format_id') +) + + +subscribed = Table('subscribed', Base.metadata, + Column('component_id', String, ForeignKey('components.id', ondelete='CASCADE'), nullable=False), + Column('format_id', String, ForeignKey('formats.id', ondelete='CASCADE'), nullable=False), + PrimaryKeyConstraint('component_id', 'format_id') +) + + +provided = Table('provided', Base.metadata, + Column('component_id', String, ForeignKey('components.id', ondelete='CASCADE'), nullable=False), + Column('pair_id', String, ForeignKey('format_pairs.id', ondelete='CASCADE'), nullable=False), + PrimaryKeyConstraint('component_id', 'pair_id') +) + + +called = Table('called', Base.metadata, + Column('component_id', String, ForeignKey('components.id', ondelete='CASCADE'), nullable=False), + Column('pair_id', String, ForeignKey('format_pairs.id', ondelete='CASCADE'), nullable=False), + PrimaryKeyConstraint('component_id', 'pair_id') +) + + +def generate_uuid(): + return str(uuid.uuid4()) + + +class Component(Base): + __tablename__ = 'components' + id = Column(String, primary_key=True, default=generate_uuid) + created = Column(DateTime, 
default=datetime_now, nullable=False)
+    modified = Column(DateTime, default=datetime_now, onupdate=datetime_now, nullable=False)
+    owner = Column(String, nullable=False)
+    # To be used for tracking and debugging
+    cli_version = Column(String, nullable=False)
+    schema_path = Column(String, nullable=False)
+
+    name = Column(String(), nullable=False)
+    component_type = Column(Enum('docker', 'cdap', name='component_types'), nullable=False)
+    version = Column(String(), nullable=False)
+    description = Column(Text(), nullable=False)
+    spec = Column(Text(), nullable=False)
+
+    when_added = Column(DateTime, default=datetime_now, nullable=True)
+    when_published = Column(DateTime, default=None, nullable=True)
+    when_revoked = Column(DateTime, default=None, nullable=True)
+
+    publishes = relationship('Format', secondary=published)
+    subscribes = relationship('Format', secondary=subscribed)
+    provides = relationship('FormatPair', secondary=provided)
+    calls = relationship('FormatPair', secondary=called)
+
+    __table_args__ = (UniqueConstraint(name, version), )
+
+    def __repr__(self):
+        return '<{:}>'.format((self.__class__.__name__, self.id, self.name, self.version))
+
+    def is_published(self):
+        return self.when_published is not None
+
+    def get_spec_as_dict(self):
+        return json.loads(self.spec)
+
+
+class Format(Base):
+    __tablename__ = 'formats'
+    id = Column(String, primary_key=True, default=generate_uuid)
+    created = Column(DateTime, default=datetime_now, nullable=False)
+    modified = Column(DateTime, default=datetime_now, onupdate=datetime_now, nullable=False)
+    owner = Column(String, nullable=False)
+    # To be used for tracking and debugging
+    cli_version = Column(String, nullable=False)
+    schema_path = Column(String, nullable=False)
+
+    name = Column(String(), nullable=False)
+    version = Column(String(), nullable=False)
+    description = Column(Text(), nullable=True)
+    spec = Column(Text(), nullable=False)
+
+    when_added = Column(DateTime, default=datetime_now, nullable=True)
+    when_published = Column(DateTime, default=None, nullable=True)
+    when_revoked = Column(DateTime, default=None, nullable=True)
+
+    __table_args__ = (UniqueConstraint(name, version), )
+
+    def __repr__(self):
+        return '<{:}>'.format((self.__class__.__name__, self.id, self.name, self.version))
+
+    def is_published(self):
+        return self.when_published is not None
+
+
+class FormatPair(Base):
+    __tablename__ = 'format_pairs'
+    id = Column(String, primary_key=True, default=generate_uuid)
+    req_id = Column(String, ForeignKey('formats.id', ondelete='CASCADE'))
+    resp_id = Column(String, ForeignKey('formats.id', ondelete='CASCADE'))
+
+    req = relationship('Format', foreign_keys=req_id, uselist=False)
+    resp = relationship('Format', foreign_keys=resp_id, uselist=False)
+
+    __table_args__ = (UniqueConstraint(req_id, resp_id), )
+
+    def __repr__(self):
+        return '<{:}>'.format((self.__class__.__name__, self.id, self.req, self.resp))
diff --git a/mod/onboardingapi/dcae_cli/catalog/mock/tests/test_mock_catalog.py b/mod/onboardingapi/dcae_cli/catalog/mock/tests/test_mock_catalog.py
new file mode 100644
index 0000000..0859c44
--- /dev/null
+++ b/mod/onboardingapi/dcae_cli/catalog/mock/tests/test_mock_catalog.py
@@ -0,0 +1,786 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +''' +Tests the mock catalog +''' +import json +from copy import deepcopy +from functools import partial + +import pytest + +from sqlalchemy.exc import IntegrityError + +from dcae_cli.catalog.mock.catalog import MockCatalog, MissingEntry, DuplicateEntry, _get_unique_format_things +from dcae_cli.catalog.mock import catalog + + +_c1_spec = {'self': {'name': 'std.comp_one', + 'version': '1.0.0', + 'description': 'comp1', + 'component_type': 'docker'}, + 'streams': {'publishes': [{'format': 'std.format_one', + 'version': '1.0.0', + 'config_key': 'pub1', + 'type': 'http'}], + 'subscribes': [{'format': 'std.format_one', + 'version': '1.0.0', + 'route': '/sub1', + 'type': 'http'}]}, + 'services': {'calls': [{'request': {'format': 'std.format_one', + 'version': '1.0.0'}, + 'response': {'format': 'std.format_one', + 'version': '1.0.0'}, + 'config_key': 'call1'}], + 'provides': [{'request': {'format': 'std.format_one', + 'version': '1.0.0'}, + 'response': {'format': 'std.format_one', + 'version': '1.0.0'}, + 'route': '/prov1'}]}, + 'parameters': [{"name": "foo", + "value": 1, + "description": "the foo thing", + "designer_editable": False, + "sourced_at_deployment": False, + "policy_editable": False}, + {"name": "bar", + "value": 2, + "description": "the bar thing", + "designer_editable": False, + "sourced_at_deployment": False, + "policy_editable": False} + ], + 'artifacts': [{ "uri": "foo-image", "type": "docker image" }], + 'auxilary': { + "healthcheck": { + "type": "http", + "endpoint": "/health", + "interval": "15s", + "timeout": "1s" + } + } + } + +_c2_spec = {'self': {'name': 'std.comp_two', + 'version': '1.0.0', + 'description': 'comp2', + 'component_type': 'docker'}, + 'streams': {'publishes': [], + 'subscribes': [{'format': 'std.format_one', + 'version': '1.0.0', + 'route': '/sub1', + 'type': 'http'}]}, + 'services': {'calls': [], + 'provides': [{'request': {'format': 'std.format_one', + 'version': '1.0.0'}, + 'response': {'format': 'std.format_one', + 'version': '1.0.0'}, + 'route': '/prov1'}]}, + 'parameters': [], + 'artifacts': [{ "uri": "bar-image", "type": "docker image" }], + 'auxilary': { + "healthcheck": { + "type": "http", + "endpoint": "/health", + "interval": "15s", + "timeout": "1s" + } + } + } + + +_c2v2_spec = {'self': {'name': 'std.comp_two', + 'version': '2.0.0', + 'description': 'comp2', + 'component_type': 'docker'}, + 'streams': {'publishes': [], + 'subscribes': [{'format': 'std.format_one', + 'version': '1.0.0', + 'route': '/sub1', + 'type': 'http'}]}, + 'services': {'calls': [], + 'provides': [{'request': {'format': 'std.format_one', + 'version': '1.0.0'}, + 'response': {'format': 'std.format_one', + 'version': '1.0.0'}, + 'route': '/prov1'}]}, + 'parameters': [], 
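+            # identical to _c2_spec apart from the bumped version and the artifact uri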
+ 'artifacts': [{ "uri": "baz-image", "type": "docker image" }], + 'auxilary': { + "healthcheck": { + "type": "http", + "endpoint": "/health", + "interval": "15s", + "timeout": "1s" + } + } + } + + +_c3_spec = {'self': {'name': 'std.comp_three', + 'version': '3.0.0', + 'description': 'comp3', + 'component_type': 'docker'}, + 'streams': {'publishes': [], + 'subscribes': [{'format': 'std.format_two', + 'version': '1.5.0', + 'route': '/sub1', + 'type': 'http'}]}, + 'services': {'calls': [], + 'provides': [{'request': {'format': 'std.format_one', + 'version': '1.0.0'}, + 'response': {'format': 'std.format_two', + 'version': '1.5.0'}, + 'route': '/prov1'}]}, + 'parameters': [], + 'artifacts': [{ "uri": "bazinga-image", "type": "docker image" }], + 'auxilary': { + "healthcheck": { + "type": "http", + "endpoint": "/health", + "interval": "15s", + "timeout": "1s" + } + } + } + + +_df1_spec = { + "self": { + "name": "std.format_one", + "version": "1.0.0", + "description": "df1" + }, + "dataformatversion": "1.0.0", + "jsonschema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "raw-text": { + "type": "string" + } + }, + "required": ["raw-text"], + "additionalProperties": False + } + } +_df2_spec = { + "self": { + "name": "std.format_two", + "version": "1.5.0", + "description": "df2" + }, + "dataformatversion": "1.0.0", + "jsonschema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "raw-text": { + "type": "string" + } + }, + "required": ["raw-text"], + "additionalProperties": False + } + } +_df2v2_spec = { + "self": { + "name": "std.format_two", + "version": "2.0.0", + "description": "df2" + }, + "dataformatversion": "1.0.0", + "jsonschema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "raw-text": { + "type": "string" + } + }, + "required": ["raw-text"], + "additionalProperties": False + } + } + +_cdap_spec={ + "self":{ + "name":"std.cdap_comp", + "version":"0.0.0", + "description":"cdap test component", + "component_type":"cdap" + }, + "streams":{ + "publishes":[ + { + "format":"std.format_one", + "version":"1.0.0", + "config_key":"pub1", + "type": "http" + } + ], + "subscribes":[ + { + "format":"std.format_two", + "version":"1.5.0", + "route":"/sub1", + "type": "http" + } + ] + }, + "services":{ + "calls":[ + + ], + "provides":[ + { + "request":{ + "format":"std.format_one", + "version":"1.0.0" + }, + "response":{ + "format":"std.format_two", + "version":"1.5.0" + }, + "service_name":"baphomet", + "service_endpoint":"rises", + "verb":"GET" + } + ] + }, + "parameters": { + "app_config" : [], + "app_preferences" : [], + "program_preferences" : [] + }, + "artifacts": [{"uri": "bahpomet.com", "type": "jar"}], + "auxilary": { + "streamname":"streamname", + "artifact_version":"6.6.6", + "artifact_name": "test_name", + "programs" : [{"program_type" : "flows", "program_id" : "flow_id"}] + } + +} + + +def test_component_basic(mock_cli_config, mock_db_url, catalog=None): + '''Tests basic component usage of MockCatalog''' + if catalog is None: + mc = MockCatalog(db_name='dcae_cli.test.db', purge_existing=True, + enforce_image=False, db_url=mock_db_url) + else: + mc = catalog + + c1_spec = deepcopy(_c1_spec) + df1_spec = deepcopy(_df1_spec) + df2_spec = deepcopy(_df2_spec) + + user = "test_component_basic" + + # success + mc.add_format(df2_spec, user) + + # duplicate + with pytest.raises(DuplicateEntry): + mc.add_format(df2_spec, user) + + # component relies 
on df1_spec which hasn't been added + with pytest.raises(MissingEntry): + mc.add_component(user, c1_spec) + + # add df1 and comp1 + mc.add_format(df1_spec, user) + mc.add_component(user, c1_spec) + + with pytest.raises(DuplicateEntry): + mc.add_component(user, c1_spec) + + cname, cver = mc.verify_component('std.comp_one', version=None) + assert cver == '1.0.0' + + +def test_format_basic(mock_cli_config, mock_db_url, catalog=None): + '''Tests basic data format usage of MockCatalog''' + if catalog is None: + mc = MockCatalog(db_name='dcae_cli.test.db', purge_existing=True, + db_url=mock_db_url) + else: + mc = catalog + + user = "test_format_basic" + + df1_spec = deepcopy(_df1_spec) + df2_spec = deepcopy(_df2_spec) + + # success + mc.add_format(df1_spec, user) + + # duplicate is bad + with pytest.raises(DuplicateEntry): + mc.add_format(df1_spec, user) + + # allow update of same version + new_descr = 'a new description' + df1_spec['self']['description'] = new_descr + mc.add_format(df1_spec, user, update=True) + + # adding a new version is kosher + new_ver = '2.0.0' + df1_spec['self']['version'] = new_ver + mc.add_format(df1_spec, user) + + # can't update a format that doesn't exist + with pytest.raises(MissingEntry): + mc.add_format(df2_spec, user, update=True) + + # get spec and make sure it's updated + spec = mc.get_format_spec(df1_spec['self']['name'], version=None) + assert spec['self']['version'] == new_ver + assert spec['self']['description'] == new_descr + + +def test_discovery(mock_cli_config, mock_db_url, catalog=None): + '''Tests creation of discovery objects''' + if catalog is None: + mc = MockCatalog(db_name='dcae_cli.test.db', purge_existing=True, + enforce_image=False, db_url=mock_db_url) + else: + mc = catalog + + user = "test_discovery" + + c1_spec = deepcopy(_c1_spec) + df1_spec = deepcopy(_df1_spec) + c2_spec = deepcopy(_c2_spec) + + mc.add_format(df1_spec, user) + mc.add_component(user, c1_spec) + mc.add_component(user, c2_spec) + + params, interfaces = mc.get_discovery_for_docker(c1_spec['self']['name'], c1_spec['self']['version']) + assert params == {'bar': 2, 'foo': 1} + assert interfaces == {'call1': [('std.comp_two', '1.0.0')], 'pub1': [('std.comp_two', '1.0.0')]} + + +def _spec_tuple(dd): + '''Returns a (name, version, component type) tuple from a given component spec dict''' + return dd['self']['name'], dd['self']['version'], dd['self']['component_type'] + + +def _comp_tuple_set(*dds): + '''Runs a set of component spec tuples''' + return set(map(_spec_tuple, dds)) + + +def _format_tuple(dd): + '''Returns a (name, version) tuple from a given data format spec dict''' + return dd['self']['name'], dd['self']['version'] + + +def _format_tuple_set(*dds): + '''Runs a set of data format spec tuples''' + return set(map(_format_tuple, dds)) + + +def test_comp_list(mock_cli_config, mock_db_url, catalog=None): + '''Tests the list functionality of the catalog''' + if catalog is None: + mc = MockCatalog(db_name='dcae_cli.test.db', purge_existing=True, + enforce_image=False, db_url=mock_db_url) + else: + mc = catalog + + user = "test_comp_list" + + df1_spec = deepcopy(_df1_spec) + df2_spec = deepcopy(_df2_spec) + df2v2_spec = deepcopy(_df2v2_spec) + + c1_spec = deepcopy(_c1_spec) + c2_spec = deepcopy(_c2_spec) + c2v2_spec = deepcopy(_c2v2_spec) + c3_spec = deepcopy(_c3_spec) + + mc.add_format(df1_spec, user) + mc.add_format(df2_spec, user) + mc.add_format(df2v2_spec, user) + mc.add_component(user, c1_spec) + mc.add_component(user, c2_spec) + mc.add_component(user, c2v2_spec) + 
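    # std.comp_two now exists in two versions; list_components(latest=True)
+    # below should collapse it to 2.0.0
+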
mc.add_component(user, c3_spec) + + mc.add_component(user,_cdap_spec) + + def components_to_specs(components): + return [ json.loads(c["spec"]) for c in components ] + + # latest by default. only v2 of c2 + components = mc.list_components() + specs = components_to_specs(components) + assert _comp_tuple_set(*specs) == _comp_tuple_set(c1_spec, c2v2_spec, c3_spec, _cdap_spec) + + # all components + components = mc.list_components(latest=False) + specs = components_to_specs(components) + assert _comp_tuple_set(*specs) == _comp_tuple_set(c1_spec, c2_spec, c2v2_spec, c3_spec, _cdap_spec) + + components = mc.list_components(subscribes=[('std.format_one', None)]) + specs = components_to_specs(components) + assert _comp_tuple_set(*specs) == _comp_tuple_set(c1_spec, c2v2_spec) + + # no comps subscribe to latest std.format_two + components = mc.list_components(subscribes=[('std.format_two', None)]) + assert not components + + components = mc.list_components(subscribes=[('std.format_two', '1.5.0')]) + specs = components_to_specs(components) + assert _comp_tuple_set(*specs) == _comp_tuple_set(c3_spec, _cdap_spec) + + # raise if format doesn't exist + with pytest.raises(MissingEntry): + mc.list_components(subscribes=[('std.format_two', '5.0.0')]) + + components = mc.list_components(publishes=[('std.format_one', None)]) + specs = components_to_specs(components) + assert _comp_tuple_set(*specs) == _comp_tuple_set(c1_spec, _cdap_spec) + + components = mc.list_components(calls=[(('std.format_one', None), ('std.format_one', None)), ]) + specs = components_to_specs(components) + assert _comp_tuple_set(*specs) == _comp_tuple_set(c1_spec) + + # raise if format doesn't exist + with pytest.raises(MissingEntry): + mc.list_components(calls=[(('std.format_one', '5.0.0'), ('std.format_one', None)), ]) + + components = mc.list_components(provides=[(('std.format_one', '1.0.0'), ('std.format_two', '1.5.0')), ]) + specs = components_to_specs(components) + assert _comp_tuple_set(*specs) == _comp_tuple_set(c3_spec, _cdap_spec) + + # test for listing published components + + name_pub = c1_spec["self"]["name"] + version_pub = c1_spec["self"]["version"] + mc.publish_component(user, name_pub, version_pub) + components = mc.list_components(only_published=True) + specs = components_to_specs(components) + assert _comp_tuple_set(*specs) == _comp_tuple_set(c1_spec) + + components = mc.list_components(only_published=False) + assert len(components) == 4 + + +def test_format_list(mock_cli_config, mock_db_url, catalog=None): + '''Tests the list functionality of the catalog''' + if catalog is None: + mc = MockCatalog(db_name='dcae_cli.test.db', purge_existing=True, + enforce_image=False, db_url=mock_db_url) + else: + mc = catalog + + user = "test_format_list" + + df1_spec = deepcopy(_df1_spec) + df2_spec = deepcopy(_df2_spec) + df2v2_spec = deepcopy(_df2v2_spec) + + mc.add_format(df1_spec, user) + mc.add_format(df2_spec, user) + mc.add_format(df2v2_spec, user) + + def formats_to_specs(components): + return [ json.loads(c["spec"]) for c in components ] + + # latest by default. 
ensure only v2 of df2 makes it + formats = mc.list_formats() + specs = formats_to_specs(formats) + assert _format_tuple_set(*specs) == _format_tuple_set(df1_spec, df2v2_spec) + + # list all + formats = mc.list_formats(latest=False) + specs = formats_to_specs(formats) + assert _format_tuple_set(*specs) == _format_tuple_set(df1_spec, df2_spec, df2v2_spec) + + # test listing of published formats + + name_pub = df1_spec["self"]["name"] + version_pub = df1_spec["self"]["version"] + + mc.publish_format(user, name_pub, version_pub) + formats = mc.list_formats(only_published=True) + specs = formats_to_specs(formats) + assert _format_tuple_set(*specs) == _format_tuple_set(df1_spec) + + formats = mc.list_formats(only_published=False) + assert len(formats) == 2 + + +def test_component_add_cdap(mock_cli_config, mock_db_url, catalog=None): + '''Adds a mock CDAP application''' + if catalog is None: + mc = MockCatalog(db_name='dcae_cli.test.db', purge_existing=True, + db_url=mock_db_url) + else: + mc = catalog + + user = "test_component_add_cdap" + + df1_spec = deepcopy(_df1_spec) + df2_spec = deepcopy(_df2_spec) + + mc.add_format(df1_spec, user) + mc.add_format(df2_spec, user) + + mc.add_component(user, _cdap_spec) + + name, version, _ = _spec_tuple(_cdap_spec) + jar_out, cdap_config_out, spec_out = mc.get_cdap(name, version) + + assert _cdap_spec["artifacts"][0]["uri"] == jar_out + assert _cdap_spec["auxilary"] == cdap_config_out + assert _cdap_spec == spec_out + + +def test_get_discovery_from_spec(mock_cli_config, mock_db_url): + mc = MockCatalog(db_name='dcae_cli.test.db', purge_existing=True, + enforce_image=False, db_url=mock_db_url) + + user = "test_get_discovery_from_spec" + + c1_spec_updated = deepcopy(_c1_spec) + c1_spec_updated["streams"]["publishes"][0] = { + 'format': 'std.format_one', + 'version': '1.0.0', + 'config_key': 'pub1', + 'type': 'http' + } + c1_spec_updated["streams"]["subscribes"][0] = { + 'format': 'std.format_one', + 'version': '1.0.0', + 'route': '/sub1', + 'type': 'http' + } + + # Case when c1 doesn't exist + + mc.add_format(_df1_spec, user) + mc.add_component(user, _c2_spec) + actual_params, actual_interface_map, actual_dmaap_config_keys \ + = mc.get_discovery_from_spec(user, c1_spec_updated, None) + + assert actual_params == {'bar': 2, 'foo': 1} + assert actual_interface_map == { 'pub1': [('std.comp_two', '1.0.0')], + 'call1': [('std.comp_two', '1.0.0')] } + assert actual_dmaap_config_keys == ([], []) + + # Case when c1 already exist + + mc.add_component(user,_c1_spec) + + c1_spec_updated["services"]["calls"][0]["config_key"] = "callme" + actual_params, actual_interface_map, actual_dmaap_config_keys \ + = mc.get_discovery_from_spec(user, c1_spec_updated, None) + + assert actual_params == {'bar': 2, 'foo': 1} + assert actual_interface_map == { 'pub1': [('std.comp_two', '1.0.0')], + 'callme': [('std.comp_two', '1.0.0')] } + assert actual_dmaap_config_keys == ([], []) + + # Case where add in dmaap streams + # TODO: Add in subscribes test case after spec gets pushed + + c1_spec_updated["streams"]["publishes"][0] = { + 'format': 'std.format_one', + 'version': '1.0.0', + 'config_key': 'pub1', + 'type': 'message router' + } + + actual_params, actual_interface_map, actual_dmaap_config_keys \ + = mc.get_discovery_from_spec(user, c1_spec_updated, None) + + assert actual_params == {'bar': 2, 'foo': 1} + assert actual_interface_map == { 'callme': [('std.comp_two', '1.0.0')] } + assert actual_dmaap_config_keys == (["pub1"], []) + + # Case when cdap spec doesn't exist + + cdap_spec = 
deepcopy(_cdap_spec) + cdap_spec["streams"]["publishes"][0] = { + 'format': 'std.format_one', + 'version': '1.0.0', + 'config_key': 'pub1', + 'type': 'http' + } + cdap_spec["streams"]["subscribes"][0] = { + 'format': 'std.format_two', + 'version': '1.5.0', + 'route': '/sub1', + 'type': 'http' + } + + mc.add_format(_df2_spec, user) + actual_params, actual_interface_map, actual_dmaap_config_keys \ + = mc.get_discovery_from_spec(user, cdap_spec, None) + + assert actual_params == {'program_preferences': [], 'app_config': {}, 'app_preferences': {}} + assert actual_interface_map == {'pub1': [('std.comp_two', '1.0.0'), ('std.comp_one', '1.0.0')]} + assert actual_dmaap_config_keys == ([], []) + + +def test_get_unpublished_formats(mock_cli_config, mock_db_url, catalog=None): + if catalog is None: + mc = MockCatalog(db_name='dcae_cli.test.db', purge_existing=True, + enforce_image=False, db_url=mock_db_url) + else: + mc = catalog + + user = "test_get_unpublished_formats" + + mc.add_format(_df1_spec, user) + mc.add_component(user, _c1_spec) + + # detect unpublished formats + + name_to_pub = _c1_spec["self"]["name"] + version_to_pub = _c1_spec["self"]["version"] + formats = mc.get_unpublished_formats(name_to_pub, version_to_pub) + assert [('std.format_one', '1.0.0')] == formats + + # all formats published + + mc.publish_format(user, _df1_spec["self"]["name"], _df1_spec["self"]["version"]) + formats = mc.get_unpublished_formats(name_to_pub, version_to_pub) + assert len(formats) == 0 + + +def test_get_unique_format_things(): + def create_tuple(entry): + return (entry["name"], entry["version"]) + + def get_orm(name, version): + return ("ORM", name, version) + + entries = [{"name": "abc", "version": 123}, + {"name": "abc", "version": 123}, + {"name": "abc", "version": 123}, + {"name": "def", "version": 456}, + {"name": "def", "version": 456}] + + get_unique_fake_format = partial(_get_unique_format_things, create_tuple, + get_orm) + expected = [("ORM", "abc", 123), ("ORM", "def", 456)] + + assert sorted(expected) == sorted(get_unique_fake_format(entries)) + + +def test_filter_latest(): + orms = [('std.empty.get', '1.0.0'), ('std.unknown', '1.0.0'), + ('std.unknown', '1.0.1'), ('std.empty.get', '1.0.1')] + + assert list(catalog._filter_latest(orms)) == [('std.empty.get', '1.0.1'), \ + ('std.unknown', '1.0.1')] + + +def test_raise_if_duplicate(): + class FakeOrig(object): + args = ["unique", "duplicate"] + + url = "sqlite" + orig = FakeOrig() + error = IntegrityError("Error about uniqueness", None, orig) + + with pytest.raises(catalog.DuplicateEntry): + catalog._raise_if_duplicate(url, error) + + # Couldn't find psycopg2.IntegrityError constructor nor way + # to set pgcode so decided to mock it. 
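+    # 23505 is PostgreSQL's "unique_violation" error code, which
+    # _raise_if_duplicate checks for when the backend is Postgres.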
+ class FakeOrigPostgres(object): + pgcode = "23505" + + url = "postgres" + orig = FakeOrigPostgres() + error = IntegrityError("Error about uniqueness", None, orig) + + with pytest.raises(catalog.DuplicateEntry): + catalog._raise_if_duplicate(url, error) + + +def test_get_docker_image_from_spec(): + assert "foo-image" == catalog._get_docker_image_from_spec(_c1_spec) + +def test_get_cdap_jar_from_spec(): + assert "bahpomet.com" == catalog._get_cdap_jar_from_spec(_cdap_spec) + + +def test_build_config_keys_map(): + stub_spec = { + 'streams': { + 'publishes': [ + {'format': 'std.format_one', 'version': '1.0.0', + 'config_key': 'pub1', 'type': 'http'}, + {'format': 'std.format_one', 'version': '1.0.0', + 'config_key': 'pub2', 'type': 'message_router'} + ], + 'subscribes': [ + {'format': 'std.format_one', 'version': '1.0.0', 'route': '/sub1', + 'type': 'http'}, + {'format': 'std.format_one', 'version': '1.0.0', + 'config_key': 'sub2', 'type': 'message_router'} + ] + }, + 'services': { + 'calls': [ + {'request': {'format': 'std.format_one', 'version': '1.0.0'}, + 'response': {'format': 'std.format_one', 'version': '1.0.0'}, + 'config_key': 'call1'} + ], + 'provides': [ + {'request': {'format': 'std.format_one', 'version': '1.0.0'}, + 'response': {'format': 'std.format_one', 'version': '1.0.0'}, + 'route': '/prov1'} + ] + } + } + + grouping = catalog.build_config_keys_map(stub_spec) + expected = {'call1': {'group': 'services_calls'}, 'pub1': {'type': 'http', 'group': 'streams_publishes'}, 'sub2': {'type': 'message_router', 'group': 'streams_subscribes'}, 'pub2': {'type': 'message_router', 'group': 'streams_publishes'}} + assert expected == grouping + + +def test_get_data_router_subscriber_route(): + spec = {"streams": {"subscribes": [ { "type": "data_router", "config_key": + "alpha", "route": "/alpha" }, { "type": "message_router", "config_key": + "beta" } ]}} + + assert "/alpha" == catalog.get_data_router_subscriber_route(spec, "alpha") + + with pytest.raises(catalog.MissingEntry): + catalog.get_data_router_subscriber_route(spec, "beta") + + with pytest.raises(catalog.MissingEntry): + catalog.get_data_router_subscriber_route(spec, "gamma") + + +if __name__ == '__main__': + '''Test area''' + pytest.main([__file__, ]) diff --git a/mod/onboardingapi/dcae_cli/catalog/mock/tests/test_schema.py b/mod/onboardingapi/dcae_cli/catalog/mock/tests/test_schema.py new file mode 100644 index 0000000..90674d9 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/catalog/mock/tests/test_schema.py @@ -0,0 +1,421 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +''' +Tests the mock catalog +''' +import pytest +import json, copy + +from dcae_cli.catalog.mock.schema import validate_component, validate_format,apply_defaults_docker_config, apply_defaults +from dcae_cli.catalog.mock import schema +from dcae_cli.util.exc import DcaeException + + +format_test = r''' +{ + "self": { + "name": "asimov.format.integerClassification", + "version": "1.0.0", + "description": "Represents a single classification from a machine learning model - just a test version" + }, + "dataformatversion": "1.0.0", + "jsonschema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "classification": { + "type": "string" + } + }, + "additionalProperties": false + } +} +''' + + +component_test = r''' +{ + "self": { + "version": "1.0.0", + "name": "asimov.component.kpi_anomaly", + "description": "Classifies VNF KPI data as anomalous", + "component_type": "docker" + }, + "streams": { + "subscribes": [ + { + "format": "dcae.vnf.kpi", + "version": "1.0.0", + "route": "/data", + "type": "http" + }, + { + "format":"std.format_one", + "version":"1.0.0", + "config_key":"sub2", + "type": "message router" + } + ], + "publishes": [ + { + "format": "asimov.format.integerClassification", + "version": "1.0.0", + "config_key": "prediction", + "type": "http" + }, + { + "format":"std.format_one", + "version":"1.0.0", + "config_key":"pub2", + "type": "message router" + } + ] + }, + "services": { + "calls": [], + "provides": [ + { + "route": "/score-vnf", + "request": { + "format": "dcae.vnf.kpi", + "version": "1.0.0" + }, + "response": { + "format": "asimov.format.integerClassification", + "version": "1.0.0" + } + } + ] + }, + "parameters": [ + { + "name": "threshold", + "value": 0.75, + "description": "Probability threshold to exceed to be anomalous", + "designer_editable": false, + "sourced_at_deployment": false, + "policy_editable": false + } + ], + "artifacts": [ + { + "uri": "somedockercontainerpath", + "type": "docker image" + } + ], + "auxilary": { + "healthcheck": { + "type": "http", + "endpoint": "/health" + } + } +} +''' + +cdap_component_test = r''' +{ + "self":{ + "name":"std.cdap_comp", + "version":"0.0.0", + "description":"cdap test component", + "component_type":"cdap" + }, + "streams":{ + "publishes":[ + { + "format":"std.format_one", + "version":"1.0.0", + "config_key":"pub1", + "type": "http" + }, + { + "format":"std.format_one", + "version":"1.0.0", + "config_key":"pub2", + "type": "message router" + } + ], + "subscribes":[ + { + "format":"std.format_two", + "version":"1.5.0", + "route":"/sub1", + "type": "http" + }, + { + "format":"std.format_one", + "version":"1.0.0", + "config_key":"sub2", + "type": "message router" + } + ] + }, + "services":{ + "calls":[ + + ], + "provides":[ + { + "request":{ + "format":"std.format_one", + "version":"1.0.0" + }, + "response":{ + "format":"std.format_two", + "version":"1.5.0" + }, + "service_name":"baphomet", + "service_endpoint":"rises", + "verb":"GET" + } + ] + }, + "parameters":[ + + ], + "artifacts": [ + { + "uri": "somecdapjarurl", + "type": "jar" + } + ], + "auxilary": { + "streamname":"who", + "artifact_name" : "HelloWorld", + "artifact_version" : "3.4.3", + "programs" : [ + {"program_type" : "flows", "program_id" : "WhoFlow"}, + {"program_type" : "services", "program_id" : "Greeting"} + ], + "namespace" : 
"hw" + } +} +''' + + +def test_basic(mock_cli_config): + validate_component(json.loads(component_test)) + validate_format(json.loads(format_test)) + validate_component(json.loads(cdap_component_test)) + + # Test with DR publishes for cdap + dr_publishes = { "format":"std.format_one", "version":"1.0.0", + "config_key":"pub3", "type": "data router" } + cdap_valid = json.loads(cdap_component_test) + cdap_valid["streams"]["publishes"].append(dr_publishes) + + # Test with DR subscribes for cdap + cdap_invalid = json.loads(cdap_component_test) + ss = cdap_invalid["streams"]["subscribes"][0] + ss["type"] = "data_router" + ss["config_key"] = "nada" + cdap_invalid["streams"]["subscribes"][0] = ss + + with pytest.raises(DcaeException): + validate_component(cdap_invalid) + + + +def test_validate_docker_config(mock_cli_config): + + def compose_spec(config): + spec = json.loads(component_test) + spec["auxilary"] = config + return spec + + good_docker_configs = [ + { + "healthcheck": { + "type": "http", + "endpoint": "/health", + "interval": "15s", + "timeout": "1s" + } + }, + { + "healthcheck": { + "type": "script", + "script": "curl something" + } + }] + + for good_config in good_docker_configs: + spec = compose_spec(good_config) + assert validate_component(spec) == None + + bad_docker_configs = [ + #{}, + { + "healthcheck": {} + }, + { + "healthcheck": { + "type": "http" + } + }, + { + "healthcheck": { + "type": "http", + "script": "huh" + } + }] + + for bad_config in bad_docker_configs: + with pytest.raises(DcaeException): + spec = compose_spec(bad_config) + validate_component(spec) + + +def test_validate_cdap_config(mock_cli_config): + + def compose_spec(config): + spec = json.loads(cdap_component_test) + spec["auxilary"] = config + return spec + + good_cdap_configs = [ + { + "streamname":"streamname", + "artifact_version":"6.6.6", + "artifact_name" : "testname", + "programs" : [], + }, + { + "streamname":"streamname", + "artifact_version":"6.6.6", + "artifact_name" : "testname", + "programs" : [{"program_type" : "flows", "program_id" : "flow_id"}], + "program_preferences" : [{"program_type" : "flows", "program_id" : "flow_id", "program_pref" : {"he" : "shall rise"}}], + "namespace" : "this should be an optional field", + "app_preferences" : {"he" : "shall rise"} + } + ] + + for good_config in good_cdap_configs: + spec = compose_spec(good_config) + assert validate_component(spec) == None + + bad_cdap_configs = [ + {}, + {"YOU HAVE" : "ALWAYS FAILED ME"} + ] + + for bad_config in bad_cdap_configs: + with pytest.raises(DcaeException): + spec = compose_spec(bad_config) + validate_component(bad_config) + + +def test_apply_defaults(): + definition = { "length": { "default": 10 }, "duration": { "default": "10s" } } + + # Test: Add expected properties + properties = {} + actual = apply_defaults(definition, properties) + assert actual == { "length": 10, "duration": "10s" } + + # Test: Don't mess with existing values + properties = { "length": 100, "duration": "100s" } + actual = apply_defaults(definition, properties) + assert actual == properties + + # Test: No defaults to apply + definition = { "length": {}, "duration": {} } + properties = { "width": 100 } + actual = apply_defaults(definition, properties) + assert actual == properties + + # Test: Nested object + definition = { "length": { "default": 10 }, "duration": { "default": "10s" }, + "location": { "properties": { "lat": { "default": "40" }, + "long": { "default": "75" }, "alt": {} } } } + actual = apply_defaults(definition, {}) + assert actual == 
{'duration': '10s', 'length': 10, + 'location': {'lat': '40', 'long': '75'}} + + +def test_apply_defaults_docker_config(mock_cli_config): + # Test: Adding of missing expected properties for http + dc = { "healthcheck": { "type": "http", "endpoint": "/foo" } } + actual = apply_defaults_docker_config(dc) + + assert "interval" in actual["healthcheck"] + assert "timeout" in actual["healthcheck"] + + # Test: Adding of missing expected properties for script + dc = { "healthcheck": { "type": "script", "script": "/bin/do-something" } } + actual = apply_defaults_docker_config(dc) + + assert "interval" in actual["healthcheck"] + assert "timeout" in actual["healthcheck"] + + # Test: Expected properties already exist + dc = { "healthcheck": { "type": "http", "endpoint": "/foo", + "interval": "10000s", "timeout": "100000s" } } + actual = apply_defaults_docker_config(dc) + assert dc == actual + + # Test: Never should happen + dc = { "healthcheck": { "type": "bogus" } } + actual = apply_defaults_docker_config(dc) + assert dc == actual + + +def test_validate(): + fake_schema = { + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Test schema", + "type": "object", + "properties": { + "foo": { "type": "string" }, + "bar": { "type": "integer" } + }, + "required": ["foo", "bar"] + } + + good_path = "/correct_path" + + def fetch_schema(path): + if path == good_path: + return fake_schema + else: + raise schema.FetchSchemaError("Schema not found") + + # Success case + + good_instance = { "foo": "hello", "bar": 1776 } + + schema._validate(fetch_schema, good_path, good_instance) + + # Error from validating + + bad_instance = {} + + with pytest.raises(DcaeException): + schema._validate(fetch_schema, good_path, bad_instance) + + # Error from fetching + + bad_path = "/wrong_path" + + with pytest.raises(DcaeException): + schema._validate(fetch_schema, bad_path, good_instance) diff --git a/mod/onboardingapi/dcae_cli/cli.py b/mod/onboardingapi/dcae_cli/cli.py new file mode 100644 index 0000000..fc2849b --- /dev/null +++ b/mod/onboardingapi/dcae_cli/cli.py @@ -0,0 +1,115 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +""" +Provides entry-level logic for building the CLI. Commands and heavy-lifting logic should be in their own module. 
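+
+Example invocations (the console script name `dcae-cli` follows the project
+README; the commands shown are registered below):
+
+    $ dcae-cli --verbose catalog list
+    $ dcae-cli http --live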
+""" +import click + +from dcae_cli import util +from dcae_cli.commands.catalog import catalog +from dcae_cli.commands.component import component +from dcae_cli.commands.data_format import data_format +from dcae_cli.commands.profiles import profiles +from dcae_cli.catalog import get_catalog +from dcae_cli.util.exc import DcaeException +from dcae_cli.util.logger import get_logger +from dcae_cli.util import config as conf +from dcae_cli.util import profiles as prof + + +log = get_logger('cli') + + +def _reinit_cli(): + """Reinit cli""" + click.echo("Warning! Reinitializing your dcae-cli configuration") + try: + conf.reinit_config() + prof.reinit_profiles() + except Exception as e: + raise DcaeException("Failed to reinitialize configuration: {0}".format(e)) + +def _reinit_callback(ctx, param, value): + """Callback used for the eager --reinit option""" + if not value or ctx.resilient_parsing: + return + _reinit_cli() + click.echo("Reinitialize done") + ctx.exit() + + + +@click.group() +@click.option('--verbose', '-v', is_flag=True, default=False, help='Prints INFO-level logs to screen.') +# This is following the same pattern as --version +# http://click.pocoo.org/5/options/#callbacks-and-eager-options +@click.option('--reinit', is_flag=True, callback=_reinit_callback, expose_value=False, + is_eager=True, help='Re-initialize dcae-cli configuration') +@click.version_option() +@click.pass_context +def cli(ctx, verbose): + + if ctx.obj is None: + ctx.obj = dict() + + if 'config' not in ctx.obj: + config = conf.get_config() + + if conf.should_force_reinit(config): + if click.confirm("You must reinitialize your dcae-cli configuration. Reinitialize now?", + abort=True): + _reinit_cli() + + ctx.obj['config'] = config + else: + config = ctx.obj['config'] + + if 'catalog' not in ctx.obj: + try: + ctx.obj['catalog'] = get_catalog(**config) + except Exception as e: + log.error(e) + raise DcaeException("Having issues connecting to the onboarding catalog") + + if verbose: + util.logger.set_verbose() + + +@cli.command(name="http", help="Run HTTP API") +@click.option('--live', is_flag=True, default=False, help='Starts up the HTTP API in live mode which means it binds to 80') +@click.pass_context +def run_http_api(ctx, live): + catalog = ctx.obj['catalog'] + should_debug = not live + # Importing http module has to be here otherwise unit tests will break + # because http module makes config calls when the module is loaded (global). + # Config calls must always be done lazily as much as possible in order for the + # mock_cli_config pytest.fixture to kick in. + from dcae_cli import http + http.start_http_server(catalog, debug=should_debug) + + + +cli.add_command(catalog) +cli.add_command(component) +cli.add_command(data_format) +cli.add_command(profiles) diff --git a/mod/onboardingapi/dcae_cli/commands/__init__.py b/mod/onboardingapi/dcae_cli/commands/__init__.py new file mode 100644 index 0000000..ceddbb9 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/__init__.py @@ -0,0 +1,21 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- diff --git a/mod/onboardingapi/dcae_cli/commands/catalog/__init__.py b/mod/onboardingapi/dcae_cli/commands/catalog/__init__.py new file mode 100644 index 0000000..93f14ee --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/catalog/__init__.py @@ -0,0 +1,21 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +from .commands import catalog diff --git a/mod/onboardingapi/dcae_cli/commands/catalog/commands.py b/mod/onboardingapi/dcae_cli/commands/catalog/commands.py new file mode 100644 index 0000000..dc6b27a --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/catalog/commands.py @@ -0,0 +1,115 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. 
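+
+# -*- coding: utf-8 -*-
+# Illustrative usage of this command group (the executable name is assumed
+# from the project README; output depends on the catalog contents):
+#
+#   $ dcae-cli catalog list --expanded
+#   $ dcae-cli catalog show somename:1.0.0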
+
+"""
+Queries onboarding catalog
+"""
+import click
+
+from dcae_cli.commands import util
+
+
+@click.group()
+def catalog():
+    pass
+
+
+@catalog.command(name="list")
+@click.option("--expanded", is_flag=True, default=False, help="Display the expanded view - show all versions and all statuses")
+#TODO: @click.argument('query')
+@click.pass_obj
+def action_list(obj, expanded):
+    """Lists resources in the onboarding catalog"""
+    # Query both components and data formats. Display both sets.
+
+    catalog = obj['catalog']
+
+    only_latest = not expanded
+    only_published = not expanded
+
+    # TODO: Probably want to implement pagination
+    comps = catalog.list_components(latest=only_latest, only_published=only_published)
+    dfs = catalog.list_formats(latest=only_latest, only_published=only_published)
+
+    def format_record_component(obj):
+        when_published = obj["when_published"].date() \
+                if obj["when_published"] else ""
+
+        return (obj["name"], obj["version"], obj["component_type"],
+                util.format_description(obj["description"]), obj["owner"],
+                util.get_status_string(obj), when_published)
+
+    comps = [ format_record_component(comp) for comp in comps ]
+
+    click.echo("")
+    click.echo("Components:")
+    click.echo(util.create_table(('Name', 'Version', 'Type', 'Description', 'Owner', 'Status',
+        'Published'), comps))
+
+    def format_record_format(obj):
+        when_published = obj["when_published"].date() \
+                if obj["when_published"] else ""
+
+        return (obj["name"], obj["version"],
+                util.format_description(obj["description"]), obj["owner"],
+                util.get_status_string(obj), when_published)
+
+    dfs = [ format_record_format(df) for df in dfs ]
+
+    click.echo("")
+    click.echo("Data formats:")
+    click.echo(util.create_table(('Name', 'Version', 'Description', 'Owner', 'Status',
+        'Published'), dfs))
+
+
+@catalog.command(name="show")
+@click.argument("resource", metavar="name:version")
+@click.pass_obj
+def action_show(obj, resource):
+    """Provides more information about a resource"""
+    # Query both components and data formats. Display both sets.
+    name, ver = util.parse_input(resource)
+    catalog = obj['catalog']
+    spec = None
+
+    try:
+        spec = catalog.get_component_spec(name, ver)
+
+        click.echo("")
+        click.echo("Component specification")
+        click.echo("-----------------------")
+        click.echo(util.format_json(spec))
+        click.echo("")
+    except Exception:
+        pass
+
+    try:
+        spec = obj['catalog'].get_format_spec(name, ver)
+
+        click.echo("")
+        click.echo("Data format")
+        click.echo("-----------")
+        click.echo(util.format_json(spec))
+        click.echo("")
+    except Exception:
+        pass
+
+    if not spec:
+        click.echo("No matching component or data format found")
diff --git a/mod/onboardingapi/dcae_cli/commands/component/__init__.py b/mod/onboardingapi/dcae_cli/commands/component/__init__.py new file mode 100644 index 0000000..b1f4a8f --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/component/__init__.py @@ -0,0 +1,25 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +""" +Provides the component group +""" +from .commands import component diff --git a/mod/onboardingapi/dcae_cli/commands/component/commands.py b/mod/onboardingapi/dcae_cli/commands/component/commands.py new file mode 100644 index 0000000..b2483d1 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/component/commands.py @@ -0,0 +1,394 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +""" +Provides component commands +""" +import json +from pprint import pformat + +import click +import os + +from discovery_client import resolve_name + +from dcae_cli.util import profiles, load_json, dmaap, inputs, policy +from dcae_cli.util.run import run_component, dev_component +from dcae_cli.util import discovery as dis +from dcae_cli.util import docker_util as du +from dcae_cli.util.discovery import DiscoveryNoDownstreamComponentError +from dcae_cli.util.undeploy import undeploy_component +from dcae_cli.util.exc import DcaeException + +from dcae_cli.commands import util +from dcae_cli.commands.util import parse_input, parse_input_pair, create_table + +from dcae_cli.catalog.exc import MissingEntry + + +@click.group() +def component(): + pass + + +@component.command(name='list') +@click.option('--latest', is_flag=True, default=True, help='Only list the latest version of components which match the filter criteria') +@click.option('--subscribes', '-sub', multiple=True, help='Only list components which subscribe to FORMAT') +@click.option('--publishes', '-pub', multiple=True, help='Only list components which publish FORMAT') +@click.option('--provides', '-pro', multiple=True, type=(str, str), help='Only list components which provide services REQ_FORMAT RESP_FORMAT') +@click.option('--calls', '-cal', multiple=True, type=(str, str), help='Only list components which call services REQ_FORMAT RESP_FORMAT') +@click.option('--deployed', is_flag=True, default=False, help='Display the deployed view. 
Shows details of deployed instances.') +@click.pass_obj +def list_component(obj, latest, subscribes, publishes, provides, calls, deployed): + '''Lists components in the public catalog. Uses flags to filter results.''' + subs = list(map(parse_input, subscribes)) if subscribes else None + pubs = list(map(parse_input, publishes)) if publishes else None + provs = list(map(parse_input_pair, provides)) if provides else None + cals = list(map(parse_input_pair, calls)) if calls else None + + user, catalog = obj['config']['user'], obj['catalog'] + # TODO: How about components that you don't own but you have deployed? + comps = catalog.list_components(subs, pubs, provs, cals, latest, user=user) + + active_profile = profiles.get_profile() + consul_host = active_profile.consul_host + + click.echo("Active profile: {0}".format(profiles.get_active_name())) + click.echo("") + + def format_resolve_results(results): + """Format the results from the resolve_name function call""" + if results: + # Most likely the results will always be length one until we migrate + # to a different way of registering names + return "\n".join([ pformat(result) for result in results ]) + else: + return None + + def get_instances_as_rows(comp): + """Get all deployed running instances of a component plus details about + those instances and return as a list of rows""" + cname = comp["name"] + cver = comp["version"] + ctype = comp["component_type"] + + instances = dis.get_healthy_instances(user, cname, cver) + instances_status = ["Healthy"]*len(instances) + instances_conns = [ format_resolve_results(resolve_name(consul_host, instance)) \ + for instance in instances ] + + instances_defective = dis.get_defective_instances(user, cname, cver) + instances_status += ["Defective"]*len(instances_defective) + instances_conns += [""]*len(instances_defective) + + instances += instances_defective + + return list(zip(instances, instances_status, instances_conns)) + + # Generate grouped rows where a grouped row is (name, version, type, [instances]) + grouped_rows = [ (comp, get_instances_as_rows(comp)) for comp in comps ] + + # Display + if deployed: + def display_deployed(comp, instances): + cname = comp["name"] + cver = comp["version"] + ctype = comp["component_type"] + + click.echo("Name: {0}".format(cname)) + click.echo("Version: {0}".format(cver)) + click.echo("Type: {0}".format(ctype)) + click.echo(create_table(('Instance', 'Status', 'Connection'), instances)) + click.echo("") + + [ display_deployed(*row) for row in grouped_rows ] + else: + def format_row(comp, instances): + return comp["name"], comp["version"], comp["component_type"], \ + util.format_description(comp["description"]), \ + util.get_status_string(comp), comp["modified"], len(instances) + + rows = [ format_row(*grouped_row) for grouped_row in grouped_rows ] + click.echo(create_table(('Name', 'Version', 'Type', 'Description', + 'Status', 'Modified', '#Deployed'), rows)) + click.echo("\nUse the \"--deployed\" option to see more details on deployments") + + +@component.command() +@click.argument('component', metavar="name:version") +@click.pass_obj +def show(obj, component): + '''Provides more information about a COMPONENT''' + cname, cver = parse_input(component) + catalog = obj['catalog'] + comp_spec = catalog.get_component_spec(cname, cver) + + click.echo(util.format_json(comp_spec)) + + +_help_dmaap_file = """ +Path to a file that contains a json of dmaap client information. 
The structure of the json is expected to be:
+
+    {
+    <config_key1>: {..client object 1..},
+    <config_key2>: {..client object 2..},
+    ...
+    }
+
+Where "client object" can be for message router or data router. Each "config_key" matches a "config_key" value specified in the "streams" section of the component specification.
+
+Please refer to the documentation for examples of "client object".
+"""
+
+def _parse_dmaap_file(dmaap_file):
+    try:
+        with open(dmaap_file, 'r') as f:
+            dmaap_map = json.load(f)
+            dmaap.validate_dmaap_map_schema(dmaap_map)
+            return dmaap.apply_defaults_dmaap_map(dmaap_map)
+    except Exception as e:
+        message = "Problems with parsing the dmaap file. Check to make sure that it is a valid json and is in the expected format."
+        raise DcaeException(message)
+
+
+_help_inputs_file = """
+Path to a file that contains a json of values to be bound to configuration parameters that have been marked as "sourced_at_deployment". The structure of the json is expected to be:
+
+    {
+    <parameter1 name>: value,
+    <parameter2 name>: value
+    }
+
+The "parameter name" is the value of the "name" property for the given configuration parameter.
+"""
+
+def _parse_inputs_file(inputs_file):
+    try:
+        with open(inputs_file, 'r') as f:
+            inputs_map = json.load(f)
+            # TODO: Validation of schema in the future?
+            return inputs_map
+    except Exception as e:
+        message = "Problems with parsing the inputs file. Check to make sure that it is a valid json and is in the expected format."
+        raise DcaeException(message)
+
+
+_help_policy_file = """
+Path to a file that contains a json of an (update/remove) Policy change.
+All "policies" can also be specified.
+The structure of the json is expected to be:
+
+{
+"updated_policies": [{"policyName": "value", "": ""},{"policyName": "value", "": ""}],
+"removed_policies": [{"policyName": "value", "": ""},{"policyName": "value", "": ""}],
+"policies": [{"policyName": "value", "": ""},{"policyName": "value", "": ""}]
+}
+"""
+
+def _parse_policy_file(policy_file):
+    try:
+        with open(policy_file, 'r') as f:
+            policy_change_file = json.load(f)
+            policy.validate_against_policy_schema(policy_change_file)
+            return policy_change_file
+    except Exception as e:
+        click.echo(format(e))
+        message = "Problems with parsing the Policy file. Check to make sure that it is a valid json and is in the expected format."
+        raise DcaeException(message)
+
+@component.command()
+@click.option('--external-ip', '-ip', default=None, help='The external IP address of the Docker host. Only used for Docker components.')
+@click.option('--additional-user', default=None, help='Additional user to grab instances from.')
+@click.option('--attached', is_flag=True, help='(Docker) dcae-cli deploys then attaches to the component when set')
+@click.option('--force', is_flag=True, help='Force component to run without valid downstream dependencies')
+@click.option('--dmaap-file', type=click.Path(resolve_path=True, exists=True, dir_okay=False),
+        help=_help_dmaap_file)
+@click.option('--inputs-file', type=click.Path(resolve_path=True, exists=True, dir_okay=False),
+        help=_help_inputs_file)
+@click.argument('component')
+@click.pass_obj
+def run(obj, external_ip, additional_user, attached, force, dmaap_file, component,
+        inputs_file):
+    '''Runs latest (or specific) COMPONENT version. 
You may optionally specify version via COMPONENT:VERSION''' + + click.echo("Running the Component.....") + click.echo("") + + cname, cver = parse_input(component) + user, catalog = obj['config']['user'], obj['catalog'] + + dmaap_map = _parse_dmaap_file(dmaap_file) if dmaap_file else {} + inputs_map = _parse_inputs_file(inputs_file) if inputs_file else {} + + try: + run_component(user, cname, cver, catalog, additional_user, attached, force, + dmaap_map, inputs_map, external_ip) + except DiscoveryNoDownstreamComponentError as e: + message = "Either run a compatible downstream component first or run with the --force flag to ignore this error" + raise DcaeException(message) + except inputs.InputsValidationError as e: + click.echo("ERROR: There is a problem. {0}".format(e)) + click.echo("") + message = "Component requires inputs. Please look at the use of --inputs-file and make sure the format is correct" + raise DcaeException(message) + +@component.command() +@click.argument('component') +@click.pass_obj +def undeploy(obj, component): + '''Undeploy latest (or specific) COMPONENT version. You may optionally specify version via COMPONENT:VERSION''' + cname, cver = parse_input(component) + user, catalog = obj['config']['user'], obj['catalog'] + undeploy_component(user, cname, cver, catalog) + + +@component.command() +@click.argument('specification', type=click.Path(resolve_path=True, exists=True)) +@click.option('--additional-user', default=None, help='Additional user to grab instances from.') +@click.option('--force', is_flag=True, help='Force component to run without valid downstream dependencies') +@click.option('--dmaap-file', type=click.Path(resolve_path=True, exists=True, dir_okay=False), + help=_help_dmaap_file) +@click.option('--inputs-file', type=click.Path(resolve_path=True, exists=True, dir_okay=False), + help=_help_inputs_file) +@click.pass_obj +def dev(obj, specification, additional_user, force, dmaap_file, inputs_file): + '''Set up component in development for discovery, use for local development''' + user, catalog = obj['config']['user'], obj['catalog'] + + dmaap_map = _parse_dmaap_file(dmaap_file) if dmaap_file else {} + inputs_map = _parse_inputs_file(inputs_file) if inputs_file else {} + + with open(specification, 'r+') as f: + spec = json.loads(f.read()) + try: + dev_component(user, catalog, spec, additional_user, force, dmaap_map, + inputs_map) + except DiscoveryNoDownstreamComponentError as e: + message = "Either run a compatible downstream component first or run with the --force flag to ignore this error" + raise DcaeException(message) + except inputs.InputsValidationError as e: + click.echo("ERROR: There is a problem. {0}".format(e)) + click.echo("") + message = "Component requires inputs. Please look at the use of --inputs-file and make sure the format is correct" + raise DcaeException(message) + + +@component.command() +@click.argument('component') +@click.pass_obj +def publish(obj, component): + """Pushes a COMPONENT to the public catalog""" + name, version = parse_input(component) + user, catalog = obj['config']['user'], obj['catalog'] + + try: + # Dependent data formats must be published first before publishing + # component. 
Check that here + unpub_formats = catalog.get_unpublished_formats(name, version) + + if unpub_formats: + click.echo("ERROR: You must publish dependent data formats first:") + click.echo("") + click.echo("\n".join([":".join(uf) for uf in unpub_formats])) + click.echo("") + return + except MissingEntry as e: + raise DcaeException("Component not found") + + if catalog.publish_component(user, name, version): + click.echo("Component has been published") + else: + click.echo("ERROR: Component could not be published") + + +@component.command() +@click.option('--update', is_flag=True, help='Updates a locally added component if it has not already been published') +@click.argument('specification', type=click.Path(resolve_path=True, exists=True)) +@click.pass_obj +def add(obj, update, specification): + """Add Component to local onboarding catalog""" + user, catalog = obj['config']['user'], obj['catalog'] + + spec = load_json(specification) + catalog.add_component(user, spec, update) + + +@component.command() +@click.option('--policy-file', type=click.Path(resolve_path=True, exists=True, dir_okay=False), help=_help_policy_file) +@click.argument('component') +@click.pass_obj +def reconfig(obj, policy_file, component): + """Reconfigure COMPONENT for Policy change. + Modify Consul KV pairs for ('updated_policies', 'removed_policies', and 'policies') for Policy change event, + Execute the reconfig script(s) in the Docker container""" + + click.echo("Running Component Reconfiguration.....") + click.echo("") + + # Read and Validate the policy-file + policy_change_file = _parse_policy_file(policy_file) if policy_file else {} + + if not (policy_change_file): + click.echo("ERROR: For component 'reconfig', you must specify a --policy-file") + click.echo("") + return + else: + # The Component Spec contains the Policy 'Reconfig Script Path/ScriptName' + cname, cver = parse_input(component) + catalog = obj['catalog'] + comp_spec = catalog.get_component_spec(cname, cver) + + # Check if component is running and healthy + active_profile = profiles.get_profile() + consul_host = active_profile.consul_host + service_name = os.environ["SERVICE_NAME"] + if dis.is_healthy(consul_host, service_name): + pass + else: + click.echo("ERROR: The component must be running and healthy. 
It is not.") + click.echo("") + return + + try: + policy_reconfig_path = comp_spec['auxilary']['policy']['script_path'] + except KeyError: + click.echo("ERROR: Policy Reconfig Path (auxilary/policy/script_path) is not specified in the Component Spec") + click.echo("") + return + + kvUpdated = dis.policy_update(policy_change_file, dis.default_consul_host()) + + if kvUpdated: + active_profile = profiles.get_profile() + docker_logins = dis.get_docker_logins() + + command = dis.build_policy_command(policy_reconfig_path, policy_change_file, dis.default_consul_host()) + + # Run the Policy Reconfig script + client = du.get_docker_client(active_profile, docker_logins) + du.reconfigure(client, service_name, command) + else: + click.echo("ERROR: There was a problem updating the policies in Consul") + click.echo("") + return + + click.echo("") + click.echo("The End of Component Reconfiguration") diff --git a/mod/onboardingapi/dcae_cli/commands/data_format/__init__.py b/mod/onboardingapi/dcae_cli/commands/data_format/__init__.py new file mode 100644 index 0000000..b025f61 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/data_format/__init__.py @@ -0,0 +1,25 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +""" +Provides the data format group +""" +from .commands import data_format diff --git a/mod/onboardingapi/dcae_cli/commands/data_format/commands.py b/mod/onboardingapi/dcae_cli/commands/data_format/commands.py new file mode 100644 index 0000000..b952336 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/data_format/commands.py @@ -0,0 +1,163 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+# -*- coding: utf-8 -*-
+"""
+Provides data format commands
+"""
+import json
+
+import click
+
+import genson
+
+import os
+
+from jsonschema import Draft4Validator
+
+from dcae_cli.util import load_json
+from dcae_cli.util.logger import get_logger
+
+from dcae_cli.commands import util
+from dcae_cli.commands.util import create_table, parse_input
+
+from dcae_cli.catalog.exc import MissingEntry, DcaeException
+
+
+logger = get_logger('DataFormatCommand')
+
+
+@click.group()
+def data_format():
+    pass
+
+
+@data_format.command()
+@click.option('--update', is_flag=True, help='Updates a locally added data format if it has not already been published')
+@click.argument('specification', type=click.Path(resolve_path=True, exists=True))
+@click.pass_obj
+def add(obj, update, specification):
+    '''Tracks a data format specification locally, but does not push it to the catalog'''
+    spec = load_json(specification)
+    user, catalog = obj['config']['user'], obj['catalog']
+    catalog.add_format(spec, user, update)
+
+
+@data_format.command(name='list')
+@click.option('--latest', is_flag=True, help='Only list the latest version of data formats')
+@click.pass_obj
+def list_format(obj, latest):
+    """Lists all your Data Formats"""
+    user, catalog = obj['config']['user'], obj['catalog']
+    dfs = catalog.list_formats(latest, user=user)
+
+    def format_record(df):
+        return (df["name"], df["version"],
+                util.format_description(df["description"]),
+                util.get_status_string(df), df["modified"])
+
+    dfs = [ format_record(df) for df in dfs ]
+
+    click.echo("")
+    click.echo("Data formats for {0}".format(user))
+    click.echo(create_table(('Name', 'Version', 'Description', 'Status', 'Modified'), dfs))
+
+
+@data_format.command()
+@click.argument('data-format', metavar="name:version")
+@click.pass_obj
+def show(obj, data_format):
+    '''Provides more information about a Data Format'''
+    name, ver = parse_input(data_format)
+    spec = obj['catalog'].get_format_spec(name, ver)
+
+    click.echo(util.format_json(spec))
+
+
+@data_format.command()
+@click.argument('data-format')
+@click.pass_obj
+def publish(obj, data_format):
+    """Publishes a data format to make it publicly available"""
+    name, version = parse_input(data_format)
+    user, catalog = obj['config']['user'], obj['catalog']
+
+    if catalog.publish_format(user, name, version):
+        click.echo("Data format has been published")
+    else:
+        click.echo("ERROR: Data format could not be published")
+
+@data_format.command()
+@click.option('--keywords', is_flag=True, help='Adds a template of possible descriptive keywords', default=False)
+@click.argument('name_version', metavar="name:version", required=True)
+@click.argument('file-or-dir-path', type=click.Path(resolve_path=True, exists=True, dir_okay=True, file_okay=True, readable=True), metavar="file-or-dir-path")
+@click.pass_obj
+def generate(obj, name_version, file_or_dir_path, keywords):
+    '''Creates a data format schema from a JSON example file or a directory of such files'''
+    name, version = parse_input(name_version)
+    if version is None:
+        version = ""
+    schema = genson.Schema()
+    if os.path.isfile(file_or_dir_path):
+        addfile(file_or_dir_path, schema)
+    else:
+        foundJSON = False
+        for root, dirs, files in os.walk(file_or_dir_path):
+            for filename in files:
+                # Join with the directory currently being walked, not the
+                # top-level path, so files in subdirectories are found too
+                fullfilename = os.path.join(root, filename)
+                addfile(fullfilename, schema)
+                foundJSON = True
+        if 
not foundJSON:
+            raise DcaeException('No JSON files found in ' + file_or_dir_path)
+
+    json_obj = json.loads(schema.to_json())
+    json_obj['$schema'] = "http://json-schema.org/draft-04/schema#"
+    jschema = json.dumps(json_obj)
+    jschema = jschema.replace('"required":', '"additionalProperties": true, "required":')
+    jschema = jschema.replace('"type":', ' "description": "", "type":')
+
+    if keywords:
+        jschema = jschema.replace('"type": "string"', ' "maxLength": 0, "minLength": 0, "pattern": "", "type": "string"')
+        jschema = jschema.replace('"type": "integer"', ' "maximum": 0, "minimum": 0, "multipleOf": 0, "type": "integer"')
+        jschema = jschema.replace('"type": "array"', ' "maxItems": 0, "minItems": 0, "uniqueItems": false, "type": "array"')
+
+    jschema = '{ "self": { "name": "' + name + '", "version": "' + version + '", "description": ""} , "dataformatversion": "1.0.0", "jsonschema": ' + jschema + '}'
+    #Draft4Validator.check_schema(json.loads(jschema))
+    try:
+        print(json.dumps(json.loads(jschema), sort_keys=True, indent=4))
+    except ValueError:
+        raise DcaeException('Problem with JSON generation')
+
+def addfile(filename, schema):
+    try:
+        fileadd = open(filename, "r")
+    except IOError:
+        raise DcaeException('Cannot open ' + filename)
+    try:
+        json_object = json.loads(fileadd.read())
+        schema.add_object(json_object)
+    except ValueError:
+        raise DcaeException('Bad JSON file: ' + filename)
+    finally:
+        fileadd.close()
+
diff --git a/mod/onboardingapi/dcae_cli/commands/profiles/__init__.py b/mod/onboardingapi/dcae_cli/commands/profiles/__init__.py new file mode 100644 index 0000000..0d71c1b --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/profiles/__init__.py @@ -0,0 +1,25 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+# -*- coding: utf-8 -*-
+"""
+Provides the profiles group
+"""
+from .commands import profiles
diff --git a/mod/onboardingapi/dcae_cli/commands/profiles/commands.py b/mod/onboardingapi/dcae_cli/commands/profiles/commands.py new file mode 100644 index 0000000..df34b5c --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/profiles/commands.py @@ -0,0 +1,87 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. 
+# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +""" +Provides profiles commands +""" +import json + +import click + +from dcae_cli.util.exc import DcaeException +from dcae_cli.util.profiles import (get_profiles, activate_profile, get_active_name, update_profile, + delete_profile, create_profile) + + +@click.group() +def profiles(): + pass + + +@profiles.command() +@click.argument('name') +def activate(name): + '''Sets profile (name) as the active profile''' + activate_profile(name) + + +@profiles.command(name='list') +def list_profiles(): + '''Lists available profiles''' + profiles = get_profiles(include_active=False) + active = get_active_name() + names = sorted(profiles.keys()) + outputs = ("{} {}".format(' ' if not name == active else '* ', name) for name in names) + click.echo('\n'.join(outputs)) + + +@profiles.command() +@click.argument('name') +def show(name): + '''Provides more information about a Profile''' + profiles = get_profiles() + try: + click.echo(json.dumps(profiles[name], sort_keys=True, indent=4)) + except KeyError as e: + raise DcaeException("Profile '{}' does not exist.".format(e)) + + +@profiles.command() +@click.argument('name', type=click.STRING) +def create(name): + '''Creates new profile (name), with defaults''' + create_profile(name) + + +@profiles.command(name='set') +@click.argument('name') +@click.argument('key') +@click.argument('value') +def update(name, key, value): + '''Updates profile (name) for specific Key/Value''' + update_profile(name, **{key: value}) + + +@profiles.command() +@click.argument('name') +def delete(name): + '''Deletes profile (name)''' + delete_profile(name) diff --git a/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/cdap/format.json b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/cdap/format.json new file mode 100644 index 0000000..8456a30 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/cdap/format.json @@ -0,0 +1,10 @@ +{ + "self": { + "name": "std.empty", + "version": "1.0.6", + "description": "Represents an empty message with no content" + }, + "dataformatversion": "1.0.0", + "jsonschema": { + } +} diff --git a/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/cdap/spec_end.json b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/cdap/spec_end.json new file mode 100644 index 0000000..9642a6e --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/cdap/spec_end.json @@ -0,0 +1,77 @@ +{ + "self":{ + "name":"cdap.helloworld.mock.catalog.testing.endnode", + "version":"0.7.1", + "description":"cdap test component", + "component_type":"cdap" + }, + "streams":{ + "subscribes": [], + "publishes": [{ + "format": "std.empty", + "version": "1.0.6", + "config_key": 
"stream_publish_example", + "type": "http" + }] + }, + "services":{ + "calls": [], + "provides":[ + { + "request":{ + "format":"std.empty", + "version":"1.0.6" + }, + "response":{ + "format":"std.empty", + "version":"1.0.6" + }, + "service_name":"Greeting", + "service_endpoint":"greet", + "verb":"GET" + } + ] + }, + "parameters": { + "app_config" : [ + {"name" : "some_param", + "description" : "some desc", + "value" : "some_value", + "designer_editable" : false, + "sourced_at_deployment" : false, + "policy_editable" : false} + ], + "app_preferences" : [ + {"name" : "some_param2", + "description" : "some desc2", + "value" : "some_value2", + "designer_editable" : false, + "sourced_at_deployment" : false, + "policy_editable" : false} + ], + "program_preferences" : [{"program_type" : "flows", + "program_id" : "WhoFlow", + "program_pref" : [{"name" : "some_param3", + "description" : "some desc3", + "value" : "some_value3", + "designer_editable" : false, + "sourced_at_deployment" : false, + "policy_editable" : false}]}] + }, + "auxilary": { + "streamname":"who", + "artifact_name" : "HelloWorld", + "artifact_version" : "3.4.3", + "programs" : [ + {"program_type" : "flows", "program_id" : "WhoFlow"}, + {"program_type" : "services", "program_id" : "Greeting"} + ], + "namespace" : "hw" + }, + "artifacts": [ + { + "uri": "http://make-me-valid/jar_files/HelloWorld-3.4.3.jar", + "type": "jar" + } + ] +} diff --git a/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/cdap/spec_start.json b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/cdap/spec_start.json new file mode 100644 index 0000000..83b5c28 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/cdap/spec_start.json @@ -0,0 +1,78 @@ +{ + "self":{ + "name":"cdap.helloworld.mock.catalog.testing.startnode", + "version":"0.7.1", + "description":"cdap test component", + "component_type":"cdap" + }, + "streams":{ + "subscribes": [{ + "format": "std.empty", + "version": "1.0.6", + "route": "/unsure_if_needed_for_cdap", + "type": "http" + }], + "publishes": [] + }, + "services":{ + "calls": [ + { + "config_key": "service_call_example", + "verb": "GET", + "request": { + "format": "std.empty", + "version": "1.0.6" + }, + "response": { + "format": "std.empty", + "version": "1.0.6" + } + } + ], + "provides":[] + }, +"parameters": { + "app_config" : [ + {"name" : "some_param", + "description" : "some desc", + "value" : "some_value", + "designer_editable" : false, + "sourced_at_deployment" : false, + "policy_editable" : false} + ], + "app_preferences" : [ + {"name" : "some_param2", + "description" : "some desc2", + "value" : "some_value2", + "designer_editable" : false, + "sourced_at_deployment" : false, + "policy_editable" : false} + ], + "program_preferences" : [{"program_type" : "flows", + "program_id" : "WhoFlow", + "program_pref" : [{"name" : "some_param3", + "description" : "some desc3", + "value" : "some_value3", + "designer_editable" : false, + "sourced_at_deployment" : false, + "policy_editable" : false} + ]}] + }, + "auxilary": { + "streamname":"who", + "artifact_name" : "HelloWorld", + "artifact_version" : "3.4.3", + "programs" : [ + {"program_type" : "flows", "program_id" : "WhoFlow"}, + {"program_type" : "services", "program_id" : "Greeting"} + ], + "namespace" : "hw" + }, + "artifacts": [ + { + "uri": "http://make-me-valid/jar_files/HelloWorld-3.4.3.jar", + "type": "jar" + } + ] +} + diff --git a/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/collector/kpi-collector.comp.json 
b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/collector/kpi-collector.comp.json new file mode 100644 index 0000000..5b86d9c --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/collector/kpi-collector.comp.json @@ -0,0 +1,47 @@ +{ + "self": { + "version": "1.0.0", + "name": "std.vnf.kpi_collector", + "description": "Continuously publishes VNF KPIs", + "component_type": "docker" + }, + "streams": { + "subscribes": [], + "publishes": [ + { + "format": "std.vnf.kpi", + "version": "1.0.0", + "config_key": "kpi_pub", + "type": "http" + } + ] + }, + "services": { + "calls": [], + "provides": [] + }, + "parameters": [ + { + "name": "sleep_sec", + "value": 0.75, + "description": "Number of seconds to sleep between publishes", + "designer_editable": false, + "sourced_at_deployment": false, + "policy_editable": false + } + ], + "auxilary": { + "healthcheck": { + "type": "http", + "endpoint": "/health", + "interval": "15s", + "timeout": "1s" + } + }, + "artifacts": [ + { + "uri": "asimov-anomaly-collector", + "type": "docker image" + } + ] +} diff --git a/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/collector/vnf-kpi.format.json b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/collector/vnf-kpi.format.json new file mode 100644 index 0000000..6fba1a1 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/collector/vnf-kpi.format.json @@ -0,0 +1,13 @@ +{ + "self": { + "name": "std.vnf.kpi", + "version": "1.0.0", + "description": "Represents a KPI of a VNF at particular instance of time." + }, + "dataformatversion": "1.0.0", + "reference": { + "name": "Common Event Format", + "format": "JSON", + "version": "25.0.0" + } +} diff --git a/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/anomaly-model.comp.json b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/anomaly-model.comp.json new file mode 100644 index 0000000..3e2d142 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/anomaly-model.comp.json @@ -0,0 +1,52 @@ +{ + "self": { + "version": "1.0.0", + "name": "asimov.anomaly_classifier", + "description": "Classifies VNF KPIs as anommalous or not", + "component_type": "docker" + }, + "streams": { + "subscribes": [{ + "format": "std.vnf.kpi", + "version": "1.0.0", + "route": "/data", + "type": "http" + }], + "publishes": [ + { + "format": "asimov.std.integerClassification", + "version": "1.0.0", + "config_key": "pred", + "type": "http" + } + ] + }, + "services": { + "calls": [], + "provides": [] + }, + "parameters": [ + { + "name": "threshold", + "value": 0.75, + "description": "Probability threshold to exceed to be anomalous", + "designer_editable" : false, + "sourced_at_deployment" : false, + "policy_editable" : false + } + ], + "auxilary": { + "healthcheck": { + "type": "http", + "endpoint": "/health", + "interval": "15s", + "timeout": "1s" + } + }, + "artifacts": [ + { + "uri": "asimov-anomaly-model", + "type": "docker image" + } + ] +} diff --git a/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/badjson b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/badjson new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/badjson diff --git a/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/generatedir/ex1.json b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/generatedir/ex1.json new file mode 100755 
index 0000000..7db1e06 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/generatedir/ex1.json @@ -0,0 +1,3 @@ +{ + "foobar": "test 1" +} diff --git a/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/generatedir/ex2.json b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/generatedir/ex2.json new file mode 100755 index 0000000..75a5fa4 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/generatedir/ex2.json @@ -0,0 +1,4 @@ +{ + "foobar2": "test 1" +} + diff --git a/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/int-class.format.json b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/int-class.format.json new file mode 100644 index 0000000..7d3dedf --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/model/int-class.format.json @@ -0,0 +1,18 @@ +{ + "self": { + "name": "asimov.std.integerClassification", + "version": "1.0.0", + "description": "Represents a single classification from a machine learning model" + }, + "dataformatversion": "1.0.0", + "jsonschema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "classification": { + "type": "string" + } + }, + "additionalProperties": false + } +} diff --git a/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/viz/empty.format.json b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/viz/empty.format.json new file mode 100644 index 0000000..4ff7d3a --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/viz/empty.format.json @@ -0,0 +1,10 @@ +{ + "self": { + "name": "std.empty", + "version": "1.0.0", + "description": "Represents an empty message with no content" + }, + "dataformatversion": "1.0.0", + "jsonschema": { + } +} diff --git a/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/viz/line-viz.comp.json b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/viz/line-viz.comp.json new file mode 100644 index 0000000..d3b9e23 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/viz/line-viz.comp.json @@ -0,0 +1,51 @@ +{ + "self": { + "version": "1.0.0", + "name": "asimov.viz.line_plot", + "description": "Plots class probabilities as a line plot in real-time", + "component_type": "docker" + }, + "streams": { + "subscribes": [ + { + "format": "asimov.std.integerClassification", + "version": "1.0.0", + "route": "/prediction", + "type": "http" + } + ], + "publishes": [] + }, + "services": { + "calls": [], + "provides": [ + { + "route": "/viz", + "verb": "GET", + "request": { + "format": "std.empty", + "version": "1.0.0" + }, + "response": { + "format": "std.web.url", + "version": "1.0.0" + } + } + ] + }, + "parameters": [], + "auxilary": { + "healthcheck": { + "type": "http", + "endpoint": "/health", + "interval": "15s", + "timeout": "1s" + } + }, + "artifacts": [ + { + "uri": "asimov-anomaly-viz", + "type": "docker image" + } + ] +} diff --git a/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/viz/web-url.format.json b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/viz/web-url.format.json new file mode 100644 index 0000000..3e823b9 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/mocked_components/viz/web-url.format.json @@ -0,0 +1,19 @@ +{ + "self": { + "name": "std.web.url", + "version": "1.0.0", + "description": "Represents a web URL" + }, + "dataformatversion": "1.0.0", + "jsonschema": { + "$schema": 
"http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "URL": { + "type": "string" + } + }, + "additionalProperties": false + } +} + diff --git a/mod/onboardingapi/dcae_cli/commands/tests/test_component_cmd.py b/mod/onboardingapi/dcae_cli/commands/tests/test_component_cmd.py new file mode 100644 index 0000000..2bba4cf --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/test_component_cmd.py @@ -0,0 +1,149 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +''' +Tests component CLI commands +''' +import os +import json +from click.testing import CliRunner +import time +import pytest + +from dcae_cli.cli import cli +from dcae_cli.catalog import MockCatalog + +TEST_DIR = os.path.dirname(__file__) + + +def _get_spec(path): + with open(path) as file: + return json.load(file) + + +def test_comp_docker(mock_cli_config, mock_db_url, obj=None): + + obj = {'catalog': MockCatalog(purge_existing=True, db_name='dcae_cli.test.db', + enforce_image=False, db_url=mock_db_url), + 'config': {'user': 'test-user'}} + + df_kpi = os.path.join(TEST_DIR, 'mocked_components', 'collector', 'vnf-kpi.format.json') + comp_coll = os.path.join(TEST_DIR, 'mocked_components', 'collector', 'kpi-collector.comp.json') + + df_cls = os.path.join(TEST_DIR, 'mocked_components', 'model', 'int-class.format.json') + comp_model = os.path.join(TEST_DIR, 'mocked_components', 'model', 'anomaly-model.comp.json') + + df_empty = os.path.join(TEST_DIR, 'mocked_components', 'viz', 'empty.format.json') + df_url = os.path.join(TEST_DIR, 'mocked_components', 'viz', 'web-url.format.json') + comp_viz = os.path.join(TEST_DIR, 'mocked_components', 'viz', 'line-viz.comp.json') + + runner = CliRunner() + + + # add the collector + cmd = "data_format add {:}".format(df_kpi).split() + assert runner.invoke(cli, cmd, obj=obj).exit_code == 0 + + cmd = "component add {:}".format(comp_coll).split() + assert runner.invoke(cli, cmd, obj=obj).exit_code == 0 + + + # add the model + cmd = "data_format add {:}".format(df_cls).split() + assert runner.invoke(cli, cmd, obj=obj).exit_code == 0 + + cmd = "component add {:}".format(comp_model).split() + assert runner.invoke(cli, cmd, obj=obj).exit_code == 0 + + + # add the viz + cmd = "data_format add {:}".format(df_empty).split() + assert runner.invoke(cli, cmd, obj=obj).exit_code == 0 + + cmd = "data_format add {:}".format(df_url).split() + assert runner.invoke(cli, cmd, obj=obj).exit_code == 0 + + cmd = "component add {:}".format(comp_viz).split() + assert 
runner.invoke(cli, cmd, obj=obj).exit_code == 0 + + + # light test of component list + df_cls_spec = _get_spec(df_cls) + df_cls_name, df_cls_ver = df_cls_spec['self']['name'], df_cls_spec['self']['version'] + comp_model_spec = _get_spec(comp_model) + comp_model_name = comp_model_spec['self']['name'] + + cmd = "component list -pub {:}".format(df_cls_name).split() + #assert comp_model_name in runner.invoke(cli, cmd, obj=obj).output + + cmd = "component list -pub {:}:{:}".format(df_cls_name, df_cls_ver).split() + #assert comp_model_name in runner.invoke(cli, cmd, obj=obj).output + + + # light test of component info + cmd = "component show {:}".format(comp_model_name).split() + spec_str = runner.invoke(cli, cmd, obj=obj).output + assert comp_model_spec == json.loads(spec_str) + + +@pytest.mark.skip(reason="This is not a pure unit test. Need a way to set up dependencies and trigger in the appropriate stages of testing.") +def test_comp_cdap(obj=None): + """ + This is not a unit test. It is bigger than that. It does a full "workflow" test: + 1) adds a data format + 2) adds a cdap component + 3) runs a cdap component using our "Rework" broker + 4) undeploys the cdap component using our "Rework" broker + + NOTE: TODO: Mocking out the broker would be an improvement over this, probably. This is impure. Mocking the broker would be a huge undertaking, though. + """ + + obj = {'catalog': MockCatalog(purge_existing=True, db_name='dcae_cli.test.db'), + 'config': {'user': 'test-user'}} + runner = CliRunner() + + #add the data format + df = os.path.join(TEST_DIR, 'mocked_components', 'cdap', 'format.json') + cmd = "data_format add {:}".format(df).split() + assert runner.invoke(cli, cmd, obj=obj).exit_code == 0 + + #add the CDAP components + # TODO: Need to update the host + jar = 'http://make-me-valid/HelloWorld-3.4.3.jar' + + comp_cdap_start = os.path.join(TEST_DIR, 'mocked_components', 'cdap', 'spec_start.json') + cmd = "component add {0}".format(comp_cdap_start).split() + print(cmd) + result = runner.invoke(cli, cmd, obj=obj) + print(result.output) + assert result.exit_code == 0 + + comp_cdap_end = os.path.join(TEST_DIR, 'mocked_components', 'cdap', 'spec_end.json') + cmd = "component add {0}".format(comp_cdap_end).split() + print(cmd) + result = runner.invoke(cli, cmd, obj=obj) + print(result.output) + assert result.exit_code == 0 + +if __name__ == '__main__': + '''Test area''' + #pytest.main([__file__, ]) + test_comp_cdap() diff --git a/mod/onboardingapi/dcae_cli/commands/tests/test_data_format_cmd.py b/mod/onboardingapi/dcae_cli/commands/tests/test_data_format_cmd.py new file mode 100644 index 0000000..a291a74 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/test_data_format_cmd.py @@ -0,0 +1,122 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +''' +Tests data_format CLI commands +''' +import os +import json + +import pytest +from click.testing import CliRunner + +from dcae_cli.cli import cli +from dcae_cli.catalog import MockCatalog + + +TEST_DIR = os.path.dirname(__file__) + + +def _get_spec(path): + with open(path) as file: + return json.load(file) + + +def test_basic(mock_cli_config, mock_db_url, tmpdir): + obj = {'catalog': MockCatalog(purge_existing=True, db_name='dcae_cli.test.db', + enforce_image=False, db_url=mock_db_url), + 'config': {'user': 'test-user'}} + + runner = CliRunner() + spec_file = os.path.join(TEST_DIR, 'mocked_components', 'model', 'int-class.format.json') + cmd = "data_format add {:}".format(spec_file).split() + + # succeed the first time + result = runner.invoke(cli, cmd, obj=obj) + + assert result.exit_code == 0 + + # adding a duplicate is an error + result = runner.invoke(cli, cmd, obj=obj) + assert result.exit_code == 1 + assert 'exists' in result.output.lower() + + # allow updates + cmd = "data_format add --update {:}".format(spec_file).split() + result = runner.invoke(cli, cmd, obj=obj) + assert result.exit_code == 0 + + + # light test of list format command + cmd = 'data_format list'.split() + df_spec = _get_spec(spec_file) + df_name = df_spec['self']['name'] + assert df_name in runner.invoke(cli, cmd, obj=obj).output + + + # light test of component info + cmd = "data_format show {:}".format(df_name).split() + spec_str = runner.invoke(cli, cmd, obj=obj).output + assert df_spec == json.loads(spec_str) + + # test of generate + bad_dir = os.path.join(TEST_DIR, 'mocked_components', 'model', 'baddir') + cmd = "data_format generate --keywords \"name:1.0.2\" {:}".format(bad_dir).split() + err_str = runner.invoke(cli, cmd, obj=obj).output + assert "does not exist" in err_str + + empty_dir = os.path.join(TEST_DIR, 'mocked_components', 'model', 'emptydir') + try: + os.stat(empty_dir) + except: + os.mkdir(empty_dir) + cmd = "data_format generate --keywords \"name:1.0.2\" {:}".format(empty_dir).split() + err_str = runner.invoke(cli, cmd, obj=obj).output + assert "No JSON files found" in err_str + + bad_json = os.path.join(TEST_DIR, 'mocked_components', 'model', 'badjson') + cmd = "data_format generate --keywords \"name:1.0.2\" {:}".format(bad_json).split() + err_str = runner.invoke(cli, cmd, obj=obj).output + assert "Bad JSON file" in err_str + + generate_dir = os.path.join(TEST_DIR, 'mocked_components', 'model', 'generatedir') + cmd = "data_format generate --keywords name:1.0.2 {:} ".format(generate_dir).split() + actual = json.loads(runner.invoke(cli, cmd, obj=obj).output) + expected = json.loads('{\n "dataformatversion": "1.0.0", \n "jsonschema": {\n "$schema": "http://json-schema.org/draft-04/schema#", \n "description": "", \n "properties": {\n "foobar": {\n "description": "", \n "maxLength": 0, \n "minLength": 0, \n "pattern": "", \n "type": "string"\n }, \n "foobar2": {\n "description": "", \n "maxLength": 0, \n "minLength": 0, \n "pattern": "", \n "type": "string"\n }\n }, \n "type": "object"\n }, \n "self": {\n "description": "", \n "name": "name", \n "version": "1.0.2"\n }\n}\n') + assert actual == expected + + generate_dir = os.path.join(TEST_DIR, 'mocked_components', 'model', 'generatedir') + cmd = "data_format 
generate name:1.0.2 {:} ".format(generate_dir).split() + actual = json.loads(runner.invoke(cli, cmd, obj=obj).output) + expected = json.loads('{\n "dataformatversion": "1.0.0", \n "jsonschema": {\n "$schema": "http://json-schema.org/draft-04/schema#", \n "description": "", \n "properties": {\n "foobar": {\n "description": "", \n "type": "string"\n }, \n "foobar2": {\n "description": "", \n "type": "string"\n }\n }, \n "type": "object"\n }, \n "self": {\n "description": "", \n "name": "name", \n "version": "1.0.2"\n }\n}\n' + ) + assert actual == expected + + generate_dir = os.path.join(TEST_DIR, 'mocked_components', 'model', 'generatedir', 'ex1.json') + cmd = "data_format generate name:1.0.2 {:} ".format(generate_dir).split() + actual = json.loads(runner.invoke(cli, cmd, obj=obj).output) + expected = json.loads('{\n "dataformatversion": "1.0.0", \n "jsonschema": {\n "$schema": "http://json-schema.org/draft-04/schema#", \n "additionalproperties": true, \n "description": "", \n "properties": {\n "foobar": {\n "description": "", \n "type": "string"\n }\n }, \n "required": [\n "foobar"\n ], \n "type": "object"\n }, \n "self": {\n "description": "", \n "name": "name", \n "version": "1.0.2"\n }\n}\n') + assert actual == expected + + +if __name__ == '__main__': + '''Test area''' + pytest.main([__file__, ]) diff --git a/mod/onboardingapi/dcae_cli/commands/tests/test_profiles_cmd.py b/mod/onboardingapi/dcae_cli/commands/tests/test_profiles_cmd.py new file mode 100644 index 0000000..be89722 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/tests/test_profiles_cmd.py @@ -0,0 +1,84 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ +# -*- coding: utf-8 -*- +''' +Tests profiles CLI commands +''' +import json + +import pytest +import click +from click.testing import CliRunner + +from dcae_cli import util +from dcae_cli.cli import cli +from dcae_cli.util import profiles +from dcae_cli.util import config + + +def test_basic(monkeypatch, tmpdir, mock_db_url): + + runner = CliRunner() + + # Setup config + test_db_url = mock_db_url + config_dict = { "user": "ninny", "active_profile": "fake-solutioning", + "db_url": test_db_url, "cli_version": "2.0.0" } + config_file = tmpdir.join("config.json") + config_file.write(json.dumps(config_dict)) + + # Setup profile + profile_dict = { "fake-solutioning": { "cdap_broker": "cdap_broker", + "config_binding_service": "config_binding_service", + "consul_host": "realsolcnsl00.dcae.solutioning.com", + "docker_host": "realsoldokr00.dcae.solutioning.com:2376" }} + profile_file = tmpdir.join("profiles.json") + profile_file.write(json.dumps(profile_dict)) + + monkeypatch.setattr(click, "get_app_dir", lambda app: str(tmpdir.realpath())) + + cmd = 'profiles show fake-solutioning'.split() + result = runner.invoke(cli, cmd) + assert result.output == "{}\n".format(json.dumps(profile_dict["fake-solutioning"], + sort_keys=True, indent=4)) + + cmd = 'profiles list'.split() + result = runner.invoke(cli, cmd) + assert result.output == '* fake-solutioning\n' + + cmd = 'profiles create foo'.split() + result = runner.invoke(cli, cmd) + + cmd = 'profiles list'.split() + result = runner.invoke(cli, cmd) + assert result.output == '* fake-solutioning\n foo\n' + + cmd = 'profiles activate foo'.split() + result = runner.invoke(cli, cmd) + + cmd = 'profiles list'.split() + result = runner.invoke(cli, cmd) + assert result.output == ' fake-solutioning\n* foo\n' + + +if __name__ == '__main__': + '''Test area''' + pytest.main([__file__, ]) diff --git a/mod/onboardingapi/dcae_cli/commands/util.py b/mod/onboardingapi/dcae_cli/commands/util.py new file mode 100644 index 0000000..f9527fa --- /dev/null +++ b/mod/onboardingapi/dcae_cli/commands/util.py @@ -0,0 +1,111 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. 
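The profiles test above isolates itself by monkeypatching `click.get_app_dir` to point at a temp directory holding fixture `config.json` and `profiles.json` files. The same redirection in isolation, as a sketch using pytest's `tmp_path` and `monkeypatch` fixtures:

```python
# Sketch: sandboxing dcae-cli's app dir, as test_basic above does with tmpdir.
import json
import click

def redirect_app_dir(tmp_path, monkeypatch):
    (tmp_path / "config.json").write_text(json.dumps({"user": "tester"}))
    (tmp_path / "profiles.json").write_text(json.dumps({"default": {}}))
    # Anything that calls click.get_app_dir(APP_NAME) now resolves to tmp_path
    monkeypatch.setattr(click, "get_app_dir", lambda app: str(tmp_path))
```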
+ +# -*- coding: utf-8 -*- +""" +Provides utilities for commands +""" +import json +import textwrap +from terminaltables import AsciiTable + +from dcae_cli.util import DcaeException + + +def parse_input(input_): + '''Returns (name, version) tuple parsed from name:version''' + arg = input_.split(':') + if len(arg) == 1: + cname, cver = arg[0], None + elif len(arg) == 2: + cname, cver = arg + cver = None if not cver else cver + else: + raise DcaeException("Input '{:}' must be NAME or NAME:VERSION".format(input_)) + return cname, cver + + +def parse_input_pair(req, resp): + '''Returns a tuple output of `parse_input` for convenience''' + return parse_input(req), parse_input(resp) + + +def create_table(header, entries): + '''Returns an ASCII table string''' + data = [header, ] + if entries: + data.extend(entries) + else: + data.append(['']*len(header)) + return AsciiTable(data).table + + +# Utility methods used to format records for displaying + +def get_status_string(record): + """Get the status label given a record of either data format or component""" + if "when_revoked" not in record or \ + "when_published" not in record or \ + "when_added" not in record: + return None + + if record["when_revoked"] is not None: + return "revoked" + elif record["when_published"] is not None: + return "published" + else: + return "unpublished" + +def get_status_string_camel(record): + """Get the status label given a record of either data format or component, in camelCase""" + if "whenRevoked" not in record or \ + "whenPublished" not in record or \ + "whenAdded" not in record: + return None + + if record["whenRevoked"] is not None: + return "revoked" + elif record["whenPublished"] is not None: + return "published" + else: + return "unpublished" + +def format_description(description, line_width=50, num_lines=3): + """Formats the description field + + Description field can be long. This function line wraps to a specified number + of lines. The last line trails with ".." if the text still overflows to + signal that there is more. + """ + if not description: + return '' + lines = textwrap.wrap(description) + lines = lines[:num_lines] + last_line = lines.pop() + + if len(last_line) > line_width and line_width > 2: + last_line = "{0}..".format(last_line[:-2]) + + lines.append(last_line) + return "\n".join(lines) + + +def format_json(some_json): + return json.dumps(some_json, sort_keys=True, indent=4) diff --git a/mod/onboardingapi/dcae_cli/conftest.py b/mod/onboardingapi/dcae_cli/conftest.py new file mode 100644 index 0000000..7956a81 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/conftest.py @@ -0,0 +1,72 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. +""" +This module contains pytest fixtures shared across the project. +""" + +import pytest +import dcae_cli + +# REVIEW: Having issues trying to share this amongst all the tests. Putting this +# fixture here allows it to be shared when running tests over the entire project. +# The pytest recommendation was to place this file high up in the project. + +@pytest.fixture +def mock_cli_config(monkeypatch): + """Fixture to provide a mock dcae-cli configuration and profiles + + This fixture monkeypatches the respective get calls to return mock objects + """ + # NOTE: The component spec and data format in gerrit moved once already. + # Might move again.. + fake_config = { "active_profile": "default", "user": "bob", + "server_url": "https://git.onap.org/dcaegen2/platform/cli/plain", + "db_url": "postgresql://postgres:abc123@localhost:5432/dcae_onboarding_db", + "path_component_spec": "/component-json-schemas/component-specification/dcae-cli-v2/component-spec-schema.json", + "path_data_format": "/component-json-schemas/data-format/dcae-cli-v1/data-format-schema.json" + } + + fake_profiles = { "default": { "consul_host": "consul", + "cdap_broker": "cdap_broker", + "config_binding_service": "config_binding_service", + "docker_host": "docker_host" } + } + fake_profiles["active"] = fake_profiles["default"] + + def fake_get_config(): + return fake_config + + def fake_get_profiles(user_only=False, include_active=True): + return fake_profiles + + from dcae_cli.util import config, profiles + monkeypatch.setattr(dcae_cli.util.config, "get_config", fake_get_config) + monkeypatch.setattr(dcae_cli.util.profiles, "get_profiles", fake_get_profiles) + + +@pytest.fixture +def mock_db_url(tmpdir): + """Fixture to provide mock db url + + This url is intended to be the location of where to place the local sqlite + databases for each unit test""" + dbname="dcae_cli.test.db" + config_dir = tmpdir.mkdir("config") + return "/".join(["sqlite://", str(config_dir), dbname]) diff --git a/mod/onboardingapi/dcae_cli/http.py b/mod/onboardingapi/dcae_cli/http.py new file mode 100644 index 0000000..792cd7f --- /dev/null +++ b/mod/onboardingapi/dcae_cli/http.py @@ -0,0 +1,501 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2019 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# ============LICENSE_END========================================================= + +"""Code for http interface""" + +import json +from datetime import datetime +from flask import Flask, request +from flask_restplus import Api, Resource, fields, abort +from dcae_cli._version import __version__ +from dcae_cli.commands import util +from dcae_cli.util.logger import get_logger +from dcae_cli.util.exc import DcaeException +from dcae_cli.util import config as cli_config +from dcae_cli.catalog.exc import MissingEntry, CatalogError, DuplicateEntry, FrozenEntry, ForbiddenRequest +from dcae_cli.catalog.mock.catalog import MockCatalog + +_log = get_logger("http") + +_app = Flask(__name__) +# Try to bundle as many errors together +# https://flask-restplus.readthedocs.io/en/stable/parsing.html#error-handling +_app.config['BUNDLE_ERRORS'] = True +_api = Api(_app, version=__version__, title="DCAE Onboarding HTTP API", description="" + , contact="mhwangatresearch.att.com", default_mediatype="application/json" + , prefix="/onboarding", doc="/onboarding", default="onboarding" + ) + +compSpecPath = cli_config.get_server_url() + cli_config.get_path_component_spec() +component_fields_request = _api.schema_model('Component Spec', + {'properties': {'owner': {'type': 'string'}, + 'spec': {'type': 'object', \ + 'description': 'The Component Spec schema is here -> ' + compSpecPath} + } + }) + +component_fields_get = _api.model('component fields', { + 'id': fields.String(required=True, description='. . . . ID of the component'), + 'name': fields.String(required=True, description='. . . . Name of the component'), + 'version': fields.String(required=True, description='. . . . Version of the component'), + 'owner': fields.String(required=True, description='. . . . ID of who added the component'), + 'whenAdded': fields.DateTime(required=True, dt_format='iso8601', description='. . . . When component was added to the Catalog'), + 'modified': fields.DateTime(required=True, dt_format='iso8601', description='. . . . When component was last modified'), + 'status': fields.String(required=True, description='. . . . Status of the component'), + 'description': fields.String(required=True, description='. . . . Description of the component'), + 'componentType': fields.String(required=True, description='. . . . only "docker"'), + 'componentUrl': fields.String(required=True, description='. . . . Url to the Component Specification') + }) +components_get = _api.model('Component List', {'components': fields.List(fields.Nested(component_fields_get))}) + +component_fields_by_id = _api.inherit('component fields by id', component_fields_get, { + 'spec': fields.Raw(required=True, description='The Component Specification (json)') + }) + +component_post = _api.model('Component post', {'componentUrl': fields.String(required=True, description='. . . . Url to the Component Specification')}) + +dataformatPath = cli_config.get_server_url() + cli_config.get_path_data_format() +dataformat_fields_request = _api.schema_model('Data Format Spec', + {'properties': {'owner': {'type': 'string'}, + 'spec': {'type': 'object', \ + 'description': 'The Data Format Spec schema is here -> ' + dataformatPath} + } + }) + +dataformat_fields_get = _api.model('dataformat fields', { + 'id': fields.String(required=True, description='. . . . ID of the data format'), + 'name': fields.String(required=True, description='. . . . Name of the data format'), + 'version': fields.String(required=True, description='. . . . 
Version of the data format'), + 'owner': fields.String(required=True, description='. . . . ID of who added the data format'), + 'whenAdded': fields.DateTime(required=True, dt_format='iso8601', description='. . . . When data format was added to the Catalog'), + 'modified': fields.DateTime(required=True, dt_format='iso8601', description='. . . . When data format was last modified'), + 'status': fields.String(required=True, description='. . . . Status of the data format'), + 'description': fields.String(required=True, description='. . . . Description of the data format'), + 'dataFormatUrl': fields.String(required=True, description='. . . . Url to the Data Format Specification') + }) +dataformats_get = _api.model('Data Format List', {'dataFormats': fields.List(fields.Nested(dataformat_fields_get))}) + +dataformat_fields_by_id = _api.inherit('dataformat fields by id', dataformat_fields_get, { + 'spec': fields.Raw(required=True, description='The Data Format Specification (json)') + }) + +dataformat_post = _api.model('Data Format post', {'dataFormatUrl': fields.String(required=True, description='. . . . Url to the Data Format Specification')}) + + +patch_fields = _api.model('Patch Spec', {'owner': fields.String(required=True, description='User ID'), + 'status': fields.String(required=True, enum=['published', 'revoked'], \ + description='. . . . .[published] is the only status change supported right now') + } ) + +error_message = _api.model('Error message', {'message': fields.String(description='. . . . .Details about the unsuccessful API request')}) + + +parser_components = _api.parser() +parser_components.add_argument("name", type=str, trim=True, + location="args", help="Name of component to filter for") +parser_components.add_argument("version", type=str, trim=True, + location="args", help="Version of component to filter for") + +################ +## Component ## +################ +@_api.route("/components", endpoint="resource_components") +class Components(Resource): + """Component resource""" + @_api.doc("get_components", description="Get list of Components in the catalog") + @_api.marshal_with(components_get) + @_api.response(200, 'Success, Components retrieved') + @_api.response(500, 'Internal Server Error') + @_api.expect(parser_components) + def get(self): + only_latest = False + only_published = False + + args = parser_components.parse_args() + + mockCat = MockCatalog() + comps = mockCat.list_components(latest=only_latest, only_published=only_published) + + def format_record_component(obj): + def format_value(v): + if type(v) == datetime: + return v.isoformat() + else: + return v + def to_camel_case(snake_str): + components = snake_str.split('_') + # We capitalize the first letter of each component except the first one + # with the 'title' method and join them together. 
+ return components[0] + ''.join(x.title() for x in components[1:]) + + return dict([(to_camel_case(k), format_value(v)) \ + for k,v in obj.items()]) + + def add_self_url(comp): + comp["componentUrl"] = fields.Url("resource_component", absolute=True) \ + .output(None, {"component_id": comp["id"]}) + return comp + + def add_status(comp): + # "whenRevoked" and "whenPublished" are used to get status + comp["status"] = util.get_status_string_camel(comp) + return comp + + def should_keep(comp): + """Takes args to be used to filter the list of components""" + ok_name = args["name"] == None or args["name"] == comp["name"] + ok_version = args["version"] == None or args["version"] == comp["version"] + return ok_name and ok_version + + comps = [ add_self_url(add_status(format_record_component(comp))) + for comp in comps if should_keep(comp) ] + + return { "components": comps }, 200 + + + @_api.doc("post_component", description="Add a Component to the Catalog", body=component_fields_request) + @_api.marshal_with(component_post) + @_api.response(200, 'Success, Component added') + @_api.response(400, 'Bad Request', model=error_message) + @_api.response(409, 'Component already exists', model=error_message) + @_api.response(500, 'Internal Server Error') + @_api.expect(component_fields_request) + def post(self): + resp = None + try: + http_body = request.get_json() + + user = http_body['owner'] + spec = http_body['spec'] + try: + name = spec['self']['name'] + version = spec['self']['version'] + except Exception: + raise DcaeException("(Component) Spec needs to have a 'self' section with 'name' and 'version'") + + mockCat = MockCatalog() + ''' Pass False to do an add vs update ''' + mockCat.add_component(user, spec, False) + + component_id = mockCat.get_component_id(name, version) + componentUrl = fields.Url("resource_component", absolute=True) \ + .output(None, {"component_id": component_id}) + resp = {"componentUrl": componentUrl} + + except KeyError as e: + abort(code=400, message="Request field missing: {}".format(e)) + except DuplicateEntry as e: + resp = e.message.replace("name:version", name + ":" + version) + # We abort flask_restplus so our error message will override "marshal_with()" in response body + abort(code=409, message=resp) + except (CatalogError, DcaeException) as e: + abort(code=400, message=e) + + return resp, 200 + + +###################### +## Component by ID ## +###################### +@_api.route("/components/<string:component_id>", endpoint="resource_component") +class Component(Resource): + @_api.doc("get_component", description="Get a Component") + @_api.marshal_with(component_fields_by_id) + @_api.response(200, 'Success, Component retrieved') + @_api.response(404, 'Component not found in Catalog', model=error_message) + @_api.response(500, 'Internal Server Error') + def get(self, component_id): + resp = None + try: + mockCat = MockCatalog() + comp = mockCat.get_component_by_id(component_id) + status = util.get_status_string(comp) + + resp = { "id": comp["id"] + , "name": comp['name'] + , "version": comp['version'] + , "whenAdded": comp['when_added'].isoformat() + , "modified": comp["modified"].isoformat() + , "owner": comp["owner"] + , "description": comp['description'] + , "componentType": comp['component_type'] + , "spec": json.loads(comp["spec"]) + , "componentUrl": fields.Url("resource_component", absolute=True) + .output(None, {"component_id": comp["id"]}) + , "status": status + } + + except MissingEntry as e: + abort(code=404, message=e) + + return resp, 200 + + + 
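With the list and add handlers above in place, the component endpoints can be exercised over plain HTTP. A sketch with `requests` (the base URL is an assumption, and the toy spec body is a placeholder; a real POST must carry a full, schema-valid component spec):

```python
# Sketch: driving the /onboarding/components endpoints with requests.
import requests

BASE = "http://localhost:5000/onboarding"  # assumed host/port (Flask debug default)

# GET supports optional name/version query filters (see parser_components above)
resp = requests.get(BASE + "/components", params={"name": "asimov.anomaly_classifier"})
resp.raise_for_status()
for comp in resp.json()["components"]:
    print(comp["name"], comp["version"], comp["status"])

# POST carries the owner plus the full component spec; the toy spec below is a
# placeholder and would be rejected by the server's spec validation
payload = {"owner": "test-user", "spec": {"self": {"name": "x", "version": "1.0.0"}}}
print(requests.post(BASE + "/components", json=payload).status_code)
```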
@_api.doc("put_component", description="Replace a Component Spec in the Catalog", body=component_fields_request) + @_api.response(200, 'Success, Component replaced') + @_api.response(400, 'Bad Request', model=error_message) + @_api.response(404, 'Component not found in Catalog', model=error_message) + @_api.response(500, 'Internal Server Error') + @_api.expect(component_fields_request) + def put(self, component_id): + resp = None + try: + http_body = request.get_json() + user = http_body['owner'] + spec = http_body['spec'] + mockCat = MockCatalog() + ''' Pass True to do an update vs add ''' + mockCat.add_component(user, spec, True) + + except MissingEntry as e: + abort(code=404, message=e) + except (FrozenEntry, CatalogError, DcaeException) as e: + abort(code=400, message=e) + + return resp, 200 + + + @_api.doc("patch_component", description="Update a Component's status in the Catalog", body=patch_fields) + @_api.response(200, 'Success, Component status updated') + @_api.response(400, 'Bad Request', model=error_message) + @_api.response(403, 'Forbidden Request', model=error_message) + @_api.response(404, 'Component not found in Catalog', model=error_message) + @_api.response(500, 'Internal Server Error') + @_api.expect(patch_fields) + def patch(self, component_id): + resp = None + try: + http_body = request.get_json() + user = http_body['owner'] + field = http_body['status'] + if field not in ['published', 'revoked']: + raise DcaeException("Unknown status in request: '{}'".format(field)) + if field == 'revoked': + raise DcaeException("This status is not supported yet: '{}'".format(field)) + + mockCat = MockCatalog() + comp = mockCat.get_component_by_id(component_id) + comp_name = comp['name'] + comp_version = comp['version'] + + mockCat.publish_component(user, comp_name, comp_version) + + except MissingEntry as e: + abort(code=404, message=e) + except ForbiddenRequest as e: + abort(code=403, message=e) + except (CatalogError, DcaeException) as e: + abort(code=400, message=e) + + return resp, 200 + + +################### +## Data Format ## +################### +@_api.route("/dataformats", endpoint="resource_formats") +class DataFormats(Resource): + """Data Format resource""" + @_api.doc("get_dataformats", description="Get list of Data Formats in the catalog") + @_api.marshal_with(dataformats_get) + @_api.response(200, 'Success, Data Formats retrieved') + @_api.response(500, 'Internal Server Error') + def get(self): + only_latest = False + only_published = False + + mockCat = MockCatalog() + formats = mockCat.list_formats(latest=only_latest, only_published=only_published) + + def format_record_dataformat(obj): + + def format_value(v): + if type(v) == datetime: + return v.isoformat() + else: + return v + + def to_camel_case(snake_str): + components = snake_str.split('_') + # We capitalize the first letter of each component except the first one + # with the 'title' method and join them together. 
+ return components[0] + ''.join(x.title() for x in components[1:]) + + return dict([(to_camel_case(k), format_value(v)) \ + for k,v in obj.items()]) + + formats = [ format_record_dataformat(format) for format in formats ] + + def add_self_url(format): + format["dataFormatUrl"] = fields.Url("resource_format", absolute=True) \ + .output(None, {"dataformat_id": format["id"]}) + return format + + formats = [ add_self_url(format) for format in formats ] + + def add_status(format): + # "whenRevoked" and "whenPublished" are used to get status + format["status"] = util.get_status_string_camel(format) + + return format + + formats = [ add_status(format) for format in formats ] + + return { "dataFormats": formats }, 200 + + + @_api.doc("post_dataformat", description="Add a Data Format to the Catalog", body=dataformat_fields_request) + @_api.marshal_with(dataformat_post) + @_api.response(200, 'Success, Data Format added') + @_api.response(400, 'Bad Request', model=error_message) + @_api.response(409, 'Data Format already exists', model=error_message) + @_api.response(500, 'Internal Server Error') + @_api.expect(dataformat_fields_request) + def post(self): + resp = None + try: + http_body = request.get_json() + user = http_body['owner'] + spec = http_body['spec'] + try: + name = spec['self']['name'] + version = spec['self']['version'] + except Exception: + raise DcaeException("(Data Format) Spec needs to have a 'self' section with 'name' and 'version'") + + mockCat = MockCatalog() + ''' Pass False to do an add vs update ''' + mockCat.add_format(spec, user, False) + + dataformat_id = mockCat.get_dataformat_id(name, version) + dataformatUrl = fields.Url("resource_format", absolute=True) \ + .output(None, {"dataformat_id": dataformat_id}) + + resp = {"dataFormatUrl": dataformatUrl} + + except KeyError as e: + abort(code=400, message="Request field missing: {}".format(e)) + except DuplicateEntry as e: + resp = e.message.replace("name:version", name + ":" + version) + abort(code=409, message=resp) + except (CatalogError, DcaeException) as e: + abort(code=400, message=e) + + return resp, 200 + + +######################### +## Data Format by ID ## +######################### +@_api.route("/dataformats/<string:dataformat_id>", endpoint="resource_format") +class DataFormat(Resource): + @_api.doc("get_dataformat", description="Get a Data Format") + @_api.marshal_with(dataformat_fields_by_id) + @_api.response(200, 'Success, Data Format retrieved') + @_api.response(404, 'Data Format not found in Catalog', model=error_message) + @_api.response(500, 'Internal Server Error') + def get(self, dataformat_id): + resp = None + try: + mockCat = MockCatalog() + format = mockCat.get_dataformat_by_id(dataformat_id) + status = util.get_status_string(format) + + resp = { "id": format["id"] + , "name": format['name'] + , "version": format['version'] + , "whenAdded": format["when_added"].isoformat() + , "modified": format["modified"].isoformat() + , "owner": format["owner"] + , "description": format["description"] + , "spec": json.loads(format["spec"]) + , "dataFormatUrl": fields.Url("resource_format", absolute=True) + .output(None, {"dataformat_id": format["id"]}) + , "status": status + } + + except MissingEntry as e: + abort(code=404, message=e) + + return resp, 200 + + + @_api.doc("put_dataformat", description="Replace a Data Format Spec in the Catalog", body=dataformat_fields_request) + @_api.response(200, 'Success, Data Format added') + @_api.response(400, 'Bad Request', model=error_message) + @_api.response(404, 'Data 
Format not found in Catalog', model=error_message) + @_api.response(500, 'Internal Server Error') + @_api.expect(dataformat_fields_request) + def put(self, dataformat_id): + resp = None + try: + http_body = request.get_json() + user = http_body['owner'] + spec = http_body['spec'] + mockCat = MockCatalog() + ''' Pass True to do an update vs add ''' + mockCat.add_format(spec, user, True) + + except MissingEntry as e: + abort(code=404, message=e) + except (CatalogError, FrozenEntry, DcaeException) as e: + abort(code=400, message=e) + + return resp, 200 + + + @_api.doc("patch_dataformat", description="Update a Data Format's status in the Catalog", body=patch_fields) + @_api.response(200, 'Success, Data Format status updated') + @_api.response(400, 'Bad Request', model=error_message) + @_api.response(403, 'Forbidden Request', model=error_message) + @_api.response(404, 'Data Format not found in Catalog', model=error_message) + @_api.response(500, 'Internal Server Error') + @_api.expect(patch_fields) + def patch(self, dataformat_id): + resp = None + try: + http_body = request.get_json() + user = http_body['owner'] + field = http_body['status'] + if field not in ['published', 'revoked']: + raise DcaeException("Unknown status in request: '{}'".format(field)) + if field == 'revoked': + raise DcaeException("This status is not supported yet: '{}'".format(field)) + + mockCat = MockCatalog() + dataformat = mockCat.get_dataformat_by_id(dataformat_id) + dataformat_name = dataformat['name'] + dataformat_version = dataformat['version'] + + mockCat.publish_format(user, dataformat_name, dataformat_version) + + except MissingEntry as e: + abort(code=404, message=e) + except ForbiddenRequest as e: + abort(code=403, message=e) + except (CatalogError, DcaeException) as e: + abort(code=400, message=e) + + return resp, 200 + + +def start_http_server(catalog, debug=True): + if debug: + _app.run(debug=True) + else: + _app.run(host="0.0.0.0", port=80, debug=False) diff --git a/mod/onboardingapi/dcae_cli/tests/test_cli.py b/mod/onboardingapi/dcae_cli/tests/test_cli.py new file mode 100644 index 0000000..defa77d --- /dev/null +++ b/mod/onboardingapi/dcae_cli/tests/test_cli.py @@ -0,0 +1,37 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. 
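`start_http_server` above is the service entry point: in debug mode Flask binds its default 127.0.0.1:5000, otherwise 0.0.0.0:80, and flask-restplus serves the interactive Swagger UI at the `doc` path configured on the `Api` object (`/onboarding`). A minimal launch sketch (note the `catalog` argument is accepted but unused as the function is written):

```python
# Sketch: running the onboarding API locally for a smoke test.
from dcae_cli.http import start_http_server

if __name__ == "__main__":
    # Debug mode -> Flask's default 127.0.0.1:5000; Swagger UI at /onboarding
    start_http_server(None, debug=True)
```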
+ + # -*- coding: utf-8 -*- +'''Provides CLI-level tests''' +from click.testing import CliRunner + +from dcae_cli.cli import cli + + + + +if __name__ == '__main__': + '''Manual tests for now''' + import traceback + runner = CliRunner() + result = runner.invoke(cli, "-v component add docker gliderlabs/registrator /Users/trianta/Documents/Repositories/dcae-cli-components/model/int-classification.json".split(), obj=dict()) + print(result.output) + print(result.exit_code) + print(result.exception) diff --git a/mod/onboardingapi/dcae_cli/util/__init__.py b/mod/onboardingapi/dcae_cli/util/__init__.py new file mode 100644 index 0000000..b39de74 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/__init__.py @@ -0,0 +1,120 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +""" +Provides reusable utilities +""" +import os +import json +import sys +import errno
import contextlib +import requests + +import six +import click + +from dcae_cli.util.exc import DcaeException, FileNotFoundError + + +APP_NAME = 'dcae-cli' + + +def get_app_dir(): + '''Returns the absolute directory path for dcae cli aux files''' + return click.get_app_dir(APP_NAME) + + +def makedirs(path, exist_ok=True): + '''Emulates Python 3.2+ os.makedirs functionality''' + try: + os.makedirs(path, exist_ok=exist_ok) + except TypeError: + try: + os.makedirs(path) + except OSError as e: + if e.errno == errno.EEXIST and not exist_ok: + raise + + +def get_pref(path, init_func=None): + '''Returns a general preference dict.
Uses `init_func` to create a new one if the file does not exist.''' + try: + with open(path) as file: + pref = json.load(file) + except FileNotFoundError: + pref = init_func() if init_func is not None else dict() + write_pref(pref, path) + return pref + + +def pref_exists(path): + return os.path.isfile(path) + + +def update_pref(path, init_func=None, **kwargs): + '''Sets specified key-value pairs in a preference file and returns an updated dict''' + pref = get_pref(path, init_func) + pref.update(kwargs) + write_pref(pref, path) + + return pref + + +def write_pref(pref, path): + '''Writes a preference json file to disk''' + makedirs(os.path.dirname(path), exist_ok=True) + with open(path, 'w') as file: + json.dump(pref, file) + + +def reraise_with_msg(e, msg=None, cls=None, as_dcae=False): + '''Reraises exception e with an additional message prepended''' + if as_dcae: + cls = DcaeException + traceback = sys.exc_info()[2] + cls = e.__class__ if cls is None else cls + new_msg = "{:}: {:}".format(msg, e) if msg else str(e) + new_e = cls(new_msg) + six.reraise(cls, new_e, traceback) + + +def load_json(path): + '''Helper function which loads a JSON file and returns a dict''' + with open(path) as file: + try: + return json.load(file) + except ValueError: + raise DcaeException("File '{}' appears to be a malformed JSON.".format(path)) + + +def fetch_file_from_web(server_url, path, transform_func=json.loads): + """Fetch file from a web server + + The default behavior is to transform the response to a json. + """ + artifact_url = "{0}/{1}".format(server_url, path) + r = requests.get(artifact_url) + r.raise_for_status() + if transform_func: + return transform_func(r.text) + else: + return r.text diff --git a/mod/onboardingapi/dcae_cli/util/cdap_util.py b/mod/onboardingapi/dcae_cli/util/cdap_util.py new file mode 100644 index 0000000..a38f530 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/cdap_util.py @@ -0,0 +1,206 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. 
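The helpers above combine into the configuration-seeding flow used by `config.py` further down: `fetch_file_from_web` pulls a JSON artifact and `get_pref` persists it on first use. A small sketch (the server URL and local path are placeholder assumptions):

```python
# Sketch: seeding a local preference file from a remote JSON artifact.
import os
from dcae_cli.util import fetch_file_from_web, get_pref

SERVER_URL = "https://example.com"  # placeholder; the real value lives in config.json

def seed():
    # fetch_file_from_web json-decodes the response body by default
    return fetch_file_from_web(SERVER_URL, "/dcae-cli/config.json")

# Loads the file if it exists; otherwise calls seed() and writes the result
config = get_pref(os.path.expanduser("~/.config/dcae-cli/config.json"), seed)
print(config)
```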
+ + # -*- coding: utf-8 -*- +""" +Provides utilities for cdap components +""" +import logging +import json +import requests +import six + +from dcae_cli.util.logger import get_logger +from dcae_cli.util.exc import DcaeException +from dcae_cli.util import discovery + +_logger = get_logger('cdap-utils') +_logger.setLevel(logging.DEBUG) + +#HELPER FUNCTIONS +def _merge_spec_config_into_broker_put(jar, config, spec, params, templated_conf): + """ + The purpose of this function is to form the CDAP Broker PUT from the CDAP component jar, spec, config, and params, where: + - jar is a URL + - config is the CDAP "auxiliary file" + - spec is the CDAP component specification + - params contains the subkeys "app_config", "app_preferences", "program_preferences" from the parameters config specification + - (this last one isn't REALLY needed because it is a subset of "spec", but some preprocessing has already been done, specifically "normalize_cdap_params" + + The CDAP Broker API existed well before the component spec, so there is overlap with different naming. + In the future, if this component spec becomes production and everyone follows it, + I will change the broker API to use the same keys so that this mapping becomes unnecessary. + However, while this is still a moving project, I am simply going to do a horrible mapping here. + + The CDAP broker PUT looks as follows: + { + "service_component_type" : ..., + "jar_url" : ..., + "artifact_name" : ..., + "artifact_version" : ..., + "app_config" : ..., + "app_preferences" : ..., + "program_preferences": ..., + "programs": ..., + "streamname" : ..., + "namespace" : ..., + "service_endpoints" : ... + } + + "So you cooked up a story and dropped the six of us into a *meat grinder*" - Arnold Schwarzenegger, Predator. + + #RE: Streams/consumes: this is used in the designer for validation but does not lead to anything in the CDAP developers configuration. + """ + + #map services/provides into service_endpoints broker JSON + services = spec["services"]["provides"] # is [] if empty + se = [] + if services != []: + for s in services: + se.append({"service_name" : s["service_name"], "service_endpoint" : s["service_endpoint"], "endpoint_method" : s["verb"]}) + + BrokerPut = { + "cdap_application_type" : "program-flowlet", #TODO! Fix this once Hydrator apps is integrated into this CLI tool. + "service_component_type" : spec["self"]["component_type"], + "jar_url" : jar, + "artifact_version" : config["artifact_version"], + "artifact_name" : config["artifact_name"], + "programs": config["programs"], + "streamname" : config["streamname"], + "services" : se, + } + + Optionals = {v : config[v] for v in [i for i in ["namespace"] if i in config]} + + #not a fan of whatever is going on in update such that I can't return this in single line + BrokerPut.update(Optionals) + BrokerPut.update(params) + + # NOTE: app_config comes from params + BrokerPut["app_config"]["services_calls"] = templated_conf["services_calls"] + BrokerPut["app_config"]["streams_publishes"] = templated_conf["streams_publishes"] + BrokerPut["app_config"]["streams_subscribes"] = templated_conf["streams_subscribes"] + + return BrokerPut + +def _get_broker_url_from_profile(profile): + """ + Gets the broker URL from profile + """ + #Functions named so well you don't need docstrings.
(C) tombo 2017 + res = requests.get("http://{0}:8500/v1/catalog/service/{1}".format(profile.consul_host, profile.cdap_broker)).json() + return "http://{ip}:{port}".format(ip=res[0]["ServiceAddress"], port=res[0]["ServicePort"]) + +#PUBLIC +def run_component(catalog, params, instance_name, profile, jar, config, spec, templated_conf): + """ + Runs a CDAP Component + + By the time this function is called, the instance_name and instance_name:rel have already been pushed into consul by this parent function + instance_name will be overwritten by the broker and the rels key will be used by the broker to call the CBS + """ + broker_url = _get_broker_url_from_profile(profile) + + #register with the broker + broker_put = _merge_spec_config_into_broker_put(jar, config, spec, params, templated_conf) + + #helps the component developer debug their spec if CDAP throws a 400 + _logger.info("Your (unbound, bound will be shown if deployment completes) app_config is being sent as") + _logger.info(json.dumps(broker_put["app_config"])) + + _logger.info("Your app_preferences are being sent as") + _logger.info(json.dumps(broker_put["app_preferences"])) + + _logger.info("Your program_preferences are being sent as") + _logger.info(json.dumps(broker_put["program_preferences"])) + + response = requests.put("{brokerurl}/application/{appname}".format(brokerurl=broker_url, appname=instance_name), + json = broker_put, + headers = {'content-type':'application/json'}) + + deploy_success = False + try: + response.raise_for_status() #bomb if not 2xx + deploy_success = True + except: + #need this to raise a dirty status code for tests to work, so not just logging + raise DcaeException("A deployment error occurred. Broker Response: {0}, Broker Response Text: {1}".format(response.status_code, response.text)) + + if deploy_success: + #TODO: not sure what this error handling looks like, should never happen that a deploy succeeds but this get fails + #Get the cluster URL to tell the user to go check their application + response = requests.get(broker_url) + response.raise_for_status() #bomb if not 2xx + cdap_cluster = response.json()["managed cdap url"] + + #Fetch the Application's AppConfig to show them what the bound config looks like: + #TODO: This should be an endpoint in the broker. I filed an issue in the broker. For now, do the horrendous special character mapping here. + #TODO: This only fetches AppConfig, add AppPreferences + ns = "default" if "namespace" not in broker_put else broker_put["namespace"] + mapped_appname = ''.join(e for e in instance_name if e.isalnum()) + r = requests.get("{0}/v3/namespaces/{1}/apps/{2}".format(cdap_cluster, ns, mapped_appname)).json() + config = r["configuration"] + + _logger.info("Deployment Complete!") + _logger.info("The CDAP cluster API is at {0}. The *GUI* Port is {1}. You may now go check your application there to confirm it is running correctly.".format(cdap_cluster, response.json()["cdap GUI port"])) + _logger.info("Your instance name is: {0}. In CDAP, this will appear as: {1}".format(instance_name, mapped_appname)) + _logger.info("The bound Configuration for this application is: {0}".format(config)) + + #TODO: Should we tell the user about metrics and healthcheck to try those too? + +def normalize_cdap_params(spec): + """ + The CDAP component specification includes some optional fields that the broker expects.
+ This parses the specification, includes those fields when they are present, and sets the broker defaults otherwise + """ + Params = {} + p = spec["parameters"] + #app preferences + Params["app_preferences"] = {} if "app_preferences" not in p else {param["name"] : param["value"] for param in p["app_preferences"]} + #app config + Params["app_config"] = {} if "app_config" not in p else {param["name"] : param["value"] for param in p["app_config"]} + #program preferences + if "program_preferences" not in p: + Params["program_preferences"] = [] + else: + Params["program_preferences"] = [] + for tup in p["program_preferences"]: + Params["program_preferences"].append({"program_id" : tup["program_id"], + "program_type" : tup["program_type"], + "program_pref" : {param["name"] : param["value"] for param in tup["program_pref"]}}) + return Params + +def undeploy_component(profile, instance_name): + """ + Undeploys a CDAP Component, which in CDAP terms means stop and delete + """ + broker_url = _get_broker_url_from_profile(profile) + + #call the delete + response = requests.delete("{brokerurl}/application/{appname}".format(brokerurl=broker_url, appname=instance_name)) + try: + response.raise_for_status() #bomb if not 2xx + _logger.info("Undeploy complete.") + return True + except Exception as e: + _logger.error("An undeploy error occurred: {2}. Broker Response: {0}, Broker Response Text: {1}".format(response.status_code, response.text, e)) + return False + diff --git a/mod/onboardingapi/dcae_cli/util/config.py b/mod/onboardingapi/dcae_cli/util/config.py new file mode 100644 index 0000000..f9936c3 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/config.py @@ -0,0 +1,156 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +""" +Provides dcae cli config utilities +""" +import os, re + +import click +import six + +from dcae_cli import util +from dcae_cli import _version +from dcae_cli.util import get_app_dir, get_pref, update_pref, write_pref, pref_exists + + +class ConfigurationInitError(RuntimeError): + pass + +def get_config_path(): + '''Returns the absolute configuration file path''' + return os.path.join(get_app_dir(), 'config.json') + + +def _init_config_user(): + while True: + user = click.prompt('Please enter your user id', type=str).strip() + + # There should be no special characters + if re.match(r"(?:\w*)\Z", user): + return user + else: + click.echo("Invalid user id.
Please try again.") + +def _init_config_server_url(): + return click.prompt("Please enter the remote server url", type=str).strip() + +def _init_config_db_url(): + click.echo("Now we need to set up access to the onboarding catalog") + hostname = click.prompt("Please enter the onboarding catalog hostname").strip() + user = click.prompt("Please enter the onboarding catalog user").strip() + password = click.prompt("Please enter the onboarding catalog password").strip() + return "postgresql://{user}:{password}@{hostname}:5432/dcae_onboarding_db".format( + hostname=hostname, user=user, password=password) + +def _init_config(): + '''Returns an initial dict for populating the config''' + # Grab the remote config and merge it in + new_config = {} + + try: + server_url = _init_config_server_url() + new_config = util.fetch_file_from_web(server_url, "/dcae-cli/config.json") + except: + # Failing to pull seed configuration from remote server is not considered + # a problem. Just continue and give user the option to set it up + # themselves. + if not click.confirm("Could not download initial configuration from remote server. Attempt manually setting up?"): + raise ConfigurationInitError("Could not setup dcae-cli configuration") + + # UPDATE: Keeping the server url even though the config was not found there. + new_config["server_url"] = server_url + new_config["user"] = _init_config_user() + new_config["cli_version"] = _version.__version__ + + if "db_url" not in new_config or not new_config["db_url"]: + # The seed configuration was not provided so manually set up the db + # connection + new_config["db_url"] = _init_config_db_url() + + if "active_profile" not in new_config: + # The seed configuration was not provided which means the profiles will + # be the same. The profile will be hardcoded to a an empty default. + new_config["active_profile"] = "default" + + return new_config + + +def should_force_reinit(config): + """Configs older than 2.0.0 should be replaced""" + ver = config.get("cli_version", "0.0.0") + return int(ver.split(".")[0]) < 2 + +def get_config(): + '''Returns the configuration dictionary''' + return get_pref(get_config_path(), _init_config) + +def get_server_url(): + """Returns the remote server url + + The remote server holds the artifacts that the dcae-cli requires like the + seed config json and seed profiles json, and json schemas. + """ + return get_config().get("server_url") + +def get_docker_logins_key(): + """Returns the Consul key that Docker logins are stored under + + Default is "docker_plugin/docker_logins" which matches up with the docker + plugin default. + """ + return get_config().get("docker_logins_key", "docker_plugin/docker_logins") + +# These functions are used to fetch the configurable path to the various json +# schema files used in validation. 
+ +def get_path_component_spec(): + return get_config().get("path_component_spec", + "/schemas/component-specification/dcae-cli-v2/component-spec-schema.json") + +def get_path_data_format(): + return get_config().get("path_data_format", + "/schemas/data-format/dcae-cli-v1/data-format-schema.json") + +def get_active_profile(): + return get_config().get("active_profile", None) + + +def update_config(**kwargs): + '''Updates and returns the configuration dictionary''' + return update_pref(path=get_config_path(), init_func=get_config, **kwargs) + + +def _reinit_config(init_func): + new_config = init_func() + config_path = get_config_path() + + if pref_exists(config_path): + existing_config = get_config() + # Make sure to clobber existing values and not other way + existing_config.update(new_config) + new_config = existing_config + + write_pref(new_config, config_path) + return new_config + +def reinit_config(): + return _reinit_config(_init_config) diff --git a/mod/onboardingapi/dcae_cli/util/discovery.py b/mod/onboardingapi/dcae_cli/util/discovery.py new file mode 100644 index 0000000..e8d2ff8 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/discovery.py @@ -0,0 +1,777 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +""" +Provides Consul helper functions +""" +import re +import json +import contextlib +from collections import defaultdict +from itertools import chain +from functools import partial +from datetime import datetime +from uuid import uuid4 + +import six +from copy import deepcopy +from consul import Consul + +from dcae_cli.util.logger import get_logger +from dcae_cli.util.exc import DcaeException +from dcae_cli.util.profiles import get_profile +from dcae_cli.util.config import get_docker_logins_key + +import os +import click + +logger = get_logger('Discovery') + +# NOTE: Removed the suffix completely. The useful piece of the suffix was the +# location but it was implemented in a static fashion (hardcoded). Rather than +# enhancing the existing approach and making the suffix dynamic (to support +# "rework-central" and "solutioning"), the thinking is to revisit this name stuff +# and use Consul's query interface so that location is a tag attribute. 
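+# A fully qualified instance name has the form
+# "<user>.<prefix>.<dashed version>.<dashed component name>". For example
+# (hypothetical values), "jdoe.f81d4fae.1-0-0.foo-component" parses as
+# user="jdoe", hash="f81d4fae", ver="1-0-0", comp="foo-component".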
+_inst_re = re.compile(r"^(?P<user>[^.]*).(?P<hash>[^.]*).(?P<ver>\d+-\d+-\d+).(?P<comp>.*)$") + + +class DiscoveryError(DcaeException): + pass + +class DiscoveryNoDownstreamComponentError(DiscoveryError): + pass + + +def default_consul_host(): + """Return default consul host + + This method was created to purposefully make fetching the default lazier than + the previous impl. The previous impl had the default as a global variable and + thus requiring the configuration to be setup before doing anything further. + The pain point of that impl is in unit testing where now all code that + imported this module had a strict dependency upon the impure configuration. + """ + return get_profile().consul_host + + +def _choose_consul_host(consul_host): + """Chooses the appropriate consul host + + Chooses between a provided value and a default + """ + return default_consul_host() if consul_host == None else consul_host + + +def replace_dots(comp_name, reverse=False): + '''Converts dots to dashes to prevent downstream users of Consul from exploding''' + if not reverse: + return comp_name.replace('.', '-') + else: + return comp_name.replace('-', '.') + +# Utility functions for using Consul + +def _is_healthy_pure(get_health_func, instance): + """Checks to see if a component instance is running healthy + + Pure function edition + + Args + ---- + get_health_func: func(string) -> complex object + Look at unittests in test_discovery to see examples + instance: (string) fully qualified name of component instance + + Returns + ------- + True if instance has been found and is healthy else False + """ + index, resp = get_health_func(instance) + + if resp: + def is_passing(instance): + return all([check["Status"] == "passing" for check in instance["Checks"]]) + return any([is_passing(instance) for instance in resp]) + else: + return False + +def is_healthy(consul_host, instance): + """Checks to see if a component instance is running healthy + + Impure function edition + + Args + ---- + consul_host: (string) host string of Consul + instance: (string) fully qualified name of component instance + + Returns + ------- + True if instance has been found and is healthy else False + """ + cons = Consul(consul_host) + return _is_healthy_pure(cons.health.service, instance) + +def _get_instances_from_kv(get_from_kv_func, user): + """Get component instances from kv store + + Deployed component instances get entries in a kv store to store configuration + information. This is a way to source a list of component instances that were + attempted to run. A component could have deployed but failed to register itself. + The only trace of that deployment would be checking the kv store. + + Args + ---- + get_from_kv_func: func(string, boolean) -> (don't care, list of dicts) + Look at unittests in test_discovery to see examples + user: (string) user id + + Returns + ------- + List of unique component instance names + """ + # Keys from KV contain rels key entries and non-rels key entries. Keep the + # rels key entries but remove the ":rel" suffix because we are paranoid that + # this could exist without the other + _, instances_kv = get_from_kv_func(user, recurse=True) + return [] if instances_kv is None \ + else list(set([ dd["Key"].replace(":rel", "") for dd in instances_kv ])) + +def _get_instances_from_catalog(get_from_catalog_func, user): + """Get component instances from catalog + + Fetching instances from the catalog covers the deployment cases where + components registered successfully regardless of their health check status. 
+ + Args + ---- + get_from_catalog_func: func() -> (don't care, dict) + Look at unittests in test_discovery to see examples + user: (string) user id + + Returns + ------- + List of unique component instance names + """ + # Get all services and filter here by user + response = get_from_catalog_func() + return list(set([ instance for instance in response[1].keys() if user in instance ])) + +def _merge_instances(user, *get_funcs): + """Merge the result of an arbitrary list of get instance function calls + + Args + ---- + user: (string) user id + get_funcs: func(string) -> list of strings + Functions that take in a user parameter to output a list of instance + names + + Returns + ------- + List of unique component instance names + """ + return list(set(chain.from_iterable([ get_func(user) for get_func in get_funcs ]))) + +def _get_instances(consul_host, user): + """Get all deployed component instances for a given user + + Sourced from multiple places to ensure we get a complete list of all + component instances no matter what state they are in. + + Args + ---- + consul_host: (string) host string of Consul + user: (string) user id + + Returns + ------- + List of unique component instance names + """ + cons = Consul(consul_host) + + get_instances_from_kv = partial(_get_instances_from_kv, cons.kv.get) + get_instances_from_catalog = partial(_get_instances_from_catalog, cons.catalog.services) + + return _merge_instances(user, get_instances_from_kv, get_instances_from_catalog) + + +# Custom (sometimes higher order) "discovery" functionality + +def _make_instances_map(instances): + """Make an instance map + + Instance map is a dict where the keys are tuples (component type, component version) + that map to a set of strings that are instance names. + """ + mapping = defaultdict(set) + for instance in instances: + match = _inst_re.match(instance) + if match is None: + continue + + _, _, ver, comp = match.groups() + cname = replace_dots(comp, reverse=True) + version = replace_dots(ver, reverse=True) + key = (cname, version) + mapping[key].add(instance) + return mapping + + +def get_user_instances(user, consul_host=None, filter_instances_func=is_healthy): + '''Get a user's instance map + + Args: + ----- + filter_instances_func: fn(consul_host, instance) -> boolean + Function used to filter instances. Default is is_healthy + + Returns: + -------- + Dict whose keys are component (name,version) tuples and values are list of component instance names + ''' + consul_host = _choose_consul_host(consul_host) + filter_func = partial(filter_instances_func, consul_host) + instances = list(filter(filter_func, _get_instances(consul_host, user))) + + return _make_instances_map(instances) + + +def _get_component_instances(filter_instances_func, user, cname, cver, consul_host): + """Get component instances that are filtered + + Args: + ----- + filter_instances_func: fn(consul_host, instance) -> boolean + Function used to filter instances + + Returns + ------- + List of strings where the strings are fully qualified instance names + """ + instance_map = get_user_instances(user, consul_host=consul_host, + filter_instances_func=filter_instances_func) + + # REVIEW: We don't restrict component names from using dashes. We do + # transform names with dots to use dashes for domain segmenting reasons. + # Instance map creation always reverses that making dashes to dots even though + # the component name may have dashes. Thus always search for instances by + # a dotted component name. 
We are open to a collision but the chance is low
+    # - someone would have to use the same name in both dotted and dashed form, which would be unusual.
+    cname_dashless = replace_dots(cname, reverse=True)
+
+    # WATCH: instances_map.get returns set. Force to be list to have consistent
+    # return
+    return list(instance_map.get((cname_dashless, cver), []))
+
+def get_healthy_instances(user, cname, cver, consul_host=None):
+    """Lists healthy instances of a particular component for a given user
+
+    Returns
+    -------
+    List of strings where the strings are fully qualified instance names
+    """
+    consul_host = _choose_consul_host(consul_host)
+    return _get_component_instances(is_healthy, user, cname, cver, consul_host)
+
+def get_defective_instances(user, cname, cver, consul_host=None):
+    """Lists *not* running instances of a particular component for a given user
+
+    This means that there are component instances that are sitting out there
+    deployed but not successfully running.
+
+    Returns
+    -------
+    List of strings where the strings are fully qualified instance names
+    """
+    def is_not_healthy(consul_host, component):
+        return not is_healthy(consul_host, component)
+
+    consul_host = _choose_consul_host(consul_host)
+    return _get_component_instances(is_not_healthy, user, cname, cver, consul_host)
+
+
+def lookup_instance(consul_host, name):
+    """Query Consul for service details"""
+    cons = Consul(consul_host)
+    index, results = cons.catalog.service(name)
+    return results
+
+def parse_instance_lookup(results):
+    """Parse the resultset from lookup_instance
+
+    Returns:
+    --------
+    String in host form <address>:<port>
+    """
+    if results:
+        # Just grab first
+        result = results[0]
+        return "{address}:{port}".format(address=result["ServiceAddress"],
+                port=result["ServicePort"])
+    else:
+        return
+
+
+def _create_rels_key(config_key):
+    """Create rels key from config key
+
+    Assumes config_key is well-formed"""
+    return "{:}:rel".format(config_key)
+
+
+def _create_dmaap_key(config_key):
+    """Create dmaap key from config key
+
+    Assumes config_key is well-formed"""
+    return "{:}:dmaap".format(config_key)
+
+
+def _create_policies_key(config_key):
+    """Create policies key from config key
+
+    Assumes config_key is well-formed"""
+    return "{:}:policies/".format(config_key)
+
+def clear_user_instances(user, host=None):
+    '''Removes all Consul key:value entries for a given user'''
+    host = _choose_consul_host(host)
+    cons = Consul(host)
+    cons.kv.delete(user, recurse=True)
+
+
+_multiple_compat_msg = '''Component '{cname}' config_key '{ckey}' has multiple compatible downstream \
+components: {compat}. The current infrastructure can only support interfacing with a single component. \
+Only downstream component '{chosen}' will be connected.'''
+
+_no_compat_msg = "Component '{cname}' config_key '{ckey}' has no compatible downstream components."
+
+_no_inst_msg = '''Component '{cname}' config_key '{ckey}' is compatible with downstream component '{chosen}', \
+however there are no instances available for connecting.'''
+
+
+def _cfmt(*args):
+    '''Returns a string formatted representation for a component and version'''
+    if len(args) == 1:
+        return ':'.join(args[0])
+    elif len(args) == 2:
+        return ':'.join(args)
+    else:
+        raise DiscoveryError('Input should be name, version or (name, version)')
+
+
+def _get_downstream(cname, cver, config_key, compat_comps, instance_map,
+        force=False):
+    '''
+    Returns a component type and its instances to use for a given config key
+
+    Parameters
+    ----------
+    cname : string
+        Name of the upstream component
+    cver : string
+        Version of the upstream component
+    config_key : string
+        Mainly used for populating warnings meaningfully
+    compat_comps : collection
+        Collection of compatible component (name, version) tuples
+    instance_map : dict
+        A dict whose keys are component (name, version) tuples and values are a list of instance names
+    '''
+    if not compat_comps:
+        conn_comp = ('', '')
+        logger.warning(_no_compat_msg.format(cname=_cfmt(cname, cver), ckey=config_key))
+    else:
+        conn_comp = six.next(iter(compat_comps))
+        if len(compat_comps) > 1:
+            logger.warning(_multiple_compat_msg.format(cname=_cfmt(cname, cver), ckey=config_key,
+                    compat=list(map(_cfmt, compat_comps)), chosen=_cfmt(conn_comp)))
+    if all(conn_comp):
+        instances = instance_map.get(conn_comp, tuple())
+        if not instances:
+            if force:
+                logger.warning(_no_inst_msg.format(cname=_cfmt(cname, cver), \
+                        ckey=config_key, chosen=_cfmt(conn_comp)))
+            else:
+                logger.error(_no_inst_msg.format(cname=_cfmt(cname, cver), \
+                        ckey=config_key, chosen=_cfmt(conn_comp)))
+                raise DiscoveryNoDownstreamComponentError("No compatible downstream component found.")
+    else:
+        instances = tuple()
+
+    return conn_comp, instances
+
+
+def create_config(user, cname, cver, params, interface_map, instance_map, dmaap_map,
+        instance_prefix=None, force=False):
+    '''
+    Creates a config and corresponding rels entries in Consul. Returns the Consul keys and entries.
+
+    Parameters
+    ----------
+    user : string
+        The user namespace to create the config and rels under. E.g. user.foo.bar...
+    cname : string
+        Name of the upstream component
+    cver : string
+        Version of the upstream component
+    params : dict
+        Parameters of the component, taken directly from the component specification
+    interface_map : dict
+        A dict mapping the config_key of published streams and/or called services to a list of compatible
+        component types and versions
+    instance_map : dict
+        A dict mapping component types and versions to a list of instances currently running
+    dmaap_map : dict
+        A dict mapping config keys to dmaap information. This map is checked
+        first before checking the instance_map which means before checking for
+        direct http components.
+    instance_prefix : string, optional
+        The unique prefix to associate with the component instance whose config is being created
+    force : boolean, optional
+        Config will continue to be created even if there are no downstream compatible
+        components when this flag is set to True. Default is False.
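+
+    Returns
+    -------
+    Tuple of (conf_key, conf, rels_key, rels, dmaap_key, dmaap_map) where the
+    *_key elements are the Consul keys and the element following each key is
+    the value to store under it; the returned dmaap map is reduced to just the
+    dmaap_info entries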
+    '''
+    inst_pref = str(uuid4()) if instance_prefix is None else instance_prefix
+    conf_key = "{:}.{:}.{:}.{:}".format(user, inst_pref, replace_dots(cver), replace_dots(cname))
+    rels_key = _create_rels_key(conf_key)
+    dmaap_key = _create_dmaap_key(conf_key)
+
+    conf = params.copy()
+    rels = list()
+
+    # NOTE: The dmaap_map entries are broken up between the templated config
+    # and the dmaap json in Consul
+    for config_key, dmaap_goodies in six.iteritems(dmaap_map):
+        conf[config_key] = deepcopy(dmaap_map[config_key])
+        # Here comes the magic. << >> signifies dmaap to downstream config
+        # binding service.
+        conf[config_key]["dmaap_info"] = "<<{:}>>".format(config_key)
+
+    # NOTE: The interface_map may not contain *all* possible interfaces
+    # that may be connected with because the catalog.get_discovery call filters
+    # based upon neighbors. Essentially the interface_map is being pre-filtered
+    # which is probably a latent bug.
+
+    for config_key, compat_types in six.iteritems(interface_map):
+        # Don't clobber config keys that have been set from above
+        if config_key not in conf:
+            conn_comp, instances = _get_downstream(cname, cver, config_key, \
+                    compat_types, instance_map, force=force)
+            conn_name, conn_ver = conn_comp
+            middle = ''
+
+            if conn_name and conn_ver:
+                middle = "{:}.{:}".format(replace_dots(conn_ver), replace_dots(conn_name))
+            else:
+                if not force:
+                    raise DiscoveryNoDownstreamComponentError("No compatible downstream component found.")
+
+            config_val = '{{' + middle + '}}'
+            conf[config_key] = config_val
+            rels.extend(instances)
+
+    dmaap_map_just_info = { config_key: v["dmaap_info"]
+            for config_key, v in six.iteritems(dmaap_map) }
+    return conf_key, conf, rels_key, rels, dmaap_key, dmaap_map_just_info
+
+
+def get_docker_logins(host=None):
+    """Get Docker logins from Consul
+
+    Returns
+    -------
+    List of objects where the objects must be of the form
+        {"registry": .., "username":.., "password":.. }
+    """
+    key = get_docker_logins_key()
+    host = _choose_consul_host(host)
+    (index, val) = Consul(host).kv.get(key)
+
+    if val:
+        return json.loads(val['Value'].decode("utf-8"))
+    else:
+        return []
+
+
+def push_config(conf_key, conf, rels_key, rels, dmaap_key, dmaap_map, host=None):
+    '''Uploads the config and rels to Consul'''
+    host = _choose_consul_host(host)
+    cons = Consul(host)
+    for k, v in ((conf_key, conf), (rels_key, rels), (dmaap_key, dmaap_map)):
+        cons.kv.put(k, json.dumps(v))
+
+    logger.info("* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *")
+    logger.info("* If you run a 'component reconfig' command, you must first execute the following")
+    logger.info("* export SERVICE_NAME={:}".format(conf_key))
+    logger.info("* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *")
+
+
+def remove_config(config_key, host=None):
+    """Deletes a config from Consul
+
+    Returns
+    -------
+    True when all artifacts have been successfully deleted else False
+    """
+    host = _choose_consul_host(host)
+    cons = Consul(host)
+    # "recurse=True" deletes the SERVICE_NAME KV and all other KVs with suffixes (:rel, :dmaap, :policies)
+    results = cons.kv.delete(config_key, recurse=True)
+
+    return results
+
+
+def _group_config(config, config_key_map):
+    """Groups config by streams_publishes, streams_subscribes, services_calls"""
+    # Copy non streams and services first
+    grouped_conf = { k: v for k,v in six.iteritems(config)
+            if k not in config_key_map }
+
+    def group(group_name):
+        grouped_conf[group_name] = { k: v for k,v in six.iteritems(config)
+                if k in config_key_map and config_key_map[k]["group"] == group_name }
+
+    # Copy and group the streams and services
+    # Map returns iterator so must force running its course
+    list(map(group, ["streams_publishes", "streams_subscribes", "services_calls"]))
+    return grouped_conf
+
+
+def _apply_inputs(config, inputs_map):
+    """Update configuration with inputs
+
+    This method updates the values of the configuration parameters using values
+    from the inputs map.
+    """
+    config.update(inputs_map)
+    return config
+
+
+@contextlib.contextmanager
+def config_context(user, cname, cver, params, interface_map, instance_map,
+        config_key_map, dmaap_map={}, inputs_map={}, instance_prefix=None,
+        host=None, always_cleanup=True, force_config=False):
+    '''Convenience utility for creating configs and cleaning them up
+
+    Args
+    ----
+    always_cleanup: (boolean)
+        This context manager will always clean up the produced config context
+        if this is True. When False, cleanup will only occur upon any
+        exception getting thrown in the context manager block. Default is True.
+    force_config: (boolean)
+        Config will continue to be created even if there are no downstream compatible
+        components when this flag is set to True. Default is False.
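+
+    Yields
+    ------
+    Tuple of (conf_key, conf): the Consul key the config was pushed under and
+    the grouped, input-applied configuration that was pushed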
+    '''
+    host = _choose_consul_host(host)
+
+    try:
+        conf_key, conf, rels_key, rels, dmaap_key, dmaap_map = create_config(
+                user, cname, cver, params, interface_map, instance_map, dmaap_map,
+                instance_prefix, force=force_config)
+
+        conf = _apply_inputs(conf, inputs_map)
+        conf = _group_config(conf, config_key_map)
+
+        push_config(conf_key, conf, rels_key, rels, dmaap_key, dmaap_map, host)
+        yield (conf_key, conf)
+    except Exception as e:
+        if not always_cleanup:
+            try:
+                conf_key, rels_key, host
+            except UnboundLocalError:
+                pass
+            else:
+                remove_config(conf_key, host)
+
+        raise e
+    finally:
+        if always_cleanup:
+            try:
+                conf_key, rels_key, host
+            except UnboundLocalError:
+                pass
+            else:
+                remove_config(conf_key, host)
+
+
+def policy_update(policy_change_file, consul_host):
+
+    # Determine if it is an 'updated_policies' or 'removed_policies' change, or if user included ALL policies
+    policies = "policies" in policy_change_file
+    updated = "updated_policies" in policy_change_file
+    removed = "removed_policies" in policy_change_file
+
+    cons = Consul(consul_host)
+    service_name = os.environ["SERVICE_NAME"]
+    policy_folder = service_name + ":policies/items/"
+    event_folder = service_name + ":policies/event"
+
+    if policies:
+        # User specified ALL "policies" in the Policy File. Ignore "updated_policies"/"removed_policies"
+        logger.warning("The 'policies' specified in the 'policy-file' will replace all policies in Consul.")
+        allPolicies = policy_change_file['policies']
+        if not update_all_policies(cons, policy_folder, allPolicies):
+            return False
+
+    else:
+        # If 'removed_policies', delete the Policy from the Component KV pair
+        if removed:
+            policyDeletes = policy_change_file['removed_policies']
+            if not remove_policies(cons, policy_folder, policyDeletes):
+                return False
+
+        # If 'updated_policies', update the Component KV pair
+        if updated:
+            policyUpdates = policy_change_file['updated_policies']
+            if not update_specified_policies(cons, policy_folder, policyUpdates):
+                return False
+
+    return create_policy_event(cons, event_folder, policy_folder)
+
+
+def create_policy_event(cons, event_folder, policy_folder):
+    """ Create a Policy 'event' KV pair in Consul """
+
+    timestamp = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
+    update_id = str(uuid4())
+    policies = cons.kv.get(policy_folder, recurse=True)
+    policies_count = str(policies).count("'Key':")
+
+    event = '{"action": "gathered", "timestamp": "' + timestamp + '", "update_id": "' + update_id + '", "policies_count": ' + str(policies_count) + '}'
+    if not cons.kv.put(event_folder, event):
+        logger.error("Policy 'Event' creation of ({:}) in Consul failed".format(event_folder))
+        return False
+
+    return True
+
+
+def update_all_policies(cons, policy_folder, allPolicies):
+    """ Delete all policies from Consul, then add the policies the user specified in the 'policies' section of the policy-file """
+
+    if not cons.kv.delete(policy_folder, recurse=True):  # Deletes all Policies under the /policies/items folder
+        logger.error("Policy delete of ({:}) in Consul failed".format(policy_folder))
+        return False
+
+    if not update_specified_policies(cons, policy_folder, allPolicies):
+        return False
+
+    return True
+
+def update_specified_policies(cons, policy_folder, policyUpdates):
+    """ Replace the policies the user specified in the 'updated_policies' (or 'policies') section of the policy-file """
+
+    for policy in policyUpdates:
+        policy_folder_id = 
extract_policy_id(policy_folder, policy) + if policy_folder_id: + policyBody = json.dumps(policy) + if not cons.kv.put(policy_folder_id, policyBody): + logger.error("Policy update of ({:}) in Consul failed".format(policy_folder_id)) + return False + else: + return False + + return True + + +def remove_policies(cons, policy_folder, policyDeletes): + """ Delete the policies that the user specified in the 'removed_policies' section of the policy-file """ + + for policy in policyDeletes: + policy_folder_id = extract_policy_id(policy_folder, policy) + if policy_folder_id: + if not cons.kv.delete(policy_folder_id): + logger.error("Policy delete of ({:}) in Consul failed".format(policy_folder_id)) + return False + else: + return False + + return True + +def extract_policy_id(policy_folder, policy): + """ Extract the Policy ID from the policyName. + Return the Consul key (Policy Folder with Policy ID) """ + + policyId_re = re.compile(r"(.*)\.\d+\.[a-zA-Z]+$") + + policyName = policy['policyName'] # Extract the policy Id "Consul Key" from the policy name + match = policyId_re.match(policyName) + + if match: + policy_id = match.group(1) + policy_folder_id = policy_folder + policy_id + + return policy_folder_id + else: + logger.error("policyName ({:}) needs to end in '.#.xml' in order to extract the Policy ID".format(policyName)) + return + + +def build_policy_command(policy_reconfig_path, policy_change_file, consul_host): + """ Build command to execute the Policy Reconfig script in the Docker container """ + + # Determine if it is an 'updated_policies' and/or 'removed_policies' change, or if user included ALL policies + all_policies = True if "policies" in policy_change_file.keys() else False + updated = True if "updated_policies" in policy_change_file.keys() else False + removed = True if "removed_policies" in policy_change_file.keys() else False + + # Create the Reconfig Script command (3 parts: Command and 2 ARGs) + command = [] + command.append(policy_reconfig_path) + command.append("policies") + + # Create a Dictionary of 'updated', 'removed', and 'ALL' policies + + # 'updated' policies - policies come from the --policy-file + if updated: + updated_policies = policy_change_file['updated_policies'] + else: updated_policies = [] + + policies = {} + policies["updated_policies"] = updated_policies + + # 'removed' policies - policies come from the --policy-file + if removed: + removed_policies = policy_change_file['removed_policies'] + else: removed_policies = [] + + policies["removed_policies"] = removed_policies + + # ALL 'policies' - policies come from Consul + cons = Consul(consul_host) + service_name = os.environ["SERVICE_NAME"] + policy_folder = service_name + ":policies/items/" + + id, consul_policies = cons.kv.get(policy_folder, recurse=True) + + policy_values = [] + if consul_policies: + for policy in consul_policies: + policy_value = json.loads(policy['Value']) + policy_values.append(policy_value) + + policies["policies"] = policy_values + + # Add the policies to the Docker "command" as a JSON string + command.append(json.dumps(policies)) + + return command diff --git a/mod/onboardingapi/dcae_cli/util/dmaap.py b/mod/onboardingapi/dcae_cli/util/dmaap.py new file mode 100644 index 0000000..138e909 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/dmaap.py @@ -0,0 +1,358 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T 
Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +""" +Functions for DMaaP integration +""" +import six +import logging +from jsonschema import validate, ValidationError +from dcae_cli.util import reraise_with_msg +from dcae_cli.util.logger import get_logger +from dcae_cli.catalog.mock.schema import apply_defaults + + +logger = get_logger('Dmaap') + +_SCHEMA = { + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Schema for dmaap inputs", + "type": "object", + "oneOf": [ + { "$ref": "#/definitions/message_router" }, + { "$ref": "#/definitions/data_router_publisher" }, + { "$ref": "#/definitions/data_router_subscriber" } + ], + "definitions": { + "message_router": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": ["message_router"] + }, + "aaf_username": { + "type": "string", + "default": None + }, + "aaf_password": { + "type": "string", + "default": None + }, + "dmaap_info": { + "type": "object", + "properties": { + "client_role": { + "type": "string", + "default": None + }, + "client_id": { + "type": "string", + "default": None + }, + "location": { + "type": "string", + "default": None + }, + "topic_url": { + "type": "string" + } + }, + "required": [ + "topic_url" + ], + "additionalProperties": False + } + }, + "required": [ + "type", + "dmaap_info" + ], + "additionalProperties": False + }, + "data_router_publisher": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": ["data_router"] + }, + "dmaap_info": { + "type": "object", + "properties": { + "location": { + "type": "string", + "default": None, + "description": "the DCAE location for the publisher, used to set up routing" + }, + "publish_url": { + "type": "string", + "description": "the URL to which the publisher makes Data Router publish requests" + }, + "log_url": { + "type": "string", + "default": None, + "description": "the URL from which log data for the feed can be obtained" + }, + "username": { + "type": "string", + "default": None, + "description": "the username the publisher uses to authenticate to Data Router" + }, + "password": { + "type": "string", + "default": None, + "description": "the password the publisher uses to authenticate to Data Router" + }, + "publisher_id": { + "type": "string", + "default": "" + } + }, + "required": [ + "publish_url" + ], + "additionalProperties": False + } + }, + "required": [ + "type", + "dmaap_info" + ], + "additionalProperties": False + }, + "data_router_subscriber": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": ["data_router"] + }, + "dmaap_info": { + "type": "object", + "properties": { + "location": { + "type": "string", + "default": None, + "description": "the DCAE location for the publisher, used 
to set up routing"
+                        },
+                        "delivery_url": {
+                            "type": "string",
+                            "description": "the URL to which the Data Router should deliver files"
+                        },
+                        "username": {
+                            "type": "string",
+                            "default": None,
+                            "description": "the username Data Router uses to authenticate to the subscriber when delivering files"
+                        },
+                        "password": {
+                            "type": "string",
+                            "default": None,
+                            "description": "the password Data Router uses to authenticate to the subscriber when delivering files"
+                        },
+                        "subscriber_id": {
+                            "type": "string",
+                            "default": ""
+                        }
+                    },
+                    "additionalProperties": False
+                }
+            },
+            "required": [
+                "type",
+                "dmaap_info"
+            ],
+            "additionalProperties": False
+        }
+    }
+}
+
+
+_validation_msg = """
+Is your DMaaP client object a valid json?
+Does your DMaaP client object follow this format?
+
+Message router:
+
+    {
+        "aaf_username": <string, optional>,
+        "aaf_password": <string, optional>,
+        "type": "message_router",
+        "dmaap_info": {
+            "client_role": <string, optional>,
+            "client_id": <string, optional>,
+            "location": <string, optional>,
+            "topic_url": <string, required>
+        }
+    }
+
+Data router (publisher):
+
+    {
+        "type": "data_router",
+        "dmaap_info": {
+            "location": <string, optional>,
+            "publish_url": <string, required>,
+            "log_url": <string, optional>,
+            "username": <string, optional>,
+            "password": <string, optional>,
+            "publisher_id": <string, optional>
+        }
+    }
+
+Data router (subscriber):
+
+    {
+        "type": "data_router",
+        "dmaap_info": {
+            "location": <string, optional>,
+            "delivery_url": <string, optional>,
+            "username": <string, optional>,
+            "password": <string, optional>,
+            "subscriber_id": <string, optional>
+        }
+    }
+
+"""
+
+def validate_dmaap_map_schema(dmaap_map):
+    """Validate the dmaap map schema"""
+    for k, v in six.iteritems(dmaap_map):
+        try:
+            validate(v, _SCHEMA)
+        except ValidationError as e:
+            logger.error("DMaaP validation issue with \"{k}\"".format(k=k))
+            logger.error(_validation_msg)
+            reraise_with_msg(e, as_dcae=True)
+
+
+class DMaaPValidationError(RuntimeError):
+    pass
+
+def _find_matching_definition(instance):
+    """Find and return matching definition given an instance"""
+    for subsection in ["message_router", "data_router_publisher",
+            "data_router_subscriber"]:
+        try:
+            validate(instance, _SCHEMA["definitions"][subsection])
+            return _SCHEMA["definitions"][subsection]
+        except ValidationError:
+            pass
+
+    # You should never get here but just in case..
+    logger.error("No matching definition: {0}".format(instance))
+    raise DMaaPValidationError("No matching definition")
+
+def apply_defaults_dmaap_map(dmaap_map):
+    """Apply the defaults to the dmaap map"""
+    def grab_properties(instance):
+        return _find_matching_definition(instance)["properties"]
+
+    return { k: apply_defaults(grab_properties(v), v) for k,v in
+            six.iteritems(dmaap_map) }
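+
+# For illustration (hypothetical values), a dmaap_map with one message router
+# entry keyed by config_key might look like:
+#   {"mr_output": {"type": "message_router",
+#                  "dmaap_info": {"topic_url": "https://mr-host:3905/events/SOME_TOPIC"}}}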
+
+def validate_dmaap_map_entries(dmaap_map, mr_config_keys, dr_config_keys):
+    """Validate dmaap map entries
+
+    Validate dmaap map to make sure all config keys are there and that there's
+    no additional config keys because this map is used in generating the
+    configuration json.
+
+    Returns:
+    --------
+    True when dmaap_map is ok and False when it is not
+    """
+    # Catch when there is no dmaap_map when there should be
+    if len(mr_config_keys) + len(dr_config_keys) > 0 and len(dmaap_map) == 0:
+        logger.error("You have dmaap streams defined in your specification")
+        logger.error("You must provide a dmaap json to resolve those dmaap streams.")
+        logger.error("Please use the \"--dmaap-file\" option")
+        return False
+
+    config_keys = dr_config_keys + mr_config_keys
+    # Look for missing keys
+    is_missing = lambda config_key: config_key not in dmaap_map
+    missing_keys = list(filter(is_missing, config_keys))
+
+    if missing_keys:
+        logger.error("Missing config keys in dmaap json: {0}".format(
+            ",".join(missing_keys)))
+        logger.error("Re-edit your dmaap json")
+        return False
+
+    # Look for unexpected keys
+    is_unexpected = lambda config_key: config_key not in config_keys
+    unexpected_keys = list(filter(is_unexpected, dmaap_map.keys()))
+
+    if unexpected_keys:
+        # NOTE: Changed this to a non-error in order to support the feature of
+        # developer having a master dmaap map
+        logger.warning("Unexpected config keys in dmaap json: {0}".format(
+            ",".join(unexpected_keys)))
+        return True
+
+    return True
+
+
+def update_delivery_urls(get_route_func, target_base_url, dmaap_map):
+    """Update delivery urls for dmaap map
+
+    This method picks out all the data router connections for subscribers and
+    updates the delivery urls with the supplied base target url concatenated
+    with the user specified route (or path).
+
+    Args:
+    -----
+    get_route_func (func): Function that takes a config_key and returns the route
+        used for the data router subscriber
+    target_base_url (string): "{http|https}://<hostname>:<port>"
+    dmaap_map (dict): DMaaP map is map of inputs that is config_key to provisioned
+        data router feed or message router topic connection details
+
+    Returns:
+    --------
+    Returns the updated DMaaP map
+    """
+    def update_delivery_url(config_key, dm):
+        route = get_route_func(config_key)
+        dm["dmaap_info"]["delivery_url"] = "{base}{tween}{path}".format(base=target_base_url,
+                path=route, tween="" if route[0] == "/" else "/")
+        return dm
+
+    def is_dr_subscriber(dm):
+        return dm["type"] == "data_router" and "publish_url" not in dm["dmaap_info"]
+
+    updated_map = { config_key: update_delivery_url(config_key, dm)
+            for config_key, dm in six.iteritems(dmaap_map) if is_dr_subscriber(dm) }
+    dmaap_map.update(updated_map)
+
+    return dmaap_map
+
+
+def list_delivery_urls(dmaap_map):
+    """List delivery urls
+
+    Returns:
+    --------
+    List of tuples (config_key, delivery_url)
+    """
+    return [(config_key, dm["dmaap_info"]["delivery_url"]) \
+            for config_key, dm in six.iteritems(dmaap_map) if "delivery_url" in dm["dmaap_info"]]
diff --git a/mod/onboardingapi/dcae_cli/util/docker_util.py b/mod/onboardingapi/dcae_cli/util/docker_util.py
new file mode 100644
index 0000000..90a6811
--- /dev/null
+++ b/mod/onboardingapi/dcae_cli/util/docker_util.py
@@ -0,0 +1,226 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+# -*- coding: utf-8 -*-
+"""
+Provides utilities for Docker components
+"""
+import socket
+from sys import platform
+
+import docker
+import six
+
+import dockering as doc
+from dcae_cli.util.logger import get_logger
+from dcae_cli.util.exc import DcaeException
+
+dlog = get_logger('Docker')
+
+_reg_img = 'gliderlabs/registrator:latest'
+# TODO: Source this from app's configuration [ONAP URL TBD]
+_reg_cmd = '-ip {:} consul://make-me-valid:8500'
+
+class DockerError(DcaeException):
+    pass
+
+class DockerConstructionError(DcaeException):
+    pass
+
+
+# Functions to provide envs to pass into Docker containers
+
+def _convert_profile_to_docker_envs(profile):
+    """Convert a profile object to Docker environment variables
+
+    Parameters
+    ----------
+    profile: namedtuple
+
+    Returns
+    -------
+    dict of environment variables to be used by docker-py
+    """
+    profile = profile._asdict()
+    return dict([(key.upper(), value) for key, value in six.iteritems(profile)])
+
+
+def build_envs(profile, docker_config, instance_name):
+    profile_envs = _convert_profile_to_docker_envs(profile)
+    health_envs = doc.create_envs_healthcheck(docker_config)
+    return doc.create_envs(instance_name, profile_envs, health_envs)
+
+
+# Methods to call Docker engine
+
+# TODO: Consolidate these two docker client methods. Need ability to invoke local
+# vs remote Docker engine
+
+def get_docker_client(profile, logins=[]):
+    hostname, port = profile.docker_host.split(":")
+    try:
+        client = doc.create_client(hostname, port, logins=logins)
+        client.ping()
+        return client
+    except Exception:
+        raise DockerError('Could not connect to the Docker daemon. Is it running?')
+
+
+def image_exists(image):
+    '''Returns True if the image exists locally'''
+    client = docker.APIClient(version="auto", **docker.utils.kwargs_from_env())
+    return True if client.images(image) else False
+
+
+def _infer_ip():
+    '''Infers the IP address of the host running this tool'''
+    if not platform.startswith('linux'):
+        raise DockerError('Non-linux environment detected. Use the --external-ip flag when running Docker components.')
+    ip = socket.gethostbyname(socket.gethostname())
+    dlog.info("Docker host external IP address inferred to be {:}. If this is incorrect, use the --external-ip flag.".format(ip))
+    return ip
+
+
+def _run_container(client, config, name=None, wait=False):
+    '''Runs a container'''
+    if name is not None:
+        info = six.next(iter(client.containers(all=True, filters={'name': "^/{:}$".format(name)})), None)
+        if info is not None:
+            if info['State'] == 'running':
+                dlog.info("Container '{:}' was detected as already running.".format(name))
+                return info
+            else:
+                client.remove_container(info['Id'])
+
+    cont = doc.create_container_using_config(client, name, config)
+    client.start(cont)
+    info = client.inspect_container(cont)
+    name = info['Name'][1:]  # remove '/' prefix
+    image = config['Image']
+    dlog.info("Running image '{:}' as '{:}'".format(image, name))
+
+    if not wait:
+        return info
+
+    cont_log = dlog.getChild(name)
+    try:
+        for msg in client.logs(cont, stream=True):
+            cont_log.info(msg.decode())
+        else:
+            dlog.info("Container '{:}' exited suddenly.".format(name))
+    except (KeyboardInterrupt, SystemExit):
+        dlog.info("Stopping container '{:}' and cleaning up...".format(name))
+        client.kill(cont)
+        client.remove_container(cont)
+
+
+def _run_registrator(client, external_ip=None):
+    '''Ensures that Registrator is running'''
+
+    ip = _infer_ip() if external_ip is None else external_ip
+    cmd = _reg_cmd.format(ip).split()
+
+    binds={'/var/run/docker.sock': {'bind': '/tmp/docker.sock'}}
+    hconf = client.create_host_config(binds=binds, network_mode='host')
+    conf = client.create_container_config(image=_reg_img, command=cmd, host_config=hconf)
+
+    _run_container(client, conf, name='registrator', wait=False)
+
+
+# TODO: Need to revisit and reimplement _run_registrator(client, external_ip)
+
+#
+# High level calls
+#
+
+def deploy_component(profile, image, instance_name, docker_config, should_wait=False,
+        logins=[]):
+    """Deploy Docker component
+
+    This call runs a Docker container detached. The assumption is that the Docker
+    host already has registrator running.
+
+    TODO: Split out the wait functionality
+
+    Args
+    ----
+    logins (list): List of objects where the objects are each a docker login of
+        the form:
+
+        {"registry": .., "username":.., "password":.. }
+
+    Returns
+    -------
+    Dict that is the result from a Docker inspect call
+    """
+    ports = docker_config.get("ports", None)
+    hcp = doc.add_host_config_params_ports(ports=ports)
+    volumes = docker_config.get("volumes", None)
+    hcp = doc.add_host_config_params_volumes(volumes=volumes, host_config_params=hcp)
+    # Thankfully passing in an IP will return back an IP
+    dh = profile.docker_host.split(":")[0]
+    _, _, dhips = socket.gethostbyname_ex(dh)
+
+    if dhips:
+        hcp = doc.add_host_config_params_dns(dhips[0], hcp)
+    else:
+        raise DockerConstructionError("Could not resolve the docker hostname:{0}".format(dh))
+
+    envs = build_envs(profile, docker_config, instance_name)
+    client = get_docker_client(profile, logins=logins)
+
+    config = doc.create_container_config(client, image, envs, hcp)
+
+    return _run_container(client, config, name=instance_name, wait=should_wait)
+
+
+def undeploy_component(client, image, instance_name):
+    """Undeploy Docker component
+
+    TODO: Handle error scenarios. 
Look into: + * no container found error - docker.errors.NotFound + * failure to remove image - docker.errors.APIError: 409 Client Error + * retry, check for still running container + + Returns + ------- + True if the container and associated image has been removed False otherwise + """ + try: + client.remove_container(instance_name, force=True) + client.remove_image(image) + return True + except Exception as e: + dlog.error("Error while undeploying Docker container/image: {0}".format(e)) + return False + +def reconfigure(client, instance_name, command): + """ Execute the Reconfig script in the Docker container """ + + # 'command' has 3 parts in a list (1 Command and 2 ARGs) + exec_Id = client.exec_create(container=instance_name, cmd=command) + + exec_start_resp = client.exec_start(exec_Id, stream=True) + + # Using a 'single' generator response to solve issue of 'start_exec' returning control after 6 minutes + for response in exec_start_resp: + dlog.info("Reconfig Script execution response: {:}".format(response)) + exec_start_resp.close() + break diff --git a/mod/onboardingapi/dcae_cli/util/exc.py b/mod/onboardingapi/dcae_cli/util/exc.py new file mode 100644 index 0000000..7f41e0b --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/exc.py @@ -0,0 +1,35 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +""" +Provides base exceptions +""" +import click + + +class DcaeException(click.ClickException): + '''Base exception for dcae_util''' + + +try: + FileNotFoundError = FileNotFoundError +except NameError: + FileNotFoundError = IOError diff --git a/mod/onboardingapi/dcae_cli/util/inputs.py b/mod/onboardingapi/dcae_cli/util/inputs.py new file mode 100644 index 0000000..4b212e2 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/inputs.py @@ -0,0 +1,40 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +""" +Functions for handling inputs +""" + +class InputsValidationError(RuntimeError): + pass + +def filter_entries(inputs_map, spec): + """Filter inputs entries that are not in the spec""" + param_names = [ p["name"] for p in spec["parameters"] \ + if "sourced_at_deployment" in p and p["sourced_at_deployment"] ] + + # Identify any missing parameters from inputs_map + missing = list(filter(lambda pn: pn not in inputs_map, param_names)) + + if missing: + raise InputsValidationError( + "Inputs map is missing keys: {0}".format(missing)) + + return { pn: inputs_map[pn] for pn in param_names } diff --git a/mod/onboardingapi/dcae_cli/util/logger.py b/mod/onboardingapi/dcae_cli/util/logger.py new file mode 100644 index 0000000..e8f21ce --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/logger.py @@ -0,0 +1,56 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ +# -*- coding: utf-8 -*- +""" +Provides logger utilities +""" +import logging + +import click + + +class ClickHandler(logging.StreamHandler): + + def emit(self, record): + msg = self.format(record) + click.echo(msg) + + +_clihandler = ClickHandler() +_formatter = logging.Formatter('%(name)s | %(levelname)s | %(message)s') +_clihandler.setFormatter(_formatter) + +_root = logging.getLogger('DCAE') +_root.setLevel(logging.WARNING) +_root.handlers = [_clihandler, ] +_root.propagate = False + + +def get_logger(name=None): + return _root if name is None else _root.getChild(name) + + +def set_verbose(): + _root.setLevel(logging.INFO) + + +def set_quiet(): + _root.setLevel(logging.WARNING) diff --git a/mod/onboardingapi/dcae_cli/util/policy.py b/mod/onboardingapi/dcae_cli/util/policy.py new file mode 100644 index 0000000..2da9f0b --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/policy.py @@ -0,0 +1,64 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +""" +Function for Policy schema validation +""" + +from jsonschema import validate, ValidationError +from dcae_cli.util.logger import get_logger +from dcae_cli.util import reraise_with_msg + +logger = get_logger('policy') + +_SCHEMA = { + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Schema for policy changes", + "type": "object", + "properties": { + "updated_policies": {"type": "array"}, + "removed_policies": {"type": "array"}, + "policies": {"type": "array"} + }, + "additionalProperties": False +} + +_validation_msg = """ +Is your Policy file a valid json? +Does your Policy file follow this format? + +{ + "updated_policies": [{},{},...], + "removed_policies": [{},{},...], + "policies": [{},{},...] +} +""" + + +def validate_against_policy_schema(policy_file): + """Validate the policy file against the schema""" + + try: + validate(policy_file, _SCHEMA) + except ValidationError as e: + logger.error("Policy file validation issue") + logger.error(_validation_msg) + reraise_with_msg(e, as_dcae=True) +
\ No newline at end of file diff --git a/mod/onboardingapi/dcae_cli/util/profiles.py b/mod/onboardingapi/dcae_cli/util/profiles.py new file mode 100644 index 0000000..83ff6b5 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/profiles.py @@ -0,0 +1,238 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Provides dcae cli profile variables +""" +import os +from collections import namedtuple + +import six +import click + +from dcae_cli import util +from dcae_cli.util import get_app_dir, get_pref, write_pref +from dcae_cli.util import config +from dcae_cli.util.config import get_config, update_config +from dcae_cli.util.exc import DcaeException +from dcae_cli.util.logger import get_logger + + +logger = get_logger('Profile') + + +# reserved profile names +ACTIVE = 'active' +_reserved_names = {ACTIVE} + + +# create enums for profile keys so that they can be imported for testing, instead of using literals +CONSUL_HOST = 'consul_host' +CONFIG_BINDING_SERVICE = 'config_binding_service' +CDAP_BROKER = 'cdap_broker' +DOCKER_HOST = 'docker_host' + +# TODO: Should probably lift this strict list of allowed keys and repurpose to be +# keys that are required. +_allowed_keys = set([CONSUL_HOST, CONFIG_BINDING_SERVICE, CDAP_BROKER, DOCKER_HOST]) +Profile = namedtuple('Profile', _allowed_keys) + + +def _create_stub_profile(): + """Create a new stub of a profile""" + return { k: "" for k in _allowed_keys } + + +def _fmt_seq(seq): + '''Returns a sorted string formatted list''' + return list(sorted(map(str, seq))) + + +def get_profiles_path(): + '''Returns the absolute path to the profiles file''' + return os.path.join(get_app_dir(), 'profiles.json') + + +def get_active_name(): + '''Returns the active profile name in the config''' + return config.get_active_profile() + + +def _set_active_name(name): + '''Sets the active profile name in the config''' + update_config(active_profile=name) + + +class ProfilesInitError(RuntimeError): + pass + +def reinit_profiles(): + """Reinitialize profiles + + Grab the remote profiles and merge with the local profiles if there is one. + + Returns: + -------- + Dict of complete new profiles + """ + # Grab the remote profiles and merge it in + try: + server_url = config.get_server_url() + new_profiles = util.fetch_file_from_web(server_url, "/dcae-cli/profiles.json") + except: + # Failing to pull seed profiles from remote server is not considered + # a problem. Just continue and give user the option to use an empty + # default. 
+ if click.confirm("Could not download initial profiles from remote server. Set empty default?"): + new_profiles = {"default": { "consul_host": "", + "config_binding_service": "config_binding_service", + "cdap_broker": "cdap_broker", "docker_host": ""}} + else: + raise ProfilesInitError("Could not setup dcae-cli profiles") + + profiles_path = get_profiles_path() + + if util.pref_exists(profiles_path): + existing_profiles = get_profiles(include_active=False) + # Make sure to clobber existing values and not other way + existing_profiles.update(new_profiles) + new_profiles = existing_profiles + + write_pref(new_profiles, profiles_path) + return new_profiles + + +def get_profiles(user_only=False, include_active=True): + '''Returns a dict containing all available profiles + + Example of the returned dict: + { + "profile-foo": { + "some_variable_A": "some_value_A", + "some_variable_B": "some_value_B", + "some_variable_C": "some_value_C" + } + } + ''' + try: + profiles = get_pref(get_profiles_path(), reinit_profiles) + except ProfilesInitError as e: + raise DcaeException("Failed to initialize profiles: {0}".format(e)) + + if user_only: + return profiles + + if include_active: + active_name = get_active_name() + if active_name not in profiles: + raise DcaeException("Active profile '{}' does not exist. How did this happen?".format(active_name)) + profiles[ACTIVE] = profiles[active_name] + + return profiles + + +def get_profile(name=ACTIVE): + '''Returns a `Profile` object''' + profiles = get_profiles() + + if name not in profiles: + raise DcaeException("Specified profile '{}' does not exist.".format(name)) + + try: + profile = Profile(**profiles[name]) + except TypeError as e: + raise DcaeException("Specified profile '{}' is malformed.".format(name)) + + return profile + + +def create_profile(name, **kwargs): + '''Creates a new profile''' + _assert_not_reserved(name) + + profiles = get_profiles(user_only=True) + if name in profiles: + raise DcaeException("Profile '{}' already exists.".format(name)) + + profile = _create_stub_profile() + profile.update(kwargs) + _assert_valid_profile(profile) + + profiles[name] = profile + _write_profiles(profiles) + + +def delete_profile(name): + '''Deletes a profile''' + _assert_not_reserved(name) + profiles = get_profiles(user_only=True) + if name not in profiles: + raise DcaeException("Profile '{}' does not exist.".format(name)) + if name == get_active_name(): + logger.warning("Profile '{}' is currently active. Activate another profile first." + .format(name)) + return False + del profiles[name] + _write_profiles(profiles) + return True + + +def update_profile(name, **kwargs): + '''Creates or updates a profile''' + _assert_not_reserved(name) + _assert_valid_profile(kwargs) + + profiles = get_profiles(user_only=True) + if name not in profiles: + raise DcaeException("Profile '{}' does not exist.".format(name)) + + profiles[name].update(kwargs) + _write_profiles(profiles) + + +def _assert_valid_profile(params): + '''Raises DcaeException if the profile parameter dict is invalid''' + if not params: + raise DcaeException('No update key-value pairs were provided.') + keys = set(params.keys()) + if not _allowed_keys.issuperset(keys): + invalid_keys = keys - _allowed_keys + raise DcaeException("Invalid keys {} detected. 
Only keys {} are supported.".format(_fmt_seq(invalid_keys), _fmt_seq(_allowed_keys))) + + +def _assert_not_reserved(name): + '''Raises DcaeException if the profile is reserved''' + if name in _reserved_names: + raise DcaeException("Profile '{}' is reserved and cannot be modified.".format(name)) + + +def _write_profiles(profiles): + '''Writes the profiles dictionary to disk''' + return write_pref(profiles, path=get_profiles_path()) + + +def activate_profile(name): + '''Modifies the config and sets a new active profile''' + avail_profiles = set(get_profiles().keys()) - {ACTIVE, } + if name not in avail_profiles: + raise DcaeException("Profile name '{}' does not exist. Please select from {} or create a new profile.".format(name, _fmt_seq(avail_profiles))) + _set_active_name(name) diff --git a/mod/onboardingapi/dcae_cli/util/run.py b/mod/onboardingapi/dcae_cli/util/run.py new file mode 100644 index 0000000..293c725 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/run.py @@ -0,0 +1,293 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +""" +Provides utilities for running components +""" +import time +import six +from functools import partial +import click +from dcae_cli.util import docker_util as du +from dcae_cli.util import dmaap, inputs +from dcae_cli.util.cdap_util import run_component as run_cdap_component +from dcae_cli.util.exc import DcaeException +from dcae_cli.util import discovery as dis +from dcae_cli.util.discovery import get_user_instances, config_context, \ + replace_dots +import dcae_cli.util.profiles as profiles +from dcae_cli.util.logger import get_logger +from dcae_cli.catalog.mock.catalog import build_config_keys_map, \ + get_data_router_subscriber_route +# This seems to be an abstraction leak +from dcae_cli.catalog.mock.schema import apply_defaults_docker_config + + +log = get_logger('Run') + + +def _get_instances(user, additional_user=None): + instance_map = get_user_instances(user) + + if additional_user: + # Merge current user with another user's instance map to be available to + # connect to + instance_map_additional = get_user_instances(additional_user) + log.info("#Components for {0}: {1}".format(additional_user, + len(instance_map_additional))) + instance_map.update(instance_map_additional) + + # REVIEW: Getting instances always returns back component names with dots + # even though the component name could originally have dots or dashes. 
+    # To put this dot vs dash headache to rest, we have to understand what the
+    # discovery abstraction should be. Should the discovery be aware of this type
+    # of naming magic? If so then the discovery abstraction may need to be
+    # enhanced to be catalog aware to do name verification queries. If not then
+    # the dot-to-dash transformation might not belong inside of the discovery
+    # abstraction and the higher level should do that.
+    #
+    # Another possible fix is to map the dots to something that's less likely to
+    # be used, such as multiple dashes. This would help disambiguate between a
+    # forced mapping and a component name with dashes.
+    #
+    # In the meantime, here is a fix to address the issue where a downstream component
+    # can't be matched when the downstream component uses dashes. This affects
+    # the subsequent calls:
+    #
+    # - catalog.get_discovery* query
+    # - create_config
+    #
+    # The instance map will contain entries where the names will be with dots and
+    # with dashes. There should be no harm because only one set should match. The
+    # assumption is that people won't have the same name with dots and with dashes.
+    instance_map_dashes = { (replace_dots(k[0]), k[1]): v
+            for k, v in six.iteritems(instance_map) }
+    instance_map.update(instance_map_dashes)
+
+    return instance_map
+
+
+def _update_delivery_urls(spec, target_host, dmaap_map):
+    """Updates the delivery urls for data router subscribers"""
+    # Try to stick in the more appropriate delivery url, which is not realized
+    # until after deployment because you need the ip and port. This is not
+    # actually needed by the component, but it is kept because it might be
+    # useful for component developers to **see** this info.
+    get_route_func = partial(get_data_router_subscriber_route, spec)
+    target_base_url = "http://{0}".format(target_host)
+    return dmaap.update_delivery_urls(get_route_func, target_base_url,
+            dmaap_map)
+
+
+def _verify_component(name, max_wait, consul_host):
+    """Verify that the component is healthy
+
+    Args:
+    -----
+    max_wait (integer): limit to how many attempts to make, which translates to
+        seconds because each sleep is one second. 0 means infinite.
+
+    Return:
+    -------
+    True if the component is healthy, else False
+    """
+    num_attempts = 1
+
+    while True:
+        if dis.is_healthy(consul_host, name):
+            return True
+        else:
+            num_attempts += 1
+
+            if max_wait > 0 and max_wait < num_attempts:
+                return False
+
+            time.sleep(1)
+
+
+def run_component(user, cname, cver, catalog, additional_user, attached, force,
+        dmaap_map, inputs_map, external_ip=None):
+    '''Runs a component based on the component type
+
+    Args
+    ----
+    force: (boolean)
+        Continue to run even when there are no valid downstream components,
+        when this flag is set to True.
+    dmaap_map: (dict) config_key to message router or data router connections.
+        Used as a manual way to make this information available to the component.
+ inputs_map: (dict) config_key to value that is intended to be provided at + deployment time as an input + ''' + cname, cver = catalog.verify_component(cname, cver) + ctype = catalog.get_component_type(cname, cver) + profile = profiles.get_profile() + + instance_map = _get_instances(user, additional_user) + neighbors = six.iterkeys(instance_map) + + + dmaap_config_keys = catalog.get_discovery_for_dmaap(cname, cver) + + if not dmaap.validate_dmaap_map_entries(dmaap_map, *dmaap_config_keys): + return + + if ctype == 'docker': + params, interface_map = catalog.get_discovery_for_docker(cname, cver, neighbors) + should_wait = attached + + spec = catalog.get_component_spec(cname, cver) + config_key_map = build_config_keys_map(spec) + inputs_map = inputs.filter_entries(inputs_map, spec) + + dmaap_map = _update_delivery_urls(spec, profile.docker_host.split(":")[0], + dmaap_map) + + with config_context(user, cname, cver, params, interface_map, + instance_map, config_key_map, dmaap_map=dmaap_map, inputs_map=inputs_map, + always_cleanup=should_wait, force_config=force) as (instance_name, _): + image = catalog.get_docker_image(cname, cver) + docker_config = catalog.get_docker_config(cname, cver) + + docker_logins = dis.get_docker_logins() + + if should_wait: + du.deploy_component(profile, image, instance_name, docker_config, + should_wait=True, logins=docker_logins) + else: + result = du.deploy_component(profile, image, instance_name, docker_config, + logins=docker_logins) + log.debug(result) + + if result: + log.info("Deployed {0}. Verifying..".format(instance_name)) + + # TODO: Be smarter here but for now wait longer i.e. 5min + max_wait = 300 # 300s == 5min + + if _verify_component(instance_name, max_wait, + dis.default_consul_host()): + log.info("Container is up and healthy") + + # This block of code is used to construct the delivery + # urls for data router subscribers and to display it for + # users to help with manually provisioning feeds. + results = dis.lookup_instance(dis.default_consul_host(), + instance_name) + target_host = dis.parse_instance_lookup(results) + + dmaap_map = _update_delivery_urls(spec, target_host, dmaap_map) + delivery_urls = dmaap.list_delivery_urls(dmaap_map) + + if delivery_urls: + msg = "\n".join(["\t{k}: {url}".format(k=k, url=url) + for k, url in delivery_urls]) + msg = "\n\n{0}\n".format(msg) + log.warn("Your component is a data router subscriber. 
Here are the delivery urls: {0}".format(msg))
+                    else:
+                        log.warn("Container never became healthy")
+                else:
+                    raise DcaeException("Failed to deploy docker component")
+
+    elif ctype == 'cdap':
+        (jar, config, spec) = catalog.get_cdap(cname, cver)
+        config_key_map = build_config_keys_map(spec)
+        inputs_map = inputs.filter_entries(inputs_map, spec)
+
+        params, interface_map = catalog.get_discovery_for_cdap(cname, cver, neighbors)
+
+        with config_context(user, cname, cver, params, interface_map, instance_map,
+                config_key_map, dmaap_map=dmaap_map, inputs_map=inputs_map, always_cleanup=False,
+                force_config=force) as (instance_name, templated_conf):
+            run_cdap_component(catalog, params, instance_name, profile, jar, config, spec, templated_conf)
+    else:
+        raise DcaeException("Unsupported component type for run")
+
+
+def dev_component(user, catalog, specification, additional_user, force, dmaap_map,
+        inputs_map):
+    '''Sets up the discovery layer for an in-development component
+
+    The passed-in component specification is:
+
+    * Validated
+    * Used to generate the corresponding application config
+    * Used to push the application config and rels key into Consul
+
+    This allows developers to play with their spec and the resulting configuration
+    outside of being in the catalog and in a container.
+
+    Args
+    ----
+    user: (string) user name
+    catalog: (object) instance of MockCatalog
+    specification: (dict) experimental component specification
+    additional_user: (string) another user name used to source additional
+        component instances
+    force: (boolean)
+        Continue to run even when there are no valid downstream components when
+        this flag is set to True.
+    dmaap_map: (dict) config_key to message router connections. Used as a
+        manual way to make this information available to the component.
+    inputs_map: (dict) config_key to value that is intended to be provided at
+        deployment time as an input
+    '''
+    instance_map = _get_instances(user, additional_user)
+    neighbors = six.iterkeys(instance_map)
+
+    params, interface_map, dmaap_config_keys = catalog.get_discovery_from_spec(
+        user, specification, neighbors)
+
+    if not dmaap.validate_dmaap_map_entries(dmaap_map, *dmaap_config_keys):
+        return
+
+    cname = specification["self"]["name"]
+    cver = specification["self"]["version"]
+    config_key_map = build_config_keys_map(specification)
+    inputs_map = inputs.filter_entries(inputs_map, specification)
+
+    dmaap_map = _update_delivery_urls(specification, "localhost", dmaap_map)
+
+    with config_context(user, cname, cver, params, interface_map, instance_map,
+            config_key_map, dmaap_map, inputs_map=inputs_map, always_cleanup=True,
+            force_config=force) \
+        as (instance_name, templated_conf):
+
+        click.echo("Ready for component development")
+
+        if specification["self"]["component_type"] == "docker":
+            # The env building is only for docker right now
+            docker_config = apply_defaults_docker_config(specification["auxilary"])
+            envs = du.build_envs(profiles.get_profile(), docker_config, instance_name)
+            envs_message = "\n".join(["export {0}={1}".format(k, v) for k, v in envs.items()])
+            envs_filename = "env_{0}".format(profiles.get_active_name())
+
+            with open(envs_filename, "w") as f:
+                f.write(envs_message)
+
+            click.echo()
+            click.echo("Set up these environment variables. 
Run \"source {0}\":".format(envs_filename)) + click.echo() + click.echo(envs_message) + click.echo() + else: + click.echo("Set the following as your HOSTNAME:\n {0}".format(instance_name)) + + input("Press any key to stop and to clean up") diff --git a/mod/onboardingapi/dcae_cli/util/tests/test_cdap_util.py b/mod/onboardingapi/dcae_cli/util/tests/test_cdap_util.py new file mode 100644 index 0000000..9282691 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/tests/test_cdap_util.py @@ -0,0 +1,93 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +from dcae_cli.util.cdap_util import _merge_spec_config_into_broker_put, normalize_cdap_params + + +def test_normalize_cdap_params(): + spec = {"parameters" : {}} + normalized = normalize_cdap_params(spec) + assert normalized == {"app_preferences" : {}, + "app_config" : {}, + "program_preferences" : []} + +def test_cdap_util(): + """ + Tests both _merge_spec_config_into_broker_put and normalize_cdap_params + """ + jar = "bahphomet.com/nexus/doomsday.jar" + config = { + "artifact_name" : "testname", + "artifact_version" : "6.6.6", + "streamname" : "stream", + "programs" : [{"program_type" : "flows", "program_id" : "flow_id"}], + "namespace" : "underworld" + } + spec = { + "self": { + "version": "6.6.6", + "description": "description", + "component_type": "cdap", + "name": "name" + }, + "parameters" : { + "app_preferences" : [{"name" : "he", "description" : "", "value" : "shall rise"}], + "program_preferences" : [{"program_type" : "flows", "program_id" : "flow_id", "program_pref" : [{"name": "foo", "description" : "", "value" : "bar"}]}] + }, + + "streams": { + "publishes": [], + "subscribes" : [] + }, + "services": { + "calls" : [], + 'provides': [ + {"request": {"format" : 'std.format_one', "version" : "1.0.0"}, + "response" : {"format" : "std.format_two", "version" : "1.5.0"}, + "service_name" : "baphomet", + "service_endpoint" : "rises", + "verb" : "GET"} + ] + }, + } + parsed_parameters = normalize_cdap_params(spec) + templated_conf = {"streams_publishes":{}, "streams_subscribes": {}, + "services_calls": {}} #TODO: Incorporate a test templated_conf + broker_put = _merge_spec_config_into_broker_put(jar, config, spec, parsed_parameters, templated_conf) + + expected = { + "app_config": {"services_calls" : {}, + "streams_publishes" : {}, + "streams_subscribes": {} + }, + "app_preferences": {"he" : "shall rise"}, + "artifact_name" : "testname", + "artifact_version" : "6.6.6", + "jar_url": "bahphomet.com/nexus/doomsday.jar", + "namespace": "underworld", + 
"program_preferences" : [{"program_type" : "flows", "program_id" : "flow_id", "program_pref" : {"foo" : "bar"}}], + "programs" : [{"program_type" : "flows", "program_id" : "flow_id"}], + "service_component_type": "cdap", + "services": [{"service_name" : "baphomet", "service_endpoint" : "rises", "endpoint_method" : "GET"}], + "streamname": "stream", + "cdap_application_type" : "program-flowlet" + } + + assert broker_put == expected diff --git a/mod/onboardingapi/dcae_cli/util/tests/test_config.py b/mod/onboardingapi/dcae_cli/util/tests/test_config.py new file mode 100644 index 0000000..3b4cd6e --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/tests/test_config.py @@ -0,0 +1,137 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +""" +Tests the config functionality +""" +import os, json +from functools import partial +from mock import patch + +import pytest +import click + +import dcae_cli +from dcae_cli.util import config, write_pref +from dcae_cli.util.config import get_app_dir, get_config, get_config_path + + +def test_no_config(monkeypatch, tmpdir): + '''Tests the creation and initialization of a config on a clean install''' + monkeypatch.setattr(click, "get_app_dir", lambda app: str(tmpdir.realpath())) + + mock_config = {'user': 'mock-user'} + + config_file = tmpdir.join("config.json") + config_file.write(json.dumps(mock_config)) + + assert get_config() == mock_config + + +def test_init_config_user(monkeypatch): + good_case = "abc123" + values = [ good_case, "d-e-f", "g*h*i", "j k l" ] + + def fake_input(values, message, type="red"): + return values.pop() + + monkeypatch.setattr(click, 'prompt', partial(fake_input, values)) + assert config._init_config_user() == good_case + + +def test_init_config(monkeypatch): + monkeypatch.setattr(config, '_init_config_user', lambda: "bigmama") + monkeypatch.setattr(config, '_init_config_server_url', + lambda: "http://some-nexus-in-the-sky.com") + monkeypatch.setattr(dcae_cli.util, 'fetch_file_from_web', + lambda server_url, path: { "db_url": "conn" }) + monkeypatch.setattr("dcae_cli._version.__version__", "2.X.X") + + expected = {'cli_version': '2.X.X', 'user': 'bigmama', 'db_url': 'conn', + 'server_url': 'http://some-nexus-in-the-sky.com', + 'active_profile': 'default' } + assert expected == config._init_config() + + # Test using of db fallback + + monkeypatch.setattr(dcae_cli.util, 'fetch_file_from_web', + lambda server_url, path: { "db_url": "" }) + + db_url = "postgresql://king:of@mountain:5432/dcae_onboarding_db" + + def 
fake_init_config_db_url(): + return db_url + + monkeypatch.setattr(config, "_init_config_db_url", + fake_init_config_db_url) + + assert db_url == config._init_config()["db_url"] + + monkeypatch.setattr(dcae_cli.util, 'fetch_file_from_web', + lambda server_url, path: {}) + + assert db_url == config._init_config()["db_url"] + + # Simulate error trying to fetch + + def fetch_simulate_error(server_url, path): + raise RuntimeError("Simulated error") + + monkeypatch.setattr(dcae_cli.util, 'fetch_file_from_web', + fetch_simulate_error) + # Case when user opts out of manually setting up + monkeypatch.setattr(click, "confirm", lambda msg: False) + + with pytest.raises(config.ConfigurationInitError): + config._init_config() + + +def test_should_force_reinit(): + bad_config = {} + assert config.should_force_reinit(bad_config) == True + + old_config = { "cli_version": "1.0.0" } + assert config.should_force_reinit(old_config) == True + + uptodate_config = { "cli_version": "2.0.0" } + assert config.should_force_reinit(uptodate_config) == False + + +def test_reinit_config(monkeypatch, tmpdir): + monkeypatch.setattr(click, "get_app_dir", lambda app: str(tmpdir.realpath())) + + new_config = { "user": "ninny", "db_url": "some-db" } + + def init(): + return new_config + + assert config._reinit_config(init) == new_config + + old_config = { "user": "super", "db_url": "other-db", "hidden": "yo" } + write_pref(old_config, get_config_path()) + + new_config["hidden"] = "yo" + assert config._reinit_config(init) == new_config + + +if __name__ == '__main__': + '''Test area''' + pytest.main([__file__, ]) diff --git a/mod/onboardingapi/dcae_cli/util/tests/test_discovery.py b/mod/onboardingapi/dcae_cli/util/tests/test_discovery.py new file mode 100644 index 0000000..2148ea3 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/tests/test_discovery.py @@ -0,0 +1,447 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +''' +Provides tests for the discovery module +''' +import json +from functools import partial +from copy import deepcopy + +import pytest + +from dcae_cli.util import discovery as dis +from dcae_cli.util.discovery import create_config, Consul, config_context, DiscoveryNoDownstreamComponentError + + +user = 'bob' +cname = 'asimov.test_comp' +cver = '0.0.0' +inst_pref = 'abc123' +params = {'param0': 12345} + + +def test_create_config(): + ''' + Test explanation: + 1. param1 in the component spec has 2 compatible component types, comp1 and comp2. however infrastructure + support only allows for 1. 
thus comp2 shouldn't make it to the rels.
+    2. comp1 has two instances, so both should make it to the rels
+    3. param2 is compatible with comp3, but there are no comp3 instances. thus it's missing from rels.
+    '''
+    expected_ckey = 'bob.abc123.0-0-0.asimov-test_comp'
+    expected_conf = {'param1': '{{1-1-1.foo-bar-comp1}}', 'param0': 12345, 'param2': '{{3-3-3.foo-bar-comp3}}'}
+    expected_rkey = 'bob.abc123.0-0-0.asimov-test_comp:rel'
+    expected_rels = ['bob.aaa111.1-1-1.foo-bar-comp1.suffix',
+                     'bob.bbb222.1-1-1.foo-bar-comp1.suffix',
+                     'bob.ddd444.3-3-3.foo-bar-comp3.suffix']
+    expected_dmaap_key = 'bob.abc123.0-0-0.asimov-test_comp:dmaap'
+    expected_dmaap_map = {}
+
+    interface_map = {'param1': [('foo.bar.comp1', '1.1.1'),
+                                ('foo.bar.comp2', '2.2.2')],
+                     'param2': [('foo.bar.comp3', '3.3.3')]
+                     }
+    instance_map = {('foo.bar.comp1', '1.1.1'): ['bob.aaa111.1-1-1.foo-bar-comp1.suffix',
+                                                 'bob.bbb222.1-1-1.foo-bar-comp1.suffix'],
+                    ('foo.bar.comp2', '2.2.2'): ['bob.ccc333.2-2-2.foo-bar-comp2.suffix'],
+                    ('foo.bar.comp3', '3.3.3'): ['bob.ddd444.3-3-3.foo-bar-comp3.suffix']}
+
+    ckey, conf, rkey, rels, dmaap_key, dmaap_map = create_config(user, cname, cver,
+        params, interface_map, instance_map, expected_dmaap_map, inst_pref)
+
+    assert ckey == expected_ckey
+    assert conf == expected_conf
+    assert rkey == expected_rkey
+    assert sorted(rels) == sorted(expected_rels)
+    assert dmaap_key == expected_dmaap_key
+    assert dmaap_map == expected_dmaap_map
+
+    #
+    # Fail cases: When a downstream dependency does not exist
+    #
+
+    # (1) Case when there's no actual instance
+    instance_map_missing_3 = deepcopy(instance_map)
+    instance_map_missing_3[('foo.bar.comp3', '3.3.3')] = []
+
+    with pytest.raises(DiscoveryNoDownstreamComponentError):
+        create_config(user, cname, cver, params, interface_map, instance_map_missing_3,
+            expected_dmaap_map, inst_pref)
+
+    # (2) Case when there's no existence in instance_map
+    interface_map_extra = deepcopy(interface_map)
+    interface_map_extra["param_not_exist"] = []
+
+    with pytest.raises(DiscoveryNoDownstreamComponentError):
+        create_config(user, cname, cver, params, interface_map_extra, instance_map,
+            expected_dmaap_map, inst_pref)
+
+    #
+    # Force the fail cases to succeed
+    #
+
+    # (1)
+    ckey, conf, rkey, rels, dmaap_key, dmaap_map = create_config(user, cname, cver,
+        params, interface_map, instance_map_missing_3, expected_dmaap_map, inst_pref,
+        force=True)
+
+    assert ckey == expected_ckey
+    assert conf == expected_conf
+    assert rkey == expected_rkey
+    # Remove the foo.bar.comp3:3.3.3 instance because we are simulating when that
+    # instance does not exist
+    assert sorted(rels) == sorted(expected_rels[:2])
+    assert dmaap_key == expected_dmaap_key
+    assert dmaap_map == expected_dmaap_map
+
+    # (2)
+    ckey, conf, rkey, rels, dmaap_key, dmaap_map = create_config(user, cname, cver,
+        params, interface_map_extra, instance_map, expected_dmaap_map, inst_pref,
+        force=True)
+
+    expected_conf["param_not_exist"] = "{{}}"
+
+    assert ckey == expected_ckey
+    assert conf == expected_conf
+    assert rkey == expected_rkey
+    assert sorted(rels) == sorted(expected_rels)
+    assert dmaap_key == expected_dmaap_key
+    assert dmaap_map == expected_dmaap_map
+
+    #
+    # Test different dashes scenario
+    #
+
+    # Component has been added with dashes but the instance comes back with dots
+    # because the discovery layer always brings back instances with dots
+    interface_map_dashes = {'param1': [('foo-bar-comp1', '1.1.1')]}
+    instance_map_dashes = {('foo.bar.comp1', '1.1.1'):
['bob.aaa111.1-1-1.foo-bar-comp1.suffix']} + + with pytest.raises(DiscoveryNoDownstreamComponentError): + create_config(user, cname, cver, params, interface_map_dashes, instance_map_dashes, + expected_dmaap_map, inst_pref) + + # The fix in v2.3.2 was to have the caller to send in instances with dots and + # with dashes + instance_map_dashes = { + ('foo.bar.comp1', '1.1.1'): ['bob.aaa111.1-1-1.foo-bar-comp1.suffix'], + ('foo-bar-comp1', '1.1.1'): ['bob.aaa111.1-1-1.foo-bar-comp1.suffix'] } + + ckey, conf, rkey, rels, dmaap_key, dmaap_map = create_config(user, cname, cver, + params, interface_map_dashes, instance_map_dashes, expected_dmaap_map, inst_pref) + + # The expecteds have changed because the inputs have been narrowed to just + # one + assert ckey == expected_ckey + assert conf == {'param1': '{{1-1-1.foo-bar-comp1}}', 'param0': 12345} + assert rkey == expected_rkey + assert sorted(rels) == sorted(['bob.aaa111.1-1-1.foo-bar-comp1.suffix']) + assert dmaap_key == expected_dmaap_key + assert dmaap_map == expected_dmaap_map + + # Pass in a non-empty dmaap map + dmaap_map_input = { "some-config-key": { "type": "message_router", + "dmaap_info": {"topic_url": "http://some-topic-url.com/abc"} } } + del expected_conf["param_not_exist"] + expected_conf["some-config-key"] = { "type": "message_router", + "dmaap_info": "<<some-config-key>>" } + + ckey, conf, rkey, rels, dmaap_key, dmaap_map = create_config(user, cname, cver, + params, interface_map, instance_map, dmaap_map_input, inst_pref) + + assert ckey == expected_ckey + assert conf == expected_conf + assert rkey == expected_rkey + assert sorted(rels) == sorted(expected_rels) + assert dmaap_key == expected_dmaap_key + assert dmaap_map == {'some-config-key': {'topic_url': 'http://some-topic-url.com/abc'}} + + +@pytest.mark.skip(reason="Not a pure unit test") +def test_config_context(mock_cli_config): + interface_map = {'param1': [('foo.bar.comp1', '1.1.1'), + ('foo.bar.comp2', '2.2.2')], + 'param2': [('foo.bar.comp3', '3.3.3')] + } + instance_map = {('foo.bar.comp1', '1.1.1'): ['bob.aaa111.1-1-1.foo-bar-comp1.suffix', + 'bob.bbb222.1-1-1.foo-bar-comp1.suffix'], + ('foo.bar.comp2', '2.2.2'): ['bob.ccc333.2-2-2.foo-bar-comp2.suffix'], + ('foo.bar.comp3', '3.3.3'): ['bob.ddd444.3-3-3.foo-bar-comp3.suffix']} + + config_key_map = {"param1": {"group": "streams_publishes", "type": "http"}, + "param2": {"group": "services_calls", "type": "http"}} + + ckey = 'bob.abc123.0-0-0.asimov-test_comp' + rkey = 'bob.abc123.0-0-0.asimov-test_comp:rel' + expected_conf = {"streams_publishes": {'param1': '{{1-1-1.foo-bar-comp1}}'}, + 'param0': 12345, "streams_subscribes": {}, + "services_calls": {'param2': '{{3-3-3.foo-bar-comp3}}'}} + expected_rels = ['bob.aaa111.1-1-1.foo-bar-comp1.suffix', + 'bob.bbb222.1-1-1.foo-bar-comp1.suffix', + 'bob.ddd444.3-3-3.foo-bar-comp3.suffix'] + + c = Consul(dis.default_consul_host()) + with config_context(user, cname, cver, params, interface_map, instance_map, + config_key_map, instance_prefix=inst_pref) as (instance,_): + assert json.loads(c.kv.get(ckey)[1]['Value'].decode('utf-8')) == expected_conf + assert sorted(json.loads(c.kv.get(rkey)[1]['Value'].decode('utf-8'))) \ + == sorted(expected_rels) + assert instance == ckey + + assert c.kv.get(ckey)[1] is None + assert c.kv.get(rkey)[1] is None + + # Fail case: When a downstream dependency does not exist + interface_map_extra = deepcopy(interface_map) + interface_map_extra["param_not_exist"] = [] + + with pytest.raises(DiscoveryNoDownstreamComponentError): + with 
config_context(user, cname, cver, params, interface_map_extra, + instance_map, config_key_map, instance_prefix=inst_pref) as (instance,_): + pass + + # Force fail case to succeed + expected_conf["param_not_exist"] = "{{}}" + + with config_context(user, cname, cver, params, interface_map_extra, + instance_map, config_key_map, instance_prefix=inst_pref, + force_config=True) as (instance,_): + assert json.loads(c.kv.get(ckey)[1]['Value'].decode('utf-8')) == expected_conf + assert sorted(json.loads(c.kv.get(rkey)[1]['Value'].decode('utf-8'))) \ + == sorted(expected_rels) + assert instance == ckey + + +def test_inst_regex(): + ckey = 'bob.abc123.0-0-0.asimov-test_comp' + match = dis._inst_re.match(ckey) + assert match != None + + # Big version case + + ckey = 'bob.abc123.100-100-100.asimov-test_comp' + match = dis._inst_re.match(ckey) + assert match != None + + +def test_is_healthy_pure(): + component = { 'CreateIndex': 204546, 'Flags': 0, + 'Key': 'mike.21fbcabd-fac1-4b9b-9d18-2f624bfa44a5.0-4-0.sandbox-platform-dummy_subscriber', 'LockIndex': 0, 'ModifyIndex': 204546, + 'Value': b'{}' } + + component_health_good = ('262892', + [{'Checks': [{'CheckID': 'serfHealth', + 'CreateIndex': 3, + 'ModifyIndex': 3, + 'Name': 'Serf Health Status', + 'Node': 'agent-one', + 'Notes': '', + 'Output': 'Agent alive and reachable', + 'ServiceID': '', + 'ServiceName': '', + 'Status': 'passing'}, + {'CheckID': 'service:rework-central-swarm-master:mike.21fbcabd-fac1-4b9b-9d18-2f624bfa44a5.0-4-0.sandbox-platform-dummy_subscriber:8080', + 'CreateIndex': 204550, + 'ModifyIndex': 204551, + 'Name': 'Service ' + "'mike.21fbcabd-fac1-4b9b-9d18-2f624bfa44a5.0-4-0.sandbox-platform-dummy_subscriber' " + 'check', + 'Node': 'agent-one', + 'Notes': '', + 'Output': '', + 'ServiceID': 'rework-central-swarm-master:mike.21fbcabd-fac1-4b9b-9d18-2f624bfa44a5.0-4-0.sandbox-platform-dummy_subscriber:8080', + 'ServiceName': 'mike.21fbcabd-fac1-4b9b-9d18-2f624bfa44a5.0-4-0.sandbox-platform-dummy_subscriber', + 'Status': 'passing'}], + 'Node': {'Address': '10.170.2.17', + 'CreateIndex': 3, + 'ModifyIndex': 262877, + 'Node': 'agent-one', + 'TaggedAddresses': {'wan': '10.170.2.17'}}, + 'Service': {'Address': '196.207.170.175', + 'CreateIndex': 204550, + 'EnableTagOverride': False, + 'ID': 'rework-central-swarm-master:mike.21fbcabd-fac1-4b9b-9d18-2f624bfa44a5.0-4-0.sandbox-platform-dummy_subscriber:8080', + 'ModifyIndex': 204551, + 'Port': 33064, + 'Service': 'mike.21fbcabd-fac1-4b9b-9d18-2f624bfa44a5.0-4-0.sandbox-platform-dummy_subscriber', + 'Tags': None}}]) + + assert True == dis._is_healthy_pure(lambda name: component_health_good, component) + + # Case: Check is failing + + component_health_bad = deepcopy(component_health_good) + # NOTE: The failed status here. 
Not sure if this is what Consul actually sends
+    # but at least it's not "passing"
+    component_health_bad[1][0]["Checks"][0]["Status"] = "failing"
+
+    assert False == dis._is_healthy_pure(lambda name: component_health_bad, component)
+
+    # Case: No health for a component
+
+    component_health_nothing = ('262892', [])
+    assert False == dis._is_healthy_pure(lambda name: component_health_nothing, component)
+
+
+def test_get_instances_from_kv():
+
+    def get_from_kv_fake(result, user, recurse=True):
+        return "don't care about first arg", result
+
+    user = "jane"
+    kvs_nothing = []
+
+    assert dis._get_instances_from_kv(partial(get_from_kv_fake, kvs_nothing), user) == []
+
+    kvs_success = [ { "Value": "some value", "Key": "jane.1344a03a-06a8-4b92-bfac-d8f89df0c0cd.1-0-0.dcae-controller-ves-collector:rel" },
+                    { "Value": "some value", "Key": "jane.1344a03a-06a8-4b92-bfac-d8f89df0c0cd.1-0-0.dcae-controller-ves-collector" } ]
+
+    assert dis._get_instances_from_kv(partial(get_from_kv_fake, kvs_success), user) == ["jane.1344a03a-06a8-4b92-bfac-d8f89df0c0cd.1-0-0.dcae-controller-ves-collector"]
+
+    kvs_partial = [ { "Value": "some value", "Key": "jane.1344a03a-06a8-4b92-bfac-d8f89df0c0cd.1-0-0.dcae-controller-ves-collector:rel" } ]
+
+    assert dis._get_instances_from_kv(partial(get_from_kv_fake, kvs_partial), user) == ["jane.1344a03a-06a8-4b92-bfac-d8f89df0c0cd.1-0-0.dcae-controller-ves-collector"]
+
+
+def test_get_instances_from_catalog():
+
+    def get_from_catalog_fake(result):
+        return ("some Consul index", result)
+
+    user = "jane"
+    services_nothing = {}
+
+    assert dis._get_instances_from_catalog(
+        partial(get_from_catalog_fake, services_nothing), user) == []
+
+    services_no_matching = { '4f09bb72-8578-4e82-a6a4-9b7d679bd711.cdap_app_hello_world.hello-world-cloudify-test': [],
+            '666.fake_testing_service.rework-central.com': [],
+            'Platform_Dockerhost_Solutioning_Test': [],
+            'jack.2271ec6b-9224-4f42-b0b0-bfa91b41218f.1-0-1.cdap-event-proc-map-app': [],
+            'jack.bca28c8c-a352-41f1-81bc-63ff46db2582.1-0-1.cdap-event-proc-supplement-app':
+            [] }
+
+    assert dis._get_instances_from_catalog(
+        partial(get_from_catalog_fake, services_no_matching), user) == []
+
+    services_success = { '4f09bb72-8578-4e82-a6a4-9b7d679bd711.cdap_app_hello_world.hello-world-cloudify-test': [],
+            '666.fake_testing_service.rework-central.com': [],
+            'Platform_Dockerhost_Solutioning_Test': [],
+            'jack.2271ec6b-9224-4f42-b0b0-bfa91b41218f.1-0-1.cdap-event-proc-map-app': [],
+            'jane.bca28c8c-a352-41f1-81bc-63ff46db2582.1-0-1.cdap-event-proc-supplement-app':
+            [] }
+
+    assert dis._get_instances_from_catalog(
+        partial(get_from_catalog_fake, services_success), user) == ['jane.bca28c8c-a352-41f1-81bc-63ff46db2582.1-0-1.cdap-event-proc-supplement-app']
+
+
+def test_merge_instances():
+    user = "somebody"
+    group_one = [ "123", "456" ]
+    group_two = [ "123", "abc" ]
+    group_three = []
+
+    assert sorted(dis._merge_instances(user, lambda user: group_one, lambda user: group_two,
+        lambda user: group_three)) == sorted([ "123", "456", "abc" ])
+
+
+def test_make_instance_map():
+    instances_latest_format = ["mike.112e4faa-2ac8-4b13-93e9-8924150538d5.0-5-0.sandbox-platform-laika"]
+
+    instances_map = dis._make_instances_map(instances_latest_format)
+    assert instances_map.get(("sandbox.platform.laika", "0.5.0")) == set(instances_latest_format)
+
+
+def test_get_component_instances(monkeypatch):
+    instances = [
+        'jane.b493b48b-5fdf-4c1d-bd2a-8ce747b918ba.1-0-0.dcae-controller-ves-collector',
'jane.2455ec5c-67e6-4d4d-8581-79037c7b5f8e.1-0-0.dcae-controller-ves-collector.rework-central.dcae.com', + 'jane.bfbb1356-d703-4007-8799-759a9e1fc8c2.1-0-0.dcae-controller-ves-collector.rework-central.dcae.com', + 'jane.89d82ff6-1482-4c01-8758-db9325aad085.1-0-0.dcae-controller-ves-collector' + ] + + instances_map = { ('dcae.controller.ves.collector', '1.0.0'): set(instances) } + + def get_user_instances_mock(user, consul_host=None, filter_instances_func=None): + return instances_map + + monkeypatch.setattr(dis, 'get_user_instances', get_user_instances_mock) + + def always_true_filter(consul_host, instance): + return True + + # Test base case + + user = "jane" + cname = "dcae.controller.ves.collector" + cver = "1.0.0" + consul_host = "bogus" + + assert sorted(dis._get_component_instances(always_true_filter, user, cname, cver, + consul_host)) == sorted(instances) + + # Test for dashes + + cname = "dcae-controller-ves-collector" + + assert sorted(dis._get_component_instances(always_true_filter, user, cname, cver, + consul_host)) == sorted(instances) + + +def test_group_config(): + config_key_map = {'call1': {'group': 'services_calls'}, 'pub1': {'type': 'http', 'group': 'streams_publishes'}, 'sub2': {'type': 'message_router', 'group': 'streams_subscribes'}, 'pub2': {'type': 'message_router', 'group': 'streams_publishes'}} + + config = { "call1": "{{yo}}", "pub1": "{{target}}", "some-param": 123, + "sub2": { "dmaap_info": "<<sub2>>" }, "pub2": { "dmaap_info": "<<pub2>>" } } + + gc = dis._group_config(config, config_key_map) + expected = {'services_calls': {'call1': '{{yo}}'}, 'streams_publishes': {'pub2': {'dmaap_info': '<<pub2>>'}, 'pub1': '{{target}}'}, 'some-param': 123, 'streams_subscribes': {'sub2': {'dmaap_info': '<<sub2>>'}}} + + assert gc == expected + + +def test_parse_instance_lookup(): + results = [{"ServiceAddress": "192.168.1.100", "ServicePort": "8080"}, + {"ServiceAddress": "10.100.1.100", "ServicePort": "8081"}] + assert dis.parse_instance_lookup(results) == "192.168.1.100:8080" + + +def test_apply_inputs(): + updated_config = dis._apply_inputs({"foo": "bar"}, {"foo": "baz"}) + assert updated_config == {"foo": "baz"} + + +def test_choose_consul_host(monkeypatch): + def fake_default_consul_host(): + return "default-consul-host" + + monkeypatch.setattr(dis, "default_consul_host", fake_default_consul_host) + assert "default-consul-host" == dis._choose_consul_host(None) + assert "provided-consul-host" == dis._choose_consul_host("provided-consul-host") + + +if __name__ == '__main__': + '''Test area''' + pytest.main([__file__, ]) diff --git a/mod/onboardingapi/dcae_cli/util/tests/test_dmaap.py b/mod/onboardingapi/dcae_cli/util/tests/test_dmaap.py new file mode 100644 index 0000000..dabc737 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/tests/test_dmaap.py @@ -0,0 +1,259 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +""" +Tests for dmaap module +""" +import pytest +from dcae_cli.util import dmaap +from dcae_cli.util.exc import DcaeException + + +def test_validate_dmaap_map_schema_message_router(): + def pack_her_up(entry): + return { "some-config-key": entry } + + good = { + "type": "message_router", + "aaf_username": "foo3", + "aaf_password": "bar3", + "dmaap_info": { + "client_role":"com.dcae.member", + "client_id":"1500462518108", + "location":"mtc5", + "topic_url":"https://dcae-msrt-ftl2.com:3905/events/com.dcae.dmaap.FTL2.TommyTestTopic2" + } + } + dmaap.validate_dmaap_map_schema(pack_her_up(good)) + + good_minimal = { + "type": "message_router", + "dmaap_info": { + "topic_url":"https://dcae-msrt-ftl2.com:3905/events/com.dcae.dmaap.FTL2.TommyTestTopic2" + } + } + dmaap.validate_dmaap_map_schema(pack_her_up(good_minimal)) + + bad_extra = { + "type": "message_router", + "aaf_username": "foo3", + "aaf_password": "bar3", + "something_else": "boo", + "dmaap_info": { + "client_role":"com.dcae.member", + "client_id":"1500462518108", + "location":"mtc5", + "topic_url":"https://dcae-msrt-ftl2.com:3905/events/com.dcae.dmaap.FTL2.TommyTestTopic2" + } + } + dm = { "some-config-key": bad_extra } + + + with pytest.raises(DcaeException): + dmaap.validate_dmaap_map_schema(dm) + + bad_missing = { + "type": "message_router", + "aaf_username": "foo3", + "aaf_password": "bar3", + "dmaap_info": { + "client_role":"com.dcae.member", + "client_id":"1500462518108", + "location":"mtc5" + } + } + dm = { "some-config-key": bad_missing } + + with pytest.raises(DcaeException): + dmaap.validate_dmaap_map_schema(dm) + + +def test_validate_dmaap_map_schema_data_router(): + def pack_her_up(entry): + return { "some-config-key": entry } + + # Publishers + good = { + "type": "data_router", + "dmaap_info": { + "location": "mtc5", + "publish_url": "http://some-publish-url/123", + "log_url": "http://some-log-url/456", + "username": "jane", + "password": "abc" + } + } + dmaap.validate_dmaap_map_schema(pack_her_up(good)) + + good_minimal = { + "type": "data_router", + "dmaap_info": { + "publish_url": "http://some-publish-url/123" + } + } + dmaap.validate_dmaap_map_schema(pack_her_up(good_minimal)) + + bad_extra = { + "type": "data_router", + "dmaap_info": { + "publish_url": "http://some-publish-url/123", + "unknown_key": "value" + } + } + with pytest.raises(DcaeException): + dmaap.validate_dmaap_map_schema(pack_her_up(bad_extra)) + + # Subscribers + good = { + "type": "data_router", + "dmaap_info": { + "username": "drdeliver", + "password": "1loveDataR0uter", + "location": "loc00", + "delivery_url": "https://example.com/whatever", + "subscriber_id": "1550" + } + } + dmaap.validate_dmaap_map_schema(pack_her_up(good)) + + good_minimal = { + "type": "data_router", + "dmaap_info": { + "delivery_url": "https://example.com/whatever" + } + } + dmaap.validate_dmaap_map_schema(pack_her_up(good_minimal)) + + bad_extra = { + "type": "data_router", + "dmaap_info": { + "delivery_url": 
"https://example.com/whatever", + "unknown_key": "value" + } + } + with pytest.raises(DcaeException): + dmaap.validate_dmaap_map_schema(pack_her_up(bad_extra)) + + +def test_validate_dmaap_map_entries(): + + # Success + + dmaap_map = { "mr_pub_fun": { "foo": "bar" }, "mr_sub_fun": { "baz": "duh"} } + mr_config_keys = [ "mr_pub_fun", "mr_sub_fun" ] + dr_config_keys = [] + + assert dmaap.validate_dmaap_map_entries(dmaap_map, mr_config_keys, dr_config_keys) == True + + # Not supposed to be empty + + dmaap_map = {} + + assert dmaap.validate_dmaap_map_entries(dmaap_map, mr_config_keys, dr_config_keys) == False + + # Too many in dmaap map + + # NOTE: This scenario has been changed to be a success case per Tommy who + # believes that having extra keys in the dmaap_map is harmless. People would + # want to have a master dmaap_map that has a superset of connections used + # across many components. + + dmaap_map = { "mr_pub_fun": { "foo": "bar" }, "mr_sub_fun": { "baz": "duh"} } + mr_config_keys = [ "mr_pub_fun" ] + dr_config_keys = [] + + assert dmaap.validate_dmaap_map_entries(dmaap_map, mr_config_keys, dr_config_keys) == True + + # Too little in dmaap map + + dmaap_map = { "mr_pub_fun": { "foo": "bar" }, "mr_sub_fun": { "baz": "duh"} } + mr_config_keys = [ "mr_pub_fun", "mr_sub_fun", "mr_xxx" ] + dr_config_keys = [] + + assert dmaap.validate_dmaap_map_entries(dmaap_map, mr_config_keys, dr_config_keys) == False + + +def test_apply_defaults_dmaap_map(): + good = { + "type": "message_router", + "aaf_username": "foo3", + "aaf_password": "bar3", + "dmaap_info": { + "client_role":"com.dcae.member", + "client_id":"1500462518108", + "location":"mtc5", + "topic_url":"https://dcae-msrt-ftl2.com:3905/events/com.dcae.dmaap.FTL2.TommyTestTopic2" + } + } + dm = { "some-config-key": good } + + assert dmaap.apply_defaults_dmaap_map(dm) == dm + + minimal = { + "type": "message_router", + "dmaap_info": { + "topic_url":"https://dcae-msrt-ftl2.com:3905/events/com.dcae.dmaap.FTL2.TommyTestTopic2" + } + } + dm = { "some-config-key": minimal } + + result = dmaap.apply_defaults_dmaap_map(dm) + assert result == {'some-config-key': {'aaf_username': None, + 'aaf_password': None, 'dmaap_info': {'client_role': None, + 'topic_url': 'https://dcae-msrt-ftl2.com:3905/events/com.dcae.dmaap.FTL2.TommyTestTopic2', 'client_id': None, 'location': None}, + 'type': 'message_router'}} + + +def test_update_delivery_urls(): + def get_route_with_slash(config_key): + return "/eden" + + dmaap_map = {"spade-key": {"type": "data_router", "dmaap_info": {"delivery_url": "bleh","username": "dolittle"}}, + "clover-key": {"type": "data_router", "dmaap_info": {"publish_url": "manyfoos", + "username": "chickenlittle"}}} + + dmaap_map = dmaap.update_delivery_urls(get_route_with_slash, "http://some-host.io", dmaap_map) + + expected = {'spade-key': {"type": "data_router", 'dmaap_info': {'delivery_url': 'http://some-host.io/eden', + 'username': 'dolittle'}}, 'clover-key': {"type": "data_router", 'dmaap_info': {'publish_url': 'manyfoos', + 'username': 'chickenlittle'}}} + assert expected == dmaap_map + + def get_route_no_slash(config_key): + return "eden" + + dmaap_map = dmaap.update_delivery_urls(get_route_no_slash, "http://some-host.io", dmaap_map) + assert expected == dmaap_map + + # Case when there is nothing to update + dmaap_map = {"clover-key": {"type": "data_router", "dmaap_info": {"publish_url": "manyfoos", + "username": "chickenlittle"}}} + + assert dmaap_map == dmaap.update_delivery_urls(get_route_no_slash, "http://some-host.io", + dmaap_map) + 
+ +def test_list_delivery_urls(): + dmaap_map = {"spade-key": {"type": "data_router", "dmaap_info": {"delivery_url": "bleh","username": "dolittle"}}, + "clover-key": {"type": "data_router", "dmaap_info": {"publish_url": "manyfoos", + "username": "chickenlittle"}}} + + result = dmaap.list_delivery_urls(dmaap_map) + assert result == [('spade-key', 'bleh')] diff --git a/mod/onboardingapi/dcae_cli/util/tests/test_docker_util.py b/mod/onboardingapi/dcae_cli/util/tests/test_docker_util.py new file mode 100644 index 0000000..1860357 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/tests/test_docker_util.py @@ -0,0 +1,62 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +''' +Provides tests for the docker_util module +''' +import pytest +from dcae_cli.util.profiles import Profile, CONSUL_HOST, CONFIG_BINDING_SERVICE, CDAP_BROKER, DOCKER_HOST +from dcae_cli.util import docker_util as du + + +# TODO: formalize tests +''' +from dcae_cli.util.logger import set_verbose +set_verbose() + +client = _get_docker_client() + +params = dict() +interface_map = dict() +instance_map = dict() +# TODO: make-me-valid? +external_ip ='196.207.143.209' + +# TODO: Need to replace the use of asimov +_run_component('asimov-anomaly-viz:0.0.0', + 'bob', 'asimov.anomaly.viz', '1.0.0', params, interface_map, instance_map, + external_ip) +''' + +def test_convert_profile_to_docker_envs(): + expected = { CONSUL_HOST.upper(): "some.consul.somewhere", + CONFIG_BINDING_SERVICE.upper(): "some.config_binding.somewhere", + CDAP_BROKER.upper(): "broker", + DOCKER_HOST.upper(): "some-docker-host" + } + profile = Profile(**{ CONSUL_HOST: expected[CONSUL_HOST.upper()], + CONFIG_BINDING_SERVICE: expected[CONFIG_BINDING_SERVICE.upper()], + CDAP_BROKER: expected[CDAP_BROKER.upper()], + DOCKER_HOST: expected[DOCKER_HOST.upper()] + }) + actual = du._convert_profile_to_docker_envs(profile) + + assert actual == expected diff --git a/mod/onboardingapi/dcae_cli/util/tests/test_inputs.py b/mod/onboardingapi/dcae_cli/util/tests/test_inputs.py new file mode 100644 index 0000000..5271705 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/tests/test_inputs.py @@ -0,0 +1,37 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. 
+# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +""" +Tests for inputs module +""" +import pytest +from dcae_cli.util import inputs + + +def test_filter_entries(): + spec = { "parameters": [{"name": "foo"}, {"name": "bar", + "sourced_at_deployment": False}, {"name": "baz", "sourced_at_deployment": True}] } + + with pytest.raises(inputs.InputsValidationError): + inputs.filter_entries({}, spec) + + inputs_map = { "foo": "do not copy", "baz": "hello world", "extra": "do not copy" } + + assert len(inputs.filter_entries(inputs_map, spec)) == 1 diff --git a/mod/onboardingapi/dcae_cli/util/tests/test_profiles.py b/mod/onboardingapi/dcae_cli/util/tests/test_profiles.py new file mode 100644 index 0000000..969697a --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/tests/test_profiles.py @@ -0,0 +1,162 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ +# -*- coding: utf-8 -*- +""" +Tests the profiles module +""" +import os, json, copy +from functools import partial + +import click +import pytest + +from dcae_cli import util +from dcae_cli.util.exc import DcaeException +from dcae_cli.util import profiles +from dcae_cli.util.profiles import (get_active_name, get_profile, get_profiles, get_profiles_path, + create_profile, delete_profile, update_profile, ACTIVE, + activate_profile, CONSUL_HOST) +from dcae_cli.util import config + + +def test_profiles(monkeypatch, tmpdir): + '''Tests the creation and initialization of profiles on a clean install''' + # Setup config + config_dict = { "active_profile": "fake-solutioning", "db_url": "some-db" } + config_file = tmpdir.join("config.json") + config_file.write(json.dumps(config_dict)) + + # Setup profile + profile_dict = { "fake-solutioning": { "cdap_broker": "cdap_broker", + "config_binding_service": "config_binding_service", + "consul_host": "realsolcnsl00.dcae.solutioning.com", + "docker_host": "realsoldokr00.dcae.solutioning.com:2376" }} + profile_file = tmpdir.join("profiles.json") + profile_file.write(json.dumps(profile_dict)) + + monkeypatch.setattr(click, "get_app_dir", lambda app: str(tmpdir.realpath())) + + assert get_active_name() == config_dict["active_profile"] + assert get_profile() == profiles.Profile(**profile_dict["fake-solutioning"]) + + # Failures looking for unknown profile + + with pytest.raises(DcaeException): + get_profile('foo') + + with pytest.raises(DcaeException): + delete_profile('foo') + + with pytest.raises(DcaeException): + update_profile('foo', **{}) # doesn't exist + + # Cannot delete active profile + + assert delete_profile(get_active_name()) == False + + # Do different get_profiles queries + + assert get_profiles(user_only=True) == profile_dict + all_profiles = copy.deepcopy(profile_dict) + all_profiles[ACTIVE] = profile_dict["fake-solutioning"] + assert get_profiles(user_only=False) == all_profiles + + # Create and activate new profile + + create_profile('foo') + activate_profile('foo') + assert get_active_name() == 'foo' + + # Update new profile + + update_profile('foo', **{CONSUL_HOST:'bar'}) + assert get_profiles()['foo'][CONSUL_HOST] == 'bar' + assert get_profile()._asdict()[CONSUL_HOST] == 'bar' + + activate_profile("fake-solutioning") + assert delete_profile('foo') == True + + +def test_reinit_via_get_profiles(monkeypatch, tmpdir): + monkeypatch.setattr(click, "get_app_dir", lambda app: str(tmpdir.realpath())) + + def fake_reinit_failure(): + raise profiles.ProfilesInitError("Faked failure") + + monkeypatch.setattr(profiles, "reinit_profiles", fake_reinit_failure) + + with pytest.raises(DcaeException): + get_profiles() + + +def test_reinit_profiles(monkeypatch, tmpdir): + monkeypatch.setattr(click, "get_app_dir", lambda app: str(tmpdir.realpath())) + + # Setup config (need this because the "active_profile" is needed) + config_dict = { "active_profile": "fake-solutioning", "db_url": "some-db" } + config_file = tmpdir.join("config.json") + config_file.write(json.dumps(config_dict)) + + # Start with empty profiles + + profile_dict = { "fake-solutioning": { "cdap_broker": "cdap_broker", + "config_binding_service": "config_binding_service", + "consul_host": "realsolcnsl00.dcae.solutioning.com", + "docker_host": "realsoldokr00.dcae.solutioning.com:2376" }} + + def fetch_profile(target_profile, server_url, path): + return target_profile + + monkeypatch.setattr(util, "fetch_file_from_web", partial(fetch_profile, + profile_dict)) + profiles.reinit_profiles() + 
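+    # reinit_profiles() should have fetched the seed profiles via the patched
+    # fetch_file_from_web and persisted them with write_pref; reading them back
+    # without the injected "active" alias should return exactly the seed dict.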
assert profiles.get_profiles(include_active=False) == profile_dict + + # Test update + + profile_dict = { "fake-5g": { "cdap_broker": "cdap_broker", + "config_binding_service": "config_binding_service", + "consul_host": "realsolcnsl00.dcae.solutioning.com", + "docker_host": "realsoldokr00.dcae.solutioning.com:2376" }} + + monkeypatch.setattr(util, "fetch_file_from_web", partial(fetch_profile, + profile_dict)) + profiles.reinit_profiles() + all_profiles = profiles.get_profiles(include_active=False) + assert "fake-5g" in all_profiles + assert "fake-solutioning" in all_profiles + + # Test fetch failure + + def fetch_failure(server_url, path): + raise RuntimeError("Mysterious error") + + monkeypatch.setattr(util, "fetch_file_from_web", fetch_failure) + # Case when user opts out of manually setting up + monkeypatch.setattr(click, "confirm", lambda msg: False) + + with pytest.raises(profiles.ProfilesInitError): + profiles.reinit_profiles() + + +if __name__ == '__main__': + '''Test area''' + pytest.main([__file__, ]) diff --git a/mod/onboardingapi/dcae_cli/util/tests/test_remove.py b/mod/onboardingapi/dcae_cli/util/tests/test_remove.py new file mode 100644 index 0000000..92b8ce9 --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/tests/test_remove.py @@ -0,0 +1,24 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +# -*- coding: utf-8 -*- +''' +TODO: Test removing components +''' diff --git a/mod/onboardingapi/dcae_cli/util/tests/test_undeploy.py b/mod/onboardingapi/dcae_cli/util/tests/test_undeploy.py new file mode 100644 index 0000000..664c69c --- /dev/null +++ b/mod/onboardingapi/dcae_cli/util/tests/test_undeploy.py @@ -0,0 +1,62 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+# -*- coding: utf-8 -*-
+'''
+Provides tests for the undeploy module
+'''
+from dcae_cli.util.undeploy import _handler, _handler_report
+
+def test_handler():
+    instances = set(["some-instance-name", "another-instance-name"])
+
+    def fake_remove_config(config_key):
+        return True
+
+    def undeploy_success(config_key):
+        return True
+
+    failures, results = _handler([undeploy_success, fake_remove_config], instances)
+
+    assert len(results) == 2
+    assert len(failures) == 0
+
+    def undeploy_failure(config_key):
+        return False
+
+    failures, results = _handler([undeploy_failure, fake_remove_config], instances)
+
+    assert len(results) == 2
+    assert len(failures) == 2
+
+    def undeploy_failure_sometimes(config_key):
+        if "some-instance-name" == config_key:
+            return False
+        return True
+
+    failures, results = _handler([undeploy_failure_sometimes, fake_remove_config], instances)
+
+    assert len(results) == 2
+    assert len(failures) == 1
+
+    failures, results = _handler([undeploy_success, fake_remove_config], [])
+
+    assert len(results) == 0
+    assert len(failures) == 0
diff --git a/mod/onboardingapi/dcae_cli/util/undeploy.py b/mod/onboardingapi/dcae_cli/util/undeploy.py
new file mode 100644
index 0000000..1ce4d76
--- /dev/null
+++ b/mod/onboardingapi/dcae_cli/util/undeploy.py
@@ -0,0 +1,111 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+# -*- coding: utf-8 -*-
+"""
+Provides utilities for undeploying components
+"""
+from functools import partial
+from dcae_cli.util.exc import DcaeException
+import dcae_cli.util.profiles as profiles
+from dcae_cli.util.cdap_util import undeploy_component as undeploy_cdap_component
+from dcae_cli.util.discovery import get_healthy_instances, get_defective_instances, \
+    remove_config
+from dcae_cli.util import docker_util as du
+from dcae_cli.util.logger import get_logger
+
+
+log = get_logger('Undeploy')
+
+
+def _handler(undeploy_funcs, instances):
+    """Handles the undeployment
+
+    Executes all undeployment functions for all instances and gathers up the
+    results. No short circuiting.
+
+    Args
+    ----
+    undeploy_funcs: List of functions that have the following signature `fn: string->boolean`
+        the input is a fully qualified instance name and the return is True upon
+        success and False for failures
+    instances: List of fully qualified instance names
+
+    Returns
+    -------
+    (failures, results) where each is a list of tuples. Each tuple has the
+    structure: `(<instance name>, result of func 1, result of func 2, ..)`.
+    """
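+    # Illustrative sketch (comments only, with hypothetical funcs f and g):
+    # if f("a") == False, f("b") == True, and g always returns True, then
+    #     _handler([f, g], ["a", "b"])
+    # returns
+    #     failures == [("a", False, True)]
+    #     results  == [("a", False, True), ("b", True, True)]
+    # i.e. an instance counts as a failure when any of its per-func results is False.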
+    if not instances:
+        return [], []
+
+    # Invoke all undeploy funcs for all instances
+    def invoke_undeploys(instance):
+        return tuple([ undeploy_func(instance) for undeploy_func in undeploy_funcs ])
+
+    results = [ (instance, ) + invoke_undeploys(instance) for instance in instances ]
+
+    # Determine failures
+    filter_failures_func = partial(filter, lambda result: not all(result[1:]))
+    failures = list(filter_failures_func(results))
+
+    return failures, results
+
+
+def _handler_report(failures, results):
+    """Reports the result of handling"""
+    if len(failures) > 0:
+        failed_names = [ result[0] for result in failures ]
+        log.warn("Could not completely undeploy: {0}".format(", ".join(failed_names)))
+
+        # This message covers a possible false negative: if an undeploy partially
+        # failed on the first attempt, a second attempt will also be reported as a
+        # failure, because the operations that succeeded the first time will fail
+        # the second time around. That second attempt is likewise a partial undeploy.
+        log.warn("NOTE: This could be expected since we are attempting to undeploy a component in a bad partial state")
+    elif len(results) == 0:
+        log.warn("No components found to undeploy")
+    else:
+        # This seems like important info so set it to warning so that it shows up
+        log.warn("Undeployed components: {0}".format(len(results)))
+
+
+def undeploy_component(user, cname, cver, catalog):
+    '''Undeploys a component based on the component type'''
+    cname, cver = catalog.verify_component(cname, cver)
+    ctype = catalog.get_component_type(cname, cver)
+    profile = profiles.get_profile()
+    # Get *all* instances of the component whether running healthy or in a bad partial
+    # deployed state
+    instances = get_healthy_instances(user, cname, cver) + get_defective_instances(user, cname, cver)
+
+    if ctype == 'docker':
+        client = du.get_docker_client(profile)
+        image = catalog.get_docker_image(cname, cver)
+        undeploy_func = partial(du.undeploy_component, client, image)
+    elif ctype == 'cdap':
+        undeploy_func = partial(undeploy_cdap_component, profile)
+    else:
+        raise DcaeException("Unsupported component type for undeploy")
+
+    log.warn("Undeploying components: {0}".format(len(instances)))
+    _handler_report(*_handler([undeploy_func, remove_config], instances))
diff --git a/mod/onboardingapi/docs/Makefile b/mod/onboardingapi/docs/Makefile
new file mode 100644
index 0000000..e0d2e69
--- /dev/null
+++ b/mod/onboardingapi/docs/Makefile
@@ -0,0 +1,230 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+  $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source + +.PHONY: help +help: + @echo "Please use \`make <target>' where <target> is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " applehelp to make an Apple Help Book" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " epub3 to make an epub3" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " coverage to run coverage check of the documentation (if enabled)" + @echo " dummy to check syntax errors of document sources" + +.PHONY: clean +clean: + rm -rf $(BUILDDIR)/* + +.PHONY: html +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +.PHONY: dirhtml +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +.PHONY: singlehtml +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +.PHONY: pickle +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +.PHONY: json +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +.PHONY: htmlhelp +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +.PHONY: qthelp +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/dcae_cli.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/dcae_cli.qhc" + +.PHONY: applehelp +applehelp: + $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp + @echo + @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." + @echo "N.B. 
You won't be able to view it unless you put it in" \ + "~/Library/Documentation/Help or install it in your application" \ + "bundle." + +.PHONY: devhelp +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/dcae_cli" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/dcae_cli" + @echo "# devhelp" + +.PHONY: epub +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +.PHONY: epub3 +epub3: + $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 + @echo + @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." + +.PHONY: latex +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +.PHONY: latexpdf +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +.PHONY: latexpdfja +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +.PHONY: text +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +.PHONY: man +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +.PHONY: texinfo +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +.PHONY: info +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +.PHONY: gettext +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +.PHONY: changes +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +.PHONY: linkcheck +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +.PHONY: doctest +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +.PHONY: coverage +coverage: + $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage + @echo "Testing of coverage in the sources finished, look at the " \ + "results in $(BUILDDIR)/coverage/python.txt." + +.PHONY: xml +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. 
The XML files are in $(BUILDDIR)/xml."
+
+.PHONY: pseudoxml
+pseudoxml:
+	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+	@echo
+	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
+
+.PHONY: dummy
+dummy:
+	$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
+	@echo
+	@echo "Build finished. Dummy builder generates no files."
diff --git a/mod/onboardingapi/docs/README.md b/mod/onboardingapi/docs/README.md
new file mode 100644
index 0000000..01685c8
--- /dev/null
+++ b/mod/onboardingapi/docs/README.md
@@ -0,0 +1,20 @@
+# Documentation build instructions
+
+The autodoc extension doesn't regenerate the `dcae_cli` package stub files under `source/apidoc/`. They can be recreated via:
+
+```
+sphinx-apidoc -o source/apidoc/ ../dcae_cli/
+```
+
+Then the HTML can be rebuilt via:
+
+```
+make clean
+make html
+```
+
+The Makefile was initially generated via:
+
+```
+sphinx-quickstart
+```
diff --git a/mod/onboardingapi/docs/source/_static/.gitkeep b/mod/onboardingapi/docs/source/_static/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/mod/onboardingapi/docs/source/_static/.gitkeep
diff --git a/mod/onboardingapi/docs/source/_templates/.gitkeep b/mod/onboardingapi/docs/source/_templates/.gitkeep
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/mod/onboardingapi/docs/source/_templates/.gitkeep
diff --git a/mod/onboardingapi/docs/source/cli_usage.rst b/mod/onboardingapi/docs/source/cli_usage.rst
new file mode 100644
index 0000000..299632a
--- /dev/null
+++ b/mod/onboardingapi/docs/source/cli_usage.rst
@@ -0,0 +1,4 @@
+`dcae_cli` command line usage
+=============================
+
+.. dcae_cli:click-help:: dcae_cli
diff --git a/mod/onboardingapi/docs/source/conf.py b/mod/onboardingapi/docs/source/conf.py
new file mode 100644
index 0000000..1d9479e
--- /dev/null
+++ b/mod/onboardingapi/docs/source/conf.py
@@ -0,0 +1,317 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2019 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# dcae_cli documentation build configuration file, created by
+# sphinx-quickstart on Tue Dec 13 14:16:27 2016.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+
+# make the development dcae_cli package and the click extension module visible
+conf_dir = os.path.abspath(os.path.dirname(__file__))
+proj_root = os.path.abspath(os.path.join(conf_dir, os.path.pardir, os.path.pardir))
+sys.path.insert(1, conf_dir)
+sys.path.insert(1, proj_root)
+
+from dcae_cli import __version__ as dcae_cli_ver
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    'sphinx.ext.autodoc',
+    'sphinx.ext.napoleon',
+    'dcaeclidoctools'
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'onboardingapi'
+# Referenced by the man_pages and texinfo_documents sections below; taken from
+# the author listed in latex_documents. Without this definition those sections
+# would raise a NameError at build time.
+author = 'Paul Triantafyllou'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = dcae_cli_ver
+# The full version, including alpha/beta/rc tags.
+release = dcae_cli_ver
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This patterns also effect to html_static_path and html_extra_path
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. +# "<project> v<release> documentation" by default. +#html_title = 'dcae_cli v0.1.0' + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (relative to this directory) to use as a favicon of +# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not None, a 'Last updated on:' timestamp is inserted at every page +# bottom, using the given strftime format. +# The empty string is equivalent to '%b %d, %Y'. +#html_last_updated_fmt = None + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# 'ja' uses this config value. +# 'zh' user can custom change `jieba` dictionary path. 
+#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'dcae_clidoc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', + +# Latex figure (float) alignment +#'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'dcae_cli.tex', 'dcae\\_cli Documentation', + 'Paul Triantafyllou', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'dcae_cli', 'dcae_cli Documentation', + [author], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'dcae_cli', 'dcae_cli Documentation', + author, 'dcae_cli', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False diff --git a/mod/onboardingapi/docs/source/dcaeclidoctools.py b/mod/onboardingapi/docs/source/dcaeclidoctools.py new file mode 100644 index 0000000..b54e5e2 --- /dev/null +++ b/mod/onboardingapi/docs/source/dcaeclidoctools.py @@ -0,0 +1,78 @@ +# ============LICENSE_START======================================================= +# org.onap.dcae +# ================================================================================ +# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= +# +# ECOMP is a trademark and service mark of AT&T Intellectual Property. + +import pkg_resources + +from docutils.nodes import literal_block +from sphinx.domains import Domain +from sphinx.util.compat import Directive + +import click + +from dcae_cli.cli import cli as group # PYTHONPATH dynamically altered in conf.py + + +def generate_help_texts(command, prefix): + ctx = click.Context(command) + yield make_block( + ' '.join(prefix), + command.get_help_option(ctx).opts[0], + command.get_help(ctx), + ) + + if isinstance(command, click.core.Group): + for c in command.list_commands(ctx): + c = command.resolve_command(ctx, [c])[1] + prefix.append(c.name) + for h in generate_help_texts(c, prefix): + yield h + prefix.pop() + + +def find_script_callable(name): + return list(pkg_resources.iter_entry_points( + 'console_scripts', name))[0].load() + + +def make_block(command, opt, content): + h = "$ {} {}\n".format(command, opt) + content + return literal_block(h, h, language='bash') + + +class ClickHelpDirective(Directive): + has_content = True + required_arguments = 1 + + def run(self): + root_cmd = self.arguments[0] + #group = find_script_callable(root_cmd) + return list(generate_help_texts(group, [root_cmd])) + + +class DcaeCliDomain(Domain): + name = 'dcae_cli' + label = 'DCAE-CLI' + directives = { + 'click-help': ClickHelpDirective, + } + + +def setup(app): + app.add_domain(DcaeCliDomain) diff --git a/mod/onboardingapi/docs/source/index.rst b/mod/onboardingapi/docs/source/index.rst new file mode 100644 index 0000000..2f99c20 --- /dev/null +++ b/mod/onboardingapi/docs/source/index.rst @@ -0,0 +1,25 @@ +.. dcae_cli documentation master file, created by + sphinx-quickstart on Tue Dec 13 14:16:27 2016. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to dcae_cli's documentation! +==================================== + +Contents: + +.. toctree:: + :maxdepth: 1 + + cli_usage + apidoc/dcae_cli + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/mod/onboardingapi/pom.xml b/mod/onboardingapi/pom.xml new file mode 100644 index 0000000..9b1c4f0 --- /dev/null +++ b/mod/onboardingapi/pom.xml @@ -0,0 +1,37 @@ +<?xml version="1.0"?> +<!-- +================================================================================ +Copyright (c) 2017-2019 AT&T Intellectual Property. All rights reserved. +================================================================================ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and
+limitations under the License.
+============LICENSE_END=========================================================
+
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.onap.dcaegen2.platform.mod</groupId>
+  <artifactId>dcaegen2-platform-mod-onboardingapi</artifactId>
+  <name>dcaegen2-platform-mod-onboardingapi</name>
+  <version>2.12.0</version>
+  <url>http://maven.apache.org</url>
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <sonar.sources>.</sonar.sources>
+    <sonar.junit.reportsPath>xunit-results.xml</sonar.junit.reportsPath>
+    <sonar.python.coverage.reportPath>coverage.xml</sonar.python.coverage.reportPath>
+    <sonar.language>py</sonar.language>
+    <sonar.pluginName>Python</sonar.pluginName>
+    <sonar.inclusions>**/*.py</sonar.inclusions>
+    <sonar.exclusions>tests/*,setup.py</sonar.exclusions>
+  </properties>
+</project>
diff --git a/mod/onboardingapi/resources/README.md b/mod/onboardingapi/resources/README.md
new file mode 100644
index 0000000..d3619ea
--- /dev/null
+++ b/mod/onboardingapi/resources/README.md
@@ -0,0 +1,39 @@
+# Resources
+
+## `config.json`
+
+Used to distribute backend configuration, such as the onboarding database connection, to end users of dcae-cli.
+
+```
+curl -v --user <user>:<password> https://<your file server host>/dcae-cli/config.json --upload-file config.json
+```
+
+### Format
+
+```
+{
+    "active_profile": <active profile option>,
+    "db_url": <onboarding catalog database connection>
+}
+```
+
+## `profiles.json`
+
+Used to distribute platform-team approved environment profiles to end users of dcae-cli.
+
+```
+curl -v --user <user>:<password> https://<your file server host>/dcae-cli/profiles.json --upload-file profiles.json
+```
+
+### Format
+
+```
+{
+    "env-name": {
+        "docker_host": <docker hostname:port>,
+        "cdap_broker": <cdap broker consul name>,
+        "consul_host": <consul hostname>,
+        "config_binding_service": <config binding service consul name>
+    }
+}
+```
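+
+### Example
+
+A hypothetical `profiles.json` with a single environment. The host names below
+mirror the fixtures used in the dcae-cli test suite and are placeholders, not
+real endpoints:
+
+```
+{
+    "solutioning": {
+        "docker_host": "realsoldokr00.dcae.solutioning.com:2376",
+        "cdap_broker": "cdap_broker",
+        "consul_host": "realsolcnsl00.dcae.solutioning.com",
+        "config_binding_service": "config_binding_service"
+    }
+}
+```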
diff --git a/mod/onboardingapi/setup.cfg b/mod/onboardingapi/setup.cfg
new file mode 100644
index 0000000..b7e4789
--- /dev/null
+++ b/mod/onboardingapi/setup.cfg
@@ -0,0 +1,2 @@
+[aliases]
+test=pytest
diff --git a/mod/onboardingapi/setup.py b/mod/onboardingapi/setup.py
new file mode 100644
index 0000000..438732c
--- /dev/null
+++ b/mod/onboardingapi/setup.py
@@ -0,0 +1,61 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+# -*- coding: utf-8 -*-
+import os
+from setuptools import setup, find_packages
+
+
+# extract __version__ from version file. importing dcae_cli will lead to install failures
+setup_dir = os.path.dirname(__file__)
+with open(os.path.join(setup_dir, 'dcae_cli', '_version.py')) as file:
+    globals_dict = dict()
+    exec(file.read(), globals_dict)
+    __version__ = globals_dict['__version__']
+
+
+setup(
+    name = "onap-dcae-cli",
+    version = __version__,
+    packages = find_packages(),
+    author = "Michael Hwang, Paul Triantafyllou, Tommy Carpenter",
+    description = ("DCAE component on-boarding utility"),
+    entry_points="""
+    [console_scripts]
+    dcae_cli=dcae_cli.cli:cli
+    """,
+    setup_requires=['pytest-runner'],
+    install_requires=['python-consul<1.0.0',
+                      'six',
+                      'sqlalchemy',
+                      'SQLAlchemy-Utils',
+                      'click>=6.0,<7.0',
+                      'jsonschema',
+                      'terminaltables',
+                      'psycopg2==2.7.5',
+                      'psycopg2-binary==2.7.5',
+                      'genson',
+                      'flask-restplus',
+                      'onap-dcae-discovery-client>=2.0.0',
+                      'onap-dcae-dockering>=1.4.1,<2.0.0'
+                      ],
+    tests_require=['pytest',
+                   'mock'],
+    )
diff --git a/mod/onboardingapi/start.sh b/mod/onboardingapi/start.sh
new file mode 100755
index 0000000..8b3978d
--- /dev/null
+++ b/mod/onboardingapi/start.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+if [ -z "$PG_CONN" ]; then
+    echo "PG_CONN variable has not been set"
+    echo "PG_CONN contains the full postgresql URI"
+    # e.g. (hypothetical values): PG_CONN=postgresql://user:password@dbhost:5432/dbname
+    exit 1
+fi
+
+if [ ! -f ~/.config/dcae-cli/config.json ]; then
+    echo "Creating dcae-cli config"
+    # Ensure the config directory exists before writing the file
+    mkdir -p ~/.config/dcae-cli
+    # TODO: Make this into a variable that gets fed in via docker run
+    echo "{\"server_url\": \"https://git.onap.org/dcaegen2/platform/cli/plain\", \"user\": \"api\", \"db_url\": \"$PG_CONN\", \"cli_version\": \"2.11.1\", \"path_component_spec\": \"/component-json-schemas/component-specification/dcae-cli-v2/component-spec-schema.json\", \"path_data_format\": \"/component-json-schemas/data-format/dcae-cli-v1/data-format-schema.json\"}" > ~/.config/dcae-cli/config.json
+fi
+
+dcae_cli http --live
diff --git a/mod/onboardingapi/tox.ini b/mod/onboardingapi/tox.ini
new file mode 100644
index 0000000..addb463
--- /dev/null
+++ b/mod/onboardingapi/tox.ini
@@ -0,0 +1,16 @@
+# content of tox.ini, put in same dir as setup.py
+[tox]
+envlist = py37
+
+[testenv]
+deps=
+    pytest
+    coverage
+    pytest-cov
+    mock
+setenv =
+    PYTHONPATH={toxinidir}
+passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
+commands=
+    pytest dcae_cli --junitxml xunit-results.xml --cov dcae_cli --cov-report xml --cov-report term
+    coverage xml