-rw-r--r--  hpa/.testr.conf                     8
-rw-r--r--  hpa/LICENSE                        26
-rw-r--r--  hpa/README.md                      16
-rw-r--r--  hpa/assembly.xml                   37
-rw-r--r--  hpa/hpa/__init__.py                13
-rw-r--r--  hpa/hpa/base.py                    22
-rw-r--r--  hpa/hpa/hpa_discovery.py          450
-rwxr-xr-x  hpa/mvn-phase-script.sh            83
-rw-r--r--  hpa/pom.xml                       199
-rw-r--r--  hpa/setup.py                       43
-rw-r--r--  hpa/tests/__init__.py               1
-rw-r--r--  hpa/tests/test.py                 148
-rw-r--r--  hpa/tests/test_hpa_discovery.py   105
-rw-r--r--  hpa/tox.ini                         8
-rw-r--r--  pom.xml                             1
15 files changed, 1160 insertions, 0 deletions
diff --git a/hpa/.testr.conf b/hpa/.testr.conf
new file mode 100644
index 00000000..6e39851b
--- /dev/null
+++ b/hpa/.testr.conf
@@ -0,0 +1,8 @@
+[DEFAULT]
+test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
+ OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
+ OS_LOG_CAPTURE=${OS_LOG_CAPTURE:-1} \
+ OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-160} \
+ ${PYTHON:-python} -m subunit.run discover -t ./ ${OS_TEST_PATH:-./tests} $LISTOPT $IDOPTION
+test_id_option=--load-list $IDFILE
+test_list_option=--list
diff --git a/hpa/LICENSE b/hpa/LICENSE
new file mode 100644
index 00000000..fffadb02
--- /dev/null
+++ b/hpa/LICENSE
@@ -0,0 +1,26 @@
+
+The following license applies to all files in this directory and its
+subdirectories. Licenses are included in individual source files where
+appropriate; where a file's license text differs from this text, the file's
+text takes precedence. Any file without its own license text defaults to being
+covered by this text, as not all file types support the addition of license headers.
+
+#
+# -------------------------------------------------------------------------
+# Copyright (c) 2015-2017 AT&T Intellectual Property
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# -------------------------------------------------------------------------
+#
+
diff --git a/hpa/README.md b/hpa/README.md
new file mode 100644
index 00000000..fe568ad6
--- /dev/null
+++ b/hpa/README.md
@@ -0,0 +1,16 @@
+# 1. build hpa plugin
+cd hpa
+python setup.py build
+
+# 2. install hpa plugin
+cd hpa
+python setup.py install
+
+# 3. test hpa plugin
+cd hpa/tests
+# 3.1 test with cloud extra info (dpdk)
+python test.py -f "dpdk"
+# 3.2 test without cloud extra info
+python test.py -t "windriver"
+python test.py -t "starlingx"
+python test.py -t "pike"
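+
+# 4. load hpa plugin from python via stevedore (sketch; mirrors tests/test.py,
+#    where the flavor / extra_specs / viminfo sample dicts come from)
+# >>> from stevedore import extension
+# >>> mgr = extension.ExtensionManager(namespace='hpa.discovery', invoke_on_load=True)
+# >>> data = {"flavor": flavor, "extra_specs": extra_specs, "viminfo": viminfo, "vimtype": "windriver"}
+# >>> results = mgr.map(lambda ext, d: ext.obj.get_hpa_capabilities(d), data)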
diff --git a/hpa/assembly.xml b/hpa/assembly.xml
new file mode 100644
index 00000000..18126dc6
--- /dev/null
+++ b/hpa/assembly.xml
@@ -0,0 +1,37 @@
+<!--
+ Copyright (c) 2019 Intel Corporation. All rights reserved.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at:
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-->
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+ <id>hpa</id>
+ <formats>
+ <format>zip</format>
+ </formats>
+ <fileSets>
+ <fileSet>
+ <includes>
+ <include>*/**</include>
+ </includes>
+ <excludes>
+ <exclude>**/*.pyc</exclude>
+ <exclude>target/**</exclude>
+ <exclude>docker/**</exclude>
+ <exclude>cover/**</exclude>
+ <exclude>pom.xml</exclude>
+ <exclude>assembly.xml</exclude>
+ <exclude>xunit-results.xml</exclude>
+ </excludes>
+ </fileSet>
+ </fileSets>
+ <baseDirectory>hpa</baseDirectory>
+</assembly>
diff --git a/hpa/hpa/__init__.py b/hpa/hpa/__init__.py
new file mode 100644
index 00000000..81362a2b
--- /dev/null
+++ b/hpa/hpa/__init__.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2019 Intel Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/hpa/hpa/base.py b/hpa/hpa/base.py
new file mode 100644
index 00000000..90eb6432
--- /dev/null
+++ b/hpa/hpa/base.py
@@ -0,0 +1,22 @@
+import abc
+
+import six
+
+
+@six.add_metaclass(abc.ABCMeta)
+class HPA_DiscoveryBase(object):
+ """Base class for HPA discovery plugins.
+ """
+
+ def __init__(self):
+ """do nothing"""
+
+ @abc.abstractmethod
+ def get_hpa_capabilities(self, data):
+ """Discover HPA capabilities from the given data.
+
+ :param data: A dictionary with string keys and simple types as
+ values (flavor, extra_specs, viminfo and vimtype).
+ :type data: dict(str:?)
+ :returns: list of HPA capability dictionaries.
+ """
diff --git a/hpa/hpa/hpa_discovery.py b/hpa/hpa/hpa_discovery.py
new file mode 100644
index 00000000..69b9f222
--- /dev/null
+++ b/hpa/hpa/hpa_discovery.py
@@ -0,0 +1,450 @@
+import traceback
+import uuid
+import json
+import logging
+from hpa import base
+
+
+def ignore_case_get(args, key, def_val=""):
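+ """Return args[key], matching the key case-insensitively; def_val if absent."""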
+ if not key:
+ return def_val
+ if key in args:
+ return args[key]
+ for old_key in args:
+ if old_key.upper() == key.upper():
+ return args[old_key]
+ return def_val
+
+class HPA_Discovery(base.HPA_DiscoveryBase):
+ """HPA Discovery implementation.
+ """
+ def __init__(self):
+ if not hasattr(self, "_logger"):
+ self._logger = logging.getLogger("hpa_discovery")
+ self.fh = logging.FileHandler('discovery.log')
+ self.fh.setLevel(logging.INFO)
+ self._logger.addHandler(self.fh)
+
+ def get_hpa_capabilities(self, data):
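+ """Collect all HPA capabilities derivable from the flavor, extra_specs and VIM info in data."""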
+ hpa_caps = []
+
+ # Basic capabilities
+ caps_dict = self._get_hpa_basic_capabilities(data)
+ if len(caps_dict) > 0:
+ self._logger.debug("basic_capabilities_info: %s" % caps_dict)
+ hpa_caps.append(caps_dict)
+
+ # CPU pinning capabilities
+ caps_dict = self._get_cpupining_capabilities(data)
+ if len(caps_dict) > 0:
+ self._logger.debug("cpupining_capabilities_info: %s" % caps_dict)
+ hpa_caps.append(caps_dict)
+
+ # cputopology capabilities
+ caps_dict = self._get_cputopology_capabilities(data)
+ if len(caps_dict) > 0:
+ self._logger.debug("cputopology_capabilities_info: %s" % caps_dict)
+ hpa_caps.append(caps_dict)
+
+ # hugepages capabilities
+ caps_dict = self._get_hugepages_capabilities(data)
+ if len(caps_dict) > 0:
+ self._logger.debug("hugepages_capabilities_info: %s" % caps_dict)
+ hpa_caps.append(caps_dict)
+
+ # numa capabilities
+ caps_dict = self._get_numa_capabilities(data)
+ if len(caps_dict) > 0:
+ self._logger.debug("numa_capabilities_info: %s" % caps_dict)
+ hpa_caps.append(caps_dict)
+
+ # storage capabilities
+ caps_dict = self._get_storage_capabilities(data)
+ if len(caps_dict) > 0:
+ self._logger.debug("storage_capabilities_info: %s" % caps_dict)
+ hpa_caps.append(caps_dict)
+
+ # CPU instruction set extension capabilities
+ caps_dict = self._get_instruction_set_capabilities(data)
+ if len(caps_dict) > 0:
+ self._logger.debug("instruction_set_capabilities_info: %s" % caps_dict)
+ hpa_caps.append(caps_dict)
+
+ # PCI passthrough capabilities
+ caps_dict = self._get_pci_passthrough_capabilities(data)
+ if len(caps_dict) > 0:
+ self._logger.debug("pci_passthrough_capabilities_info: %s" % caps_dict)
+ hpa_caps.append(caps_dict)
+
+ # SRIOV-NIC capabilities
+ caps_dict = self._get_sriov_nic_capabilities(data)
+ if len(caps_dict) > 0:
+ self._logger.debug("sriov_nic_capabilities_info: %s" % caps_dict)
+ hpa_caps.append(caps_dict)
+
+ # ovsdpdk capabilities
+ caps_dict = self._get_ovsdpdk_capabilities(data)
+ if len(caps_dict) > 0:
+ self._logger.debug("ovsdpdk_capabilities_info: %s" % caps_dict)
+ hpa_caps.append(caps_dict)
+
+ # self._logger.error("hpa_caps: %s" % (hpa_caps))
+ return hpa_caps
+
+ def _get_hpa_basic_capabilities(self, data):
+ basic_capability = {}
+ feature_uuid = uuid.uuid4()
+ flavor = data["flavor"]
+
+ try:
+ basic_capability['hpa-capability-id'] = str(feature_uuid)
+ basic_capability['hpa-feature'] = 'basicCapabilities'
+ basic_capability['architecture'] = 'generic'
+ basic_capability['hpa-version'] = 'v1'
+
+ basic_capability['hpa-feature-attributes'] = []
+ basic_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'numVirtualCpu',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(flavor['vcpus'])
+ })
+ basic_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key':'virtualMemSize',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\",\"unit\":\"{1}\"}}'.format(flavor['ram'],"MB")
+ })
+ except Exception as e:
+ self._logger.error(traceback.format_exc())
+ return (
+ 11, str(e)
+ )
+
+ return basic_capability
+
+ def _get_cpupining_capabilities(self, data):
+ cpupining_capability = {}
+ feature_uuid = uuid.uuid4()
+ extra_specs = data["extra_specs"]
+
+ try:
+ if 'hw:cpu_policy' in extra_specs\
+ or 'hw:cpu_thread_policy' in extra_specs:
+ cpupining_capability['hpa-capability-id'] = str(feature_uuid)
+ cpupining_capability['hpa-feature'] = 'cpuPinning'
+ cpupining_capability['architecture'] = 'generic'
+ cpupining_capability['hpa-version'] = 'v1'
+
+ cpupining_capability['hpa-feature-attributes'] = []
+ if 'hw:cpu_thread_policy' in extra_specs:
+ cpupining_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'logicalCpuThreadPinningPolicy',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(
+ extra_specs['hw:cpu_thread_policy'])
+ })
+ if 'hw:cpu_policy' in extra_specs:
+ cpupining_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key':'logicalCpuPinningPolicy',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(
+ extra_specs['hw:cpu_policy'])
+ })
+ except Exception:
+ self._logger.error(traceback.format_exc())
+
+ return cpupining_capability
+
+ def _get_cputopology_capabilities(self, data):
+ cputopology_capability = {}
+ feature_uuid = uuid.uuid4()
+ extra_specs = data["extra_specs"]
+
+ try:
+ if 'hw:cpu_sockets' in extra_specs\
+ or 'hw:cpu_cores' in extra_specs\
+ or 'hw:cpu_threads' in extra_specs:
+ cputopology_capability['hpa-capability-id'] = str(feature_uuid)
+ cputopology_capability['hpa-feature'] = 'cpuTopology'
+ cputopology_capability['architecture'] = 'generic'
+ cputopology_capability['hpa-version'] = 'v1'
+
+ cputopology_capability['hpa-feature-attributes'] = []
+ if 'hw:cpu_sockets' in extra_specs:
+ cputopology_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'numCpuSockets',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(extra_specs['hw:cpu_sockets'])
+ })
+ if 'hw:cpu_cores' in extra_specs:
+ cputopology_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'numCpuCores',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(extra_specs['hw:cpu_cores'])
+ })
+ if 'hw:cpu_threads' in extra_specs:
+ cputopology_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'numCpuThreads',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(extra_specs['hw:cpu_threads'])
+ })
+ except Exception:
+ self._logger.error(traceback.format_exc())
+
+ return cputopology_capability
+
+ def _get_hugepages_capabilities(self, data):
+ hugepages_capability = {}
+ feature_uuid = uuid.uuid4()
+ extra_specs = data["extra_specs"]
+
+ try:
+ if 'hw:mem_page_size' in extra_specs:
+ hugepages_capability['hpa-capability-id'] = str(feature_uuid)
+ hugepages_capability['hpa-feature'] = 'hugePages'
+ hugepages_capability['architecture'] = 'generic'
+ hugepages_capability['hpa-version'] = 'v1'
+
+ hugepages_capability['hpa-feature-attributes'] = []
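+ # Map Nova's symbolic page sizes: 'large' is reported as 2 MB huge pages,
+ # 'small' as the standard 4 KB page, 'any' is not supported, and explicit
+ # numeric sizes are passed through with a KB unit.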
+ if extra_specs['hw:mem_page_size'] == 'large':
+ hugepages_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'memoryPageSize',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\",\"unit\":\"{1}\"}}'.format(2,"MB")
+ })
+ elif extra_specs['hw:mem_page_size'] == 'small':
+ hugepages_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'memoryPageSize',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\",\"unit\":\"{1}\"}}'.format(4,"KB")
+ })
+ elif extra_specs['hw:mem_page_size'] == 'any':
+ self._logger.info("HPA feature memoryPageSize does not currently support the 'any' page size")
+ else:
+ hugepages_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'memoryPageSize',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\",\"unit\":\"{1}\"}}'.format(extra_specs['hw:mem_page_size'],"KB")
+ })
+ except Exception:
+ self._logger.error(traceback.format_exc())
+
+ return hugepages_capability
+
+ def _get_numa_capabilities(self, data):
+ numa_capability = {}
+ feature_uuid = uuid.uuid4()
+ extra_specs = data["extra_specs"]
+
+ try:
+ if 'hw:numa_nodes' in extra_specs:
+ numa_capability['hpa-capability-id'] = str(feature_uuid)
+ numa_capability['hpa-feature'] = 'numa'
+ numa_capability['architecture'] = 'generic'
+ numa_capability['hpa-version'] = 'v1'
+
+ numa_capability['hpa-feature-attributes'] = []
+ numa_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'numaNodes',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(extra_specs['hw:numa_nodes'] or 0)
+ })
+
+ for num in range(0, int(extra_specs['hw:numa_nodes'])):
+ numa_cpu_node = "hw:numa_cpus.%s" % num
+ numa_mem_node = "hw:numa_mem.%s" % num
+ numacpu_key = "numaCpu-%s" % num
+ numamem_key = "numaMem-%s" % num
+
+ if numa_cpu_node in extra_specs and numa_mem_node in extra_specs:
+ numa_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': numacpu_key,
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(extra_specs[numa_cpu_node])
+ })
+ numa_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': numamem_key,
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\",\"unit\":\"{1}\"}}'.format(extra_specs[numa_mem_node],"MB")
+ })
+ except Exception:
+ self._logger.error(traceback.format_exc())
+
+ return numa_capability
+
+ def _get_storage_capabilities(self, data):
+ storage_capability = {}
+ feature_uuid = uuid.uuid4()
+ flavor = data["flavor"]
+
+ try:
+ storage_capability['hpa-capability-id'] = str(feature_uuid)
+ storage_capability['hpa-feature'] = 'localStorage'
+ storage_capability['architecture'] = 'generic'
+ storage_capability['hpa-version'] = 'v1'
+
+ storage_capability['hpa-feature-attributes'] = []
+ storage_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'diskSize',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\",\"unit\":\"{1}\"}}'.format(
+ flavor['disk'] or 0, "GB")
+ })
+ storage_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'swapMemSize',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\",\"unit\":\"{1}\"}}'.format(
+ flavor['swap'] or 0, "MB")
+ })
+ storage_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'ephemeralDiskSize',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\",\"unit\":\"{1}\"}}'.format(
+ flavor['OS-FLV-EXT-DATA:ephemeral'] or 0, "GB")
+ })
+ except Exception:
+ self._logger.error(traceback.format_exc())
+
+ return storage_capability
+
+ def _get_instruction_set_capabilities(self, data):
+ instruction_capability = {}
+ feature_uuid = uuid.uuid4()
+ extra_specs = data["extra_specs"]
+
+ try:
+ if 'hw:capabilities:cpu_info:features' in extra_specs:
+ instruction_capability['hpa-capability-id'] = str(feature_uuid)
+ instruction_capability['hpa-feature'] = 'instructionSetExtensions'
+ instruction_capability['architecture'] = 'Intel64'
+ instruction_capability['hpa-version'] = 'v1'
+
+ instruction_capability['hpa-feature-attributes'] = []
+ instruction_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'instructionSetExtensions',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(
+ extra_specs['hw:capabilities:cpu_info:features'])
+ })
+ except Exception:
+ self._logger.error(traceback.format_exc())
+
+ return instruction_capability
+
+ def _get_pci_passthrough_capabilities(self, data):
+ pci_passthrough_capability = {}
+ feature_uuid = uuid.uuid4()
+ extra_specs = data["extra_specs"]
+
+ try:
+
+ if 'pci_passthrough:alias' in extra_specs:
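+ # alias format (see tests/test.py), e.g. "sriov-vf-intel-8086-15b3:4":
+ # the count follows ':' and the '-'-separated prefix carries the
+ # architecture (value2[2]), PCI vendor id (value2[3]) and device id (value2[4])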
+ value1 = extra_specs['pci_passthrough:alias'].split(':')
+ value2 = value1[0].split('-')
+
+ pci_passthrough_capability['hpa-capability-id'] = str(feature_uuid)
+ pci_passthrough_capability['hpa-feature'] = 'pciePassthrough'
+ pci_passthrough_capability['architecture'] = str(value2[2])
+ pci_passthrough_capability['hpa-version'] = 'v1'
+
+
+ pci_passthrough_capability['hpa-feature-attributes'] = []
+ pci_passthrough_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'pciCount',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(value1[1])
+ })
+ pci_passthrough_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'pciVendorId',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(value2[3])
+ })
+ pci_passthrough_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'pciDeviceId',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(value2[4])
+ })
+ except Exception:
+ self._logger.error(traceback.format_exc())
+
+ return pci_passthrough_capability
+
+ def _get_sriov_nic_capabilities(self, data):
+ sriov_capability = {}
+ feature_uuid = uuid.uuid4()
+ extra_specs = data["extra_specs"]
+
+ try:
+ if 'aggregate_instance_extra_specs:sriov_nic' in extra_specs:
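+ # value format (see tests/test.py), e.g. "sriov-nic-intel-8086-15b3-physnet-1:1":
+ # the count follows ':'; the prefix yields architecture, PCI vendor id,
+ # device id and physical network name (value2[2] .. value2[5])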
+ value1 = extra_specs['aggregate_instance_extra_specs:sriov_nic'].split(':')
+ value2 = value1[0].split('-', 5)
+
+ sriov_capability['hpa-capability-id'] = str(feature_uuid)
+ sriov_capability['hpa-feature'] = 'sriovNICNetwork'
+ sriov_capability['architecture'] = str(value2[2])
+ sriov_capability['hpa-version'] = 'v1'
+
+ sriov_capability['hpa-feature-attributes'] = []
+ sriov_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'pciCount',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(value1[1])})
+ sriov_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'pciVendorId',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(value2[3])})
+ sriov_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'pciDeviceId',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(value2[4])})
+ sriov_capability['hpa-feature-attributes'].append(
+ {'hpa-attribute-key': 'physicalNetwork',
+ 'hpa-attribute-value':
+ '{{\"value\":\"{0}\"}}'.format(value2[5])})
+ except Exception:
+ self._logger.error(traceback.format_exc())
+
+ return sriov_capability
+
+ def _get_ovsdpdk_capabilities(self, data):
+ ovsdpdk_capability = {}
+ feature_uuid = uuid.uuid4()
+ extra_specs = data["extra_specs"]
+ viminfo = data["viminfo"]
+ vimtype = data["vimtype"]
+ libname = "dataProcessingAccelerationLibrary"
+ libversion = "12.1"
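+ # Default library name/version; bumped to 17.2 for Wind River / StarlingX
+ # clouds that report no cloud_extra_info, and overridden by the "ovsDpdk"
+ # section of cloud_extra_info when it is present.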
+
+ try:
+ ovsdpdk_capability['hpa-capability-id'] = str(feature_uuid)
+ ovsdpdk_capability['hpa-feature'] = 'ovsDpdk'
+ ovsdpdk_capability['architecture'] = 'Intel64'
+ ovsdpdk_capability['hpa-version'] = 'v1'
+
+ cloud_extra_info_str = viminfo.get('cloud_extra_info')
+ if cloud_extra_info_str in [None, '']:
+ if vimtype in ["windriver", "starlingx"]:
+ libname = "dataProcessingAccelerationLibrary"
+ libversion = "17.2"
+ else:
+ if not isinstance(cloud_extra_info_str, dict):
+ try:
+ cloud_extra_info_str = json.loads(cloud_extra_info_str)
+ except Exception as ex:
+ self._logger.error("Cannot convert cloud extra info %s %s" % (
+ str(ex), cloud_extra_info_str))
+ return {}
+ if cloud_extra_info_str:
+ cloud_dpdk_info = cloud_extra_info_str.get("ovsDpdk")
+ if cloud_dpdk_info:
+ libname = cloud_dpdk_info.get("libname")
+ libversion = cloud_dpdk_info.get("libversion")
+
+ ovsdpdk_capability['hpa-feature-attributes'] = [
+ {
+ 'hpa-attribute-key': str(libname),
+ 'hpa-attribute-value': '{{\"value\":\"{0}\"}}'.format(libversion)
+ },]
+ except Exception:
+ self._logger.error(traceback.format_exc())
+
+ return ovsdpdk_capability
diff --git a/hpa/mvn-phase-script.sh b/hpa/mvn-phase-script.sh
new file mode 100755
index 00000000..6cc1b2bb
--- /dev/null
+++ b/hpa/mvn-phase-script.sh
@@ -0,0 +1,83 @@
+#!/bin/bash
+# Copyright (c) 2017-2018 Wind River Systems, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+set -e
+
+echo "running script: [$0] for module [$1] at stage [$2]"
+
+export SETTINGS_FILE=${SETTINGS_FILE:-$HOME/.m2/settings.xml}
+MVN_PROJECT_MODULEID="$1"
+MVN_PHASE="$2"
+
+
+FQDN="${MVN_PROJECT_GROUPID}.${MVN_PROJECT_ARTIFACTID}"
+if [ "$MVN_PROJECT_MODULEID" == "__" ]; then
+ MVN_PROJECT_MODULEID=""
+fi
+
+if [ -z "$WORKSPACE" ]; then
+ WORKSPACE=$(pwd)
+fi
+
+# mvn phase in life cycle
+MVN_PHASE="$2"
+
+
+echo "MVN_PROJECT_MODULEID is [$MVN_PROJECT_MODULEID]"
+echo "MVN_PHASE is [$MVN_PHASE]"
+echo "MVN_PROJECT_GROUPID is [$MVN_PROJECT_GROUPID]"
+echo "MVN_PROJECT_ARTIFACTID is [$MVN_PROJECT_ARTIFACTID]"
+echo "MVN_PROJECT_VERSION is [$MVN_PROJECT_VERSION]"
+
+run_tox_test()
+{
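+ # Locate every tox.ini under the module and run tox for each one inside a
+ # throw-away python3 virtualenv.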
+ set -x
+ echo $PWD
+ CURDIR=$(pwd)
+ TOXINIS=($(find . -name "tox.ini"))
+ cd ..
+ for TOXINI in "${TOXINIS[@]}"; do
+ DIR=$(echo "$TOXINI" | rev | cut -f2- -d'/' | rev)
+ cd "${CURDIR}/${DIR}"
+ rm -rf ./venv-tox ./.tox
+ virtualenv ./venv-tox --python=python3
+ source ./venv-tox/bin/activate
+ pip install --upgrade pip
+ pip install --upgrade tox argparse
+ pip freeze
+ cd ${CURDIR}
+ tox
+ deactivate
+ cd ..
+ rm -rf ./venv-tox ./.tox
+ done
+}
+
+
+case $MVN_PHASE in
+clean)
+ echo "==> clean phase script"
+ rm -rf ./venv-*
+ ;;
+test)
+ echo "==> test phase script"
+ run_tox_test
+ ;;
+*)
+ echo "==> unprocessed phase"
+ ;;
+esac
+
diff --git a/hpa/pom.xml b/hpa/pom.xml
new file mode 100644
index 00000000..7d658344
--- /dev/null
+++ b/hpa/pom.xml
@@ -0,0 +1,199 @@
+<?xml version="1.0"?>
+<!--
+ Copyright (c) 2020 Intel Corporation.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <parent>
+ <groupId>org.onap.multicloud.openstack</groupId>
+ <artifactId>multicloud-openstack-root</artifactId>
+ <version>1.5.3-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.onap.multicloud.openstack</groupId>
+ <artifactId>multicloud-openstack-hpa</artifactId>
+ <version>1.5.3-SNAPSHOT</version>
+ <packaging>pom</packaging>
+ <name>multicloud-openstack-hpa</name>
+ <description>multicloud openstack hpa discovery plugin</description>
+ <properties>
+ <encoding>UTF-8</encoding>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ <nexusproxy>https://nexus.onap.org</nexusproxy>
+ <sonar.sources>.</sonar.sources>
+ <sonar.junit.reportsPath>xunit-results.xml</sonar.junit.reportsPath>
+ <sonar.python.coverage.reportPath>coverage.xml</sonar.python.coverage.reportPath>
+ <sonar.language>py</sonar.language>
+ <sonar.pluginName>Python</sonar.pluginName>
+ <sonar.inclusions>**/*.py</sonar.inclusions>
+ <sonar.exclusions>**/venv-tox/**,**/.tox/**, **/tests/**,setup.py</sonar.exclusions>
+ </properties>
+ <build>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>exec-maven-plugin</artifactId>
+ <version>1.2.1</version>
+ <configuration>
+ <executable>${project.basedir}/mvn-phase-script.sh</executable>
+ <environmentVariables>
+ <!-- make mvn properties as env for our script -->
+ <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID>
+ <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID>
+ <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION>
+ </environmentVariables>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>exec-maven-plugin</artifactId>
+ <version>1.2.1</version>
+ <executions>
+ <execution>
+ <id>clean phase script</id>
+ <phase>clean</phase>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ <configuration>
+ <arguments>
+ <argument>__</argument>
+ <argument>clean</argument>
+ </arguments>
+ </configuration>
+ </execution>
+ <execution>
+ <id>test script</id>
+ <phase>test</phase>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ <configuration>
+ <arguments>
+ <argument>__</argument>
+ <argument>test</argument>
+ </arguments>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <configuration>
+ <appendAssemblyId>false</appendAssemblyId>
+ <descriptors>
+ <descriptor>assembly.xml</descriptor>
+ </descriptors>
+ </configuration>
+ <executions>
+ <execution>
+ <id>make-assembly</id>
+ <phase>package</phase>
+ <goals>
+ <goal>single</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ <profiles>
+ <profile>
+ <id>docker</id>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-resources-plugin</artifactId>
+ <version>3.1.0</version>
+ <executions>
+ <execution>
+ <id>copy-resources</id>
+ <phase>install</phase>
+ <goals>
+ <goal>copy-resources</goal>
+ </goals>
+ <configuration>
+ <overwrite>true</overwrite>
+ <nonFilteredFileExtensions>
+ <nonFilteredFileExtension>zip</nonFilteredFileExtension>
+ </nonFilteredFileExtensions>
+ <outputDirectory>${project.basedir}/docker_target</outputDirectory>
+ <resources>
+ <resource>
+ <directory>${project.basedir}/docker</directory>
+ <filtering>true</filtering>
+ </resource>
+ <resource>
+ <directory>${project.basedir}/target</directory>
+ <filtering>true</filtering>
+ <includes>
+ <include>*.zip</include>
+ </includes>
+ </resource>
+ </resources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <version>1.6</version>
+ <executions>
+ <execution>
+ <phase>install</phase>
+ <inherited>false</inherited>
+ <configuration>
+ <target>
+ <exec executable="docker">
+ <arg value="build"/>
+ <arg value="-t"/>
+ <arg value="${CONTAINER_PUSH_REGISTRY}/onap/multicloud/openstack-hpa:${project.version}"/>
+ <arg value="docker_target"/>
+ </exec>
+ <exec executable="docker">
+ <arg value="tag"/>
+ <arg value="${CONTAINER_PUSH_REGISTRY}/onap/multicloud/openstack-hpa:${project.version}"/>
+ <arg value="${CONTAINER_PUSH_REGISTRY}/onap/multicloud/openstack-hpa:latest"/>
+ </exec>
+ <exec executable="docker">
+ <arg value="push"/>
+ <arg value="${CONTAINER_PUSH_REGISTRY}/onap/multicloud/openstack-hpa:${project.version}"/>
+ </exec>
+ <exec executable="docker">
+ <arg value="push"/>
+ <arg value="${CONTAINER_PUSH_REGISTRY}/onap/multicloud/openstack-hpa:latest"/>
+ </exec>
+ </target>
+ </configuration>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ </activation>
+ </profile>
+ </profiles>
+</project>
diff --git a/hpa/setup.py b/hpa/setup.py
new file mode 100644
index 00000000..93babd2a
--- /dev/null
+++ b/hpa/setup.py
@@ -0,0 +1,43 @@
+from setuptools import setup, find_packages
+
+setup(
+ name='hpa',
+ version='1.0',
+
+ description='HPA discovery package for stevedore',
+
+ author='Haibin Huang',
+ author_email='haibin.huang@intel.com',
+
+ url='https://opendev.org/openstack/stevedore',
+
+ classifiers=['Development Status :: 3 - Alpha',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.5',
+ 'Intended Audience :: Developers',
+ 'Environment :: Console',
+ ],
+
+ platforms=['Any'],
+
+ scripts=[],
+
+ provides=['hpa',
+ ],
+
+ packages=find_packages(),
+ install_requires=['stevedore'],
+ include_package_data=True,
+
+ entry_points={
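+ # Registers the plugin in the 'hpa.discovery' stevedore namespace; it is
+ # loaded via extension.ExtensionManager(namespace='hpa.discovery'), as in
+ # tests/test.py.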
+ 'hpa.discovery': [
+ 'discovery = hpa.hpa_discovery:HPA_Discovery',
+ ],
+ },
+
+ zip_safe=False,
+)
diff --git a/hpa/tests/__init__.py b/hpa/tests/__init__.py
new file mode 100644
index 00000000..792d6005
--- /dev/null
+++ b/hpa/tests/__init__.py
@@ -0,0 +1 @@
+#
diff --git a/hpa/tests/test.py b/hpa/tests/test.py
new file mode 100644
index 00000000..a9fdcd77
--- /dev/null
+++ b/hpa/tests/test.py
@@ -0,0 +1,148 @@
+from __future__ import print_function
+
+import argparse
+
+from stevedore import extension
+
+
+if __name__ == '__main__':
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-f', action='store', dest='dpdk', help='dpdk use?')
+ parser.add_argument('-t', action='store', dest='vimtype', help='vim type')
+ parser.add_argument('--version', action='version', version='%(prog)s 1.0')
+ results = parser.parse_args()
+ dpdk = results.dpdk
+ vimtype = results.vimtype
+
+ flavor = {
+ "vcpus": 2,
+ "ram": "2048",
+ "disk": "2G",
+ "swap": False,
+ "OS-FLV-EXT-DATA:ephemeral": False
+ }
+
+ # viminfo
+ viminfo = {
+ "createTime": "2017-04-01 02:22:27",
+ "domain": "Default",
+ "name": "TiS_R4",
+ "password": "admin",
+ "tenant": "admin",
+ "type": "openstack",
+ "url": "http://128.224.180.14:5000/v3",
+ "userName": "admin",
+ "vendor": "WindRiver",
+ "version": "newton",
+ "vimId": "windriver-hudson-dc_RegionOne",
+ 'cloud_owner': 'windriver-hudson-dc',
+ 'cloud_region_id': 'RegionOne',
+ 'insecure': 'True'
+ }
+
+ # Add cloud_extra_info in convert_vim_info
+ viminfo_with_dpdk = {
+ "createTime": "2017-04-01 02:22:27",
+ "domain": "Default",
+ "name": "TiS_R4",
+ "password": "admin",
+ "tenant": "admin",
+ "type": "openstack",
+ "url": "http://128.224.180.14:5000/v3",
+ "userName": "admin",
+ "vendor": "WindRiver",
+ "version": "newton",
+ "vimId": "windriver-hudson-dc_RegionOne",
+ 'cloud_owner': 'windriver-hudson-dc',
+ 'cloud_region_id': 'RegionOne',
+ 'insecure': 'True',
+ 'cloud_extra_info': '{ \
+ "ovsDpdk": { \
+ "version": "v1", \
+ "arch": "Intel64", \
+ "libname": "dataProcessingAccelerationLibrary", \
+ "libversion": "v12.1" \
+ } \
+ }'
+ }
+
+ # flavor extra specs
+ extra_specs = [
+ # HPA UT1: CPU-Pinning
+ {
+ "aggregate_instance_extra_specs:storage": "local_image",
+ "capabilities:cpu_info:model": "Haswell",
+ "hw:cpu_policy": "dedicated",
+ "hw:cpu_thread_policy": "prefer"
+ },
+ # HPA UT2: CPU-Topology
+ {
+ "aggregate_instance_extra_specs:storage": "local_image",
+ "capabilities:cpu_info:model": "Haswell",
+ "hw:cpu_sockets": "2",
+ "hw:cpu_cores": "4",
+ "hw:cpu_threads": "16"
+ },
+ # HPA UT3: mem_page_size
+ {
+ "aggregate_instance_extra_specs:storage": "local_image",
+ "capabilities:cpu_info:model": "Haswell",
+ "hw:mem_page_size": "large"
+ },
+ # HPA UT4: numa_nodes
+ {
+ "aggregate_instance_extra_specs:storage": "local_image",
+ "capabilities:cpu_info:model": "Haswell",
+ "hw:numa_nodes": "2",
+ "hw:numa_cpus.0": "0,1",
+ "hw:numa_cpus.1": "2,3,4,5",
+ "hw:numa_mem.0": "2048",
+ "hw:numa_mem.1": "2048"
+ },
+ # HPA UT5: instruction set
+ {
+ "aggregate_instance_extra_specs:storage": "local_image",
+ "capabilities:cpu_info:model": "Haswell",
+ "hw:capabilities:cpu_info:features": "avx,acpi"
+ },
+ # HPA UT6: pci passthrough
+ {
+ "aggregate_instance_extra_specs:storage": "local_image",
+ "capabilities:cpu_info:model": "Haswell",
+ "pci_passthrough:alias": "sriov-vf-intel-8086-15b3:4"
+ },
+ # HPA UT7: sriov-nic
+ {
+ "aggregate_instance_extra_specs:sriov_nic": "sriov-nic-intel-8086-15b3-physnet-1:1",
+ "capabilities:cpu_info:model": "Haswell"
+ }
+ ]
+
+
+ def get_hpa_capabilities(ext, data):
+ return (ext.obj.get_hpa_capabilities(data))
+
+ if dpdk == "dpdk":
+ viminfo = viminfo_with_dpdk
+
+ for extra_spec in extra_specs:
+ data = {"flavor": flavor, "extra_specs": extra_spec, "viminfo": viminfo, "vimtype": vimtype}
+ mgr = extension.ExtensionManager(
+ namespace='hpa.discovery',
+ invoke_on_load=True,
+ )
+
+ results = mgr.map(get_hpa_capabilities, data)
+ print(results)
diff --git a/hpa/tests/test_hpa_discovery.py b/hpa/tests/test_hpa_discovery.py
new file mode 100644
index 00000000..c63f377f
--- /dev/null
+++ b/hpa/tests/test_hpa_discovery.py
@@ -0,0 +1,105 @@
+import unittest
+
+from hpa import hpa_discovery
+
+class TestDiscovery(unittest.TestCase):
+ def test_hpa_discovery(self):
+ """
+ Test that it can discover HPA capabilities
+ """
+
+ flavor = {
+ "vcpus": 2,
+ "ram": "2048",
+ "disk": "2G",
+ "swap": False,
+ "OS-FLV-EXT-DATA:ephemeral": False
+ }
+
+ # Add cloud_extra_info in convert_vim_info
+ viminfo = {
+ "createTime": "2017-04-01 02:22:27",
+ "domain": "Default",
+ "name": "TiS_R4",
+ "password": "admin",
+ "tenant": "admin",
+ "type": "openstack",
+ "url": "http://128.224.180.14:5000/v3",
+ "userName": "admin",
+ "vendor": "WindRiver",
+ "version": "newton",
+ "vimId": "windriver-hudson-dc_RegionOne",
+ 'cloud_owner': 'windriver-hudson-dc',
+ 'cloud_region_id': 'RegionOne',
+ 'insecure': 'True',
+ 'cloud_extra_info': '{ \
+ "ovsDpdk": { \
+ "version": "v1", \
+ "arch": "Intel64", \
+ "libname": "dataProcessingAccelerationLibrary", \
+ "libversion": "v12.1" \
+ } \
+ }'
+ }
+
+ # flavor extra specs
+ extra_specs = [
+ # HPA UT1: CPU-Pinning
+ {
+ "aggregate_instance_extra_specs:storage": "local_image",
+ "capabilities:cpu_info:model": "Haswell",
+ "hw:cpu_policy": "dedicated",
+ "hw:cpu_thread_policy": "prefer"
+ },
+ # HPA UT2: CPU-Topology
+ {
+ "aggregate_instance_extra_specs:storage": "local_image",
+ "capabilities:cpu_info:model": "Haswell",
+ "hw:cpu_sockets": "2",
+ "hw:cpu_cores": "4",
+ "hw:cpu_threads": "16"
+ },
+ # HPA UT3: mem_page_size
+ {
+ "aggregate_instance_extra_specs:storage": "local_image",
+ "capabilities:cpu_info:model": "Haswell",
+ "hw:mem_page_size": "large"
+ },
+ # HPA UT4: numa_nodes
+ {
+ "aggregate_instance_extra_specs:storage": "local_image",
+ "capabilities:cpu_info:model": "Haswell",
+ "hw:numa_nodes": "2",
+ "hw:numa_cpus.0": "0,1",
+ "hw:numa_cpus.1": "2,3,4,5",
+ "hw:numa_mem.0": "2048",
+ "hw:numa_mem.1": "2048"
+ },
+ # HPA UT5: instruction set
+ {
+ "aggregate_instance_extra_specs:storage": "local_image",
+ "capabilities:cpu_info:model": "Haswell",
+ "hw:capabilities:cpu_info:features": "avx,acpi"
+ },
+ # HPA UT6: pci passthrough
+ {
+ "aggregate_instance_extra_specs:storage": "local_image",
+ "capabilities:cpu_info:model": "Haswell",
+ "pci_passthrough:alias": "sriov-vf-intel-8086-15b3:4"
+ },
+ # HPA UT7: sriov-nic
+ {
+ "aggregate_instance_extra_specs:sriov_nic": "sriov-nic-intel-8086-15b3-physnet-1:1",
+ "capabilities:cpu_info:model": "Haswell"
+ }
+ ]
+
+ vimtype = "windriver"
+ hpa = hpa_discovery.HPA_Discovery()
+ for extra_spec in extra_specs:
+ data = {"flavor": flavor, "extra_specs": extra_spec, "viminfo": viminfo, "vimtype": vimtype}
+ results = hpa.get_hpa_capabilities(data)
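+ # get_hpa_capabilities returns a list of capability dicts
+ self.assertIsInstance(results, list)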
+
+if __name__ == '__main__':
+ unittest.main()
+
diff --git a/hpa/tox.ini b/hpa/tox.ini
new file mode 100644
index 00000000..8727707e
--- /dev/null
+++ b/hpa/tox.ini
@@ -0,0 +1,8 @@
+[tox]
+envlist = py35
+
+[testenv]
+deps =
+
+commands =
+ python3 -m unittest discover
diff --git a/pom.xml b/pom.xml
index 478df46b..e1d32a66 100644
--- a/pom.xml
+++ b/pom.xml
@@ -45,6 +45,7 @@
<module>pike</module>
<module>starlingx</module>
<!--module>lenovo</module-->
+ <module>hpa</module>
</modules>
<build>