author     Stone, Avi (as206k) <as206k@att.com>   2018-04-12 15:46:31 +0300
committer  Stone, Avi (as206k) <as206k@att.com>   2018-04-12 15:49:38 +0300
commit     5032434b101f25fa44d2e1f8dc8393e30af1ed4f
tree       2dc7d37a8048e025c7412af080640da4c9a22b65
parent     2205633792f95f46a02bbf8f87f0c2637265d924
DCAE-D be initial commit
Issue-ID: SDC-1218
Change-Id: Id18ba96c499e785aa9ac395fbaf32d57f08c281b
Signed-off-by: Stone, Avi (as206k) <as206k@att.com>
264 files changed, 49917 insertions, 0 deletions
diff --git a/dcaedt_be/.gitignore b/dcaedt_be/.gitignore new file mode 100644 index 0000000..e6bf4f8 --- /dev/null +++ b/dcaedt_be/.gitignore @@ -0,0 +1,30 @@ +target/ +!.mvn/wrapper/maven-wrapper.jar + +### STS ### +.classpath +.factorypath +.project +.settings +.springBeans + +### IntelliJ IDEA ### +.idea +*.iws +*.iml +*.ipr + +### NetBeans ### +nbproject/private/ +build/ +nbbuild/ +dist/ +nbdist/ +.nb-gradle/ +/compositiondb* + +bin/ + +### testng ### +test-output/ + diff --git a/dcaedt_be/pom.xml b/dcaedt_be/pom.xml new file mode 100644 index 0000000..b236858 --- /dev/null +++ b/dcaedt_be/pom.xml @@ -0,0 +1,193 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project + xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <groupId>org.onap.sdc.dcae.composition</groupId> + <artifactId>dcae_dt_be</artifactId> + <packaging>war</packaging> + <name>DCAE DT BE</name> + <parent> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>dcae_dt_be_main</artifactId> + <version>1806.0.1-SNAPSHOT</version> + </parent> + <dependencies> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-web</artifactId> + <exclusions> + <exclusion> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-tomcat</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-test</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.commons</groupId> + <artifactId>commons-lang3</artifactId> + <version>3.5</version> + </dependency> + <dependency> + <groupId>commons-net</groupId> + <artifactId>commons-net</artifactId> + <version>3.3</version> + </dependency> + <dependency> + <groupId>com.jcraft</groupId> + <artifactId>jsch</artifactId> + <version>0.1.54</version> + </dependency> + <dependency> + <groupId>com.google.code.gson</groupId> + <artifactId>gson</artifactId> + <version>2.8.0</version> + </dependency> + <dependency> + <groupId>org.json</groupId> + <artifactId>json</artifactId> + <version>20160810</version> + </dependency> + <dependency> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Catalog-ASDC</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Catalog-API</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Catalog-Commons</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Catalog-DB</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Catalog-Service</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.powermock</groupId> + <artifactId>powermock-module-junit4</artifactId> + <version>1.6.4</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.powermock</groupId> + <artifactId>powermock-api-mockito</artifactId> + <version>1.6.4</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.testng</groupId> + <artifactId>testng</artifactId> + <version>6.9.10</version> + <scope>test</scope> 
+ </dependency> + <!-- https://mvnrepository.com/artifact/com.googlecode.json-simple/json-simple --> + <dependency> + <groupId>com.googlecode.json-simple</groupId> + <artifactId>json-simple</artifactId> + <version>1.1.1</version> + </dependency> + <dependency> + <groupId>io.springfox</groupId> + <artifactId>springfox-swagger2</artifactId> + <version>2.6.1</version> + <scope>compile</scope> + </dependency> + <dependency> + <groupId>io.springfox</groupId> + <artifactId>springfox-swagger-ui</artifactId> + <version>2.6.1</version> + <scope>compile</scope> + </dependency> + </dependencies> + <profiles> + <profile> + <id>local</id> + <activation> + <activeByDefault>false</activeByDefault> + </activation> + <dependencies> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-jetty</artifactId> + <version>1.5.2.RELEASE</version> + <exclusions> + <exclusion> + <groupId>org.eclipse.jetty.websocket</groupId> + <artifactId>*</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>javax.servlet</groupId> + <artifactId>javax.servlet-api</artifactId> + <version>4.0.0</version> + <scope>provided</scope> + </dependency> + </dependencies> + </profile> + <profile> + <id>server</id> + <activation> + <activeByDefault>true</activeByDefault> + </activation> + <dependencies> + <dependency> + <groupId>javax.servlet</groupId> + <artifactId>javax.servlet-api</artifactId> + <version>4.0.0</version> + </dependency> + </dependencies> + </profile> + </profiles> + <build> + <finalName>dcae</finalName> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-surefire-plugin</artifactId> + </plugin> + <plugin> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-maven-plugin</artifactId> + <configuration> + <webXml>src/main/webapp/WEB-INF/web.xml</webXml> + <webResources> + <resource> + <!-- this is relative to the pom.xml directory --> + <directory>src/main/webapp/</directory> + </resource> + </webResources> + </configuration> + <!-- <executions><execution><goals><goal>repackage</goal></goals></execution></executions><configuration><jvmArguments> -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8096 + </jvmArguments></configuration> --> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-war-plugin</artifactId> + <configuration> + <archive> + <manifestEntries> + <Specification-Version>${project.version}</Specification-Version> + </manifestEntries> + </archive> + </configuration> + </plugin> + </plugins> + </build> +</project> diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/CompositionConfig.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/CompositionConfig.java new file mode 100644 index 0000000..ee8f5c6 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/CompositionConfig.java @@ -0,0 +1,87 @@ +package org.onap.sdc.dcae.composition; + +import java.lang.reflect.Type; +import java.util.Map; +import java.util.Set; + +import javax.annotation.PostConstruct; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.PropertySource; +import org.springframework.context.annotation.PropertySources; +import org.springframework.stereotype.Component; + +import com.fasterxml.jackson.annotation.JsonIgnore; 
+import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.gson.Gson; +import com.google.gson.reflect.TypeToken; + +@Component +@PropertySources({ + @PropertySource(value="classpath:application-fe.properties", ignoreResourceNotFound=true), + @PropertySource(value="file:${jetty.base}/config/dcae-be/application.properties", ignoreResourceNotFound=true) +}) + +public class CompositionConfig { + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + @Value("${compositionConfig.flowTypes}") + private String flowTypes; + @JsonIgnore + private Map<String, FlowType> flowTypesMap; + @Value("${compositionConfig.isRuleEditorActive}") + private boolean isRuleEditorActive; + + // get flowTypes as the parsed keySet + public Set<String> getFlowTypes() { + return flowTypesMap.keySet(); + } + + @JsonProperty("isRuleEditorActive") + public boolean isRuleEditorActive() { + return isRuleEditorActive; + } + + public Map<String, FlowType> getFlowTypesMap() { + return flowTypesMap; + } + + public static class FlowType { + + private String entryPointPhaseName; + private String lastPhaseName; + + public String getEntryPointPhaseName() { + return entryPointPhaseName; + } + + public void setEntryPointPhaseName(String entryPointPhaseName) { + this.entryPointPhaseName = entryPointPhaseName; + } + + public String getLastPhaseName() { + return lastPhaseName; + } + + public void setLastPhaseName(String lastPhaseName) { + this.lastPhaseName = lastPhaseName; + } + } + + + @PostConstruct + public void init() { + try { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Reading flow type definitions from configuration"); + Type map = new TypeToken<Map<String, FlowType>>(){}.getType(); + flowTypesMap = new Gson().fromJson(flowTypes, map); + } catch (Exception e) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Error – Failed to read flow type definitions"); + } + } +}
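The CompositionConfig bean above expects the compositionConfig.flowTypes property to carry a JSON map from flow-type name to its entry and last phase names, which init() deserializes via Gson. A minimal standalone sketch of that parsing follows; the "Syslog" flow type and the phase values are hypothetical, and the real values come from application.properties:

    import java.lang.reflect.Type;
    import java.util.Map;
    import com.google.gson.Gson;
    import com.google.gson.reflect.TypeToken;

    public class FlowTypesParseSketch {
        // Local stand-in for CompositionConfig.FlowType so the sketch compiles on its own.
        static class FlowType {
            String entryPointPhaseName;
            String lastPhaseName;
        }

        public static void main(String[] args) {
            // Hypothetical property value mirroring the shape the bean expects.
            String flowTypes = "{\"Syslog\":{\"entryPointPhaseName\":\"phase_1\",\"lastPhaseName\":\"map_publish\"}}";
            Type mapType = new TypeToken<Map<String, FlowType>>() {}.getType();
            Map<String, FlowType> flowTypesMap = new Gson().fromJson(flowTypes, mapType);
            System.out.println(flowTypesMap.keySet());                     // [Syslog]
            System.out.println(flowTypesMap.get("Syslog").lastPhaseName);  // map_publish
        }
    }

Note that getFlowTypes() exposes only the parsed key set, so callers see the flow-type names, while getFlowTypesMap() retains the phase details.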
\ No newline at end of file diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/CompositionEngine.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/CompositionEngine.java new file mode 100644 index 0000000..186f3f6 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/CompositionEngine.java @@ -0,0 +1,140 @@ +package org.onap.sdc.dcae.composition; + +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Properties; +import java.util.jar.Attributes; +import java.util.jar.Manifest; + +import javax.servlet.ServletContext; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.composition.util.SystemProperties; +import org.onap.sdc.dcae.filter.LoggingFilter; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.errormng.ErrorConfiguration; +import org.onap.sdc.dcae.errormng.ErrorConfigurationLoader; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.CommandLineRunner; +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.boot.web.servlet.FilterRegistrationBean; +import org.springframework.boot.web.support.SpringBootServletInitializer; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.PropertySource; +import org.springframework.scheduling.annotation.EnableScheduling; +import org.springframework.web.servlet.config.annotation.CorsRegistry; +import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; +import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter; + +@Configuration +@EnableScheduling +@SpringBootApplication +@ComponentScan("org.onap.sdc.dcae") +@EnableAutoConfiguration +@PropertySource("file:${jetty.base}/config/dcae-be/application.properties") +public class CompositionEngine extends SpringBootServletInitializer implements CommandLineRunner{ + private static final String SPECIFICATION_VERSION = "Specification-Version"; + @Autowired + ServletContext servletContext; + private static final String MANIFEST_FILE_NAME = "/META-INF/MANIFEST.MF"; + private static String dcaeVersion; + private OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + public static void main(String[] args) { + SpringApplication.run(CompositionEngine.class, args); + } + + /** + * Configures the application for servlet-container deployment, registering this class as a Spring Boot source. + * + * @return The configured {@link SpringApplicationBuilder}.
+ */ + @Override + protected SpringApplicationBuilder configure(SpringApplicationBuilder application) { + return application.sources(CompositionEngine.class); + } + + + @Bean + public WebMvcConfigurer corsConfigurer() { + return new WebMvcConfigurerAdapter() { + @Override + public void addCorsMappings(CorsRegistry registry) { + registry.addMapping("/**") + .allowedOrigins("*") + .allowedHeaders("*") + .allowedMethods("GET", "POST", "OPTIONS", "PUT") + .allowCredentials(false) + .maxAge(3600); + + } + }; + } + + @Override + public void run(String... args) throws Exception { + + ErrorConfigurationLoader errorConfigurationLoader = new ErrorConfigurationLoader(System.getProperty("jetty.base")); + ErrConfMgr instance = ErrConfMgr.INSTANCE; + InputStream inputStream = servletContext.getResourceAsStream(MANIFEST_FILE_NAME); + + //setLogbackXmlLocation(); + + String version = null; + try { + Manifest mf = new Manifest(inputStream); + Attributes atts = mf.getMainAttributes(); + version = atts.getValue(SPECIFICATION_VERSION); + if (version == null || version.isEmpty()) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "failed to read DCAE version from MANIFEST."); + } else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "DCAE version from MANIFEST is {}", version); + dcaeVersion = version; + } + + } catch (IOException e) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "failed to read DCAE version from MANIFEST: {}", e.getMessage()); + } + + } + + private void setLogbackXmlLocation() throws Exception { + String jettyBase = System.getProperty("config.home"); + Properties props = System.getProperties(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Current System Properties are: {}", props); + if (jettyBase == null) { + String msg = "Couldn't resolve config.home environment variable"; + errLogger.log(LogLevel.ERROR, this.getClass().getName(), msg); + throw new Exception(msg + ". Failed to configure logback.xml location...
aborting."); + } + String logbackXmlLocation = jettyBase+"/dcae-be/logback.xml"; + props.setProperty("logback.configurationFile", logbackXmlLocation); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Successfully set the logback.xml location to {}", logbackXmlLocation); + } + + @Bean + public FilterRegistrationBean contextLifecycleFilter() { + Collection<String> urlPatterns = new ArrayList<>(); + urlPatterns.add("/*"); + + FilterRegistrationBean filterRegistrationBean = new FilterRegistrationBean(new LoggingFilter()); + filterRegistrationBean.setUrlPatterns(urlPatterns); + + return filterRegistrationBean; + } + + public static String getDcaeVersion() { + return dcaeVersion; + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/BaseController.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/BaseController.java new file mode 100644 index 0000000..8b590ca --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/BaseController.java @@ -0,0 +1,80 @@ +package org.onap.sdc.dcae.composition.controller; + +import javax.servlet.http.HttpServletRequest; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.composition.impl.BaseBusinessLogic; +import org.onap.sdc.dcae.composition.restmodels.sdc.Asset; +import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed; +import org.onap.sdc.dcae.composition.util.DcaeBeConstants; +import org.onap.sdc.dcae.composition.util.SystemProperties; +import org.onap.sdc.dcae.enums.AssetType; +import org.onap.sdc.dcae.enums.LifecycleOperationType; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.DcaeException; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.errormng.ErrConfMgr.ApiType; +import org.onap.sdc.dcae.errormng.ResponseFormat; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.ModelAttribute; + +import com.google.gson.Gson; + +public abstract class BaseController { + + protected Gson gson = new Gson(); + + @Autowired + protected SystemProperties systemProperties; + + @Autowired + protected BaseBusinessLogic baseBusinessLogic; + + protected OnapLoggerError errLogger = OnapLoggerError.getInstance(); + protected OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + @ModelAttribute("requestId") + public String getRequestId(HttpServletRequest request) { + return request.getAttribute("requestId").toString(); + } + + Asset checkout(String userId, String uuid, AssetType assetType, String requestId) throws Exception { + return baseBusinessLogic.getSdcRestClient().changeAssetLifecycleState(userId, uuid, LifecycleOperationType.CHECKOUT.name(), null, assetType, requestId); + } + + Asset checkin(String userId, String uuid, AssetType assetType, String requestId) throws Exception { + return baseBusinessLogic.getSdcRestClient().changeAssetLifecycleState(userId, uuid, LifecycleOperationType.CHECKIN.name(), "checking in " + assetType.name() + " " + uuid, assetType, requestId); + } + + + boolean isNeedToCheckOut(String lifecycleState) { + return DcaeBeConstants.LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT != DcaeBeConstants.LifecycleStateEnum.findState(lifecycleState); + } + + void checkUserIfResourceCheckedOut(String userId, Asset
asset) throws DcaeException { + if (DcaeBeConstants.LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT == DcaeBeConstants.LifecycleStateEnum.findState(asset.getLifecycleState())) { + String lastUpdaterUserId = asset.getLastUpdaterUserId(); + if (lastUpdaterUserId != null && !lastUpdaterUserId.equals(userId)) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "User conflicts. Operation not allowed for user {} on resource checked out by {}", userId, lastUpdaterUserId); + ResponseFormat responseFormat = ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.USER_CONFLICT, null, userId, asset.getName(), lastUpdaterUserId); + throw new DcaeException(HttpStatus.FORBIDDEN, responseFormat.getRequestError()); + } + } + } + + void checkVfcmtType(ResourceDetailed vfcmt) { + if (!"VFCMT".equals(vfcmt.getResourceType()) || !"Template".equals(vfcmt.getCategory())) { + ResponseFormat responseFormat = ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.RESOURCE_NOT_VFCMT_ERROR, null, vfcmt.getUuid()); + throw new DcaeException(HttpStatus.BAD_REQUEST, responseFormat.getRequestError()); + } + } + + ResponseEntity handleException(Exception e, ApiType apiType, String... variables){ + errLogger.log(LogLevel.ERROR, this.getClass().getName(), e.getMessage()); + return ErrConfMgr.INSTANCE.handleException(e, apiType, variables); + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/BlueprintController.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/BlueprintController.java new file mode 100644 index 0000000..a12c6b8 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/BlueprintController.java @@ -0,0 +1,239 @@ +package org.onap.sdc.dcae.composition.controller; + +import org.apache.commons.lang.StringUtils; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.composition.restmodels.MessageResponse; +import org.onap.sdc.dcae.catalog.asdc.ASDC; +import org.onap.sdc.dcae.catalog.asdc.ASDCUtils; +import org.onap.sdc.dcae.catalog.asdc.Blueprinter; +import org.onap.sdc.dcae.composition.restmodels.sdc.*; +import org.onap.sdc.dcae.utils.Normalizers; +import org.onap.sdc.dcae.composition.util.DcaeBeConstants; +import org.onap.sdc.dcae.enums.ArtifactType; +import org.onap.sdc.dcae.enums.AssetType; +import org.onap.sdc.dcae.enums.LifecycleOperationType; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.errormng.ErrConfMgr.ApiType; +import org.onap.sdc.dcae.utils.SdcRestClientUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.util.Base64Utils; +import org.springframework.util.CollectionUtils; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.PostConstruct; +import java.io.StringReader; +import java.net.URI; + +@RestController +@EnableAutoConfiguration +@CrossOrigin +public class BlueprintController extends BaseController{ + + @Autowired + private Blueprinter blueprinter; + + @Autowired + private ASDC asdc; + + private static final String CREATE_DESC = "creating new artifact blueprint on the service vfi"; + private static final String UPDATE_DESC = "updating artifact blueprint on the service vfi"; + + + + @PostConstruct + public void init(){ + URI sdcUri = 
URI.create(systemProperties.getProperties().getProperty(DcaeBeConstants.Config.URI)); + asdc.setUri(sdcUri); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "SDC uri: {}", sdcUri); + } + + /*** + * VFCMT - Resource, blueprint - as an artifact as an service. + * @param userId + * @param vfcmtUuid + * @param serviceUuid + * @param serviceInstanceName + * @param monitoringFlowType + * @return ResponseEntity + */ + @RequestMapping(value = "/createBluePrint/{VFCMTUuid}/{serviceUuid}/{instanceName}/{monitoringFlowType}", method = RequestMethod.POST) + public ResponseEntity createBluePrint(@RequestHeader("USER_ID") String userId, + @PathVariable("VFCMTUuid") String vfcmtUuid, + @PathVariable("serviceUuid") String serviceUuid, + @PathVariable("instanceName") String serviceInstanceName, + @PathVariable("monitoringFlowType") String monitoringFlowType, + @ModelAttribute("requestId") String requestId) { + try { + + ResourceDetailed vfcmt = baseBusinessLogic.getSdcRestClient().getResource(vfcmtUuid, requestId); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), vfcmt.toString()); + checkVfcmtType(vfcmt); + Artifact cdumpArtifactData = findCdumpArtifactData(vfcmt); + if (null != cdumpArtifactData) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Found the cdump (composition.yml) on top of VFCMT {}", vfcmtUuid); + String cdump = baseBusinessLogic.getSdcRestClient().getResourceArtifact(vfcmtUuid, cdumpArtifactData.getArtifactUUID(), requestId); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "---------------------------------------------------------------CDUMP: -----------------------------------------------------------------------------"); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), cdump); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "---------------------------------------------------------------------------------------------------------------------------------------------------"); + ASDCUtils utils = new ASDCUtils(asdc, blueprinter); + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Going to use python procedure to create a blueprint...."); + String resultBlueprintCreation; + try{ + resultBlueprintCreation = utils.buildBlueprintViaToscaLab(new StringReader(cdump)).waitForResult().waitForResult(); + }catch (Exception e){ + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.GENERATE_BLUEPRINT_ERROR, e.getMessage(), vfcmt.getName()); + } + if (StringUtils.isEmpty(resultBlueprintCreation)) { + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.GENERATE_BLUEPRINT_ERROR, "", vfcmt.getName()); + } + + // 1806 US374595 flowType in cdump + String flowTypeFromCdump = StringUtils.substringBetween(cdump,"\"flowType\":\"","\""); + if(StringUtils.isNotBlank(flowTypeFromCdump)) { + monitoringFlowType = flowTypeFromCdump; + } + // saving to serviceVfInstance + Artifact savedBluePrint = saveBluePrint(userId, serviceUuid, serviceInstanceName, resultBlueprintCreation, monitoringFlowType, vfcmt.getName(), requestId); + if(savedBluePrint!=null){ + MessageResponse response = new MessageResponse(); + response.setSuccessResponse("Blueprint build complete \n. 
Blueprint="+savedBluePrint.getArtifactName()); + //1806 US374593 - certify VFCMT after BP generation + certifyVfcmt(vfcmt, requestId); + return new ResponseEntity<>(response, HttpStatus.OK); + } + else{ + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.SUBMIT_BLUEPRINT_ERROR); + } + + }else{ + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.MISSING_TOSCA_FILE, "", vfcmt.getName()); + } + } catch (Exception e) { + return handleException(e, ApiType.SUBMIT_BLUEPRINT); + } + } + + + /********************* private function ********************/ + + /** + * @param userId + * @param serviceUuid + * @param resourceInstanceName + * @param bluePrint + * @param monitoringFlowType + * @param vfcmtName + * @param requestId + * @return + * @throws Exception + */ + private Artifact saveBluePrint(String userId, String serviceUuid, String resourceInstanceName, String bluePrint, String monitoringFlowType, String vfcmtName, String requestId) throws Exception { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "BLUEPRINT:\n{}", bluePrint); + try { + ServiceDetailed service = baseBusinessLogic.getSdcRestClient().getService(serviceUuid, requestId); + //Validations + checkUserIfResourceCheckedOut(userId, service); + ResourceInstance vfi = findVfiOnService(service, resourceInstanceName); + if(null == vfi){ + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "VF instance {} not found on service {}", resourceInstanceName, serviceUuid); + return null; + } + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), service.toString()); + + String normalizedArtifactLabel = Normalizers.normalizeArtifactLabel("blueprint-" + monitoringFlowType); + Artifact blueprintArtifact = CollectionUtils.isEmpty(vfi.getArtifacts()) ? null : vfi.getArtifacts().stream() + .filter(p -> normalizedArtifactLabel.equals(Normalizers.normalizeArtifactLabel(p.getArtifactLabel()))) + .findAny() + .orElse(null); + + boolean isNeed2Checkout = isNeedToCheckOut(service.getLifecycleState()); + if (isNeed2Checkout) { + Asset result = checkout(userId, serviceUuid, AssetType.SERVICE, requestId); + if (result != null) { + serviceUuid = result.getUuid(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "New service after checkout is: {}", serviceUuid); + } + } + //update mode + if (null != blueprintArtifact) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Found that service {} already consist of {} ----> updateMode", serviceUuid, normalizedArtifactLabel); + blueprintArtifact.setDescription(UPDATE_DESC); + blueprintArtifact.setPayloadData(Base64Utils.encodeToString(bluePrint.getBytes())); + blueprintArtifact = baseBusinessLogic.getSdcRestClient().updateVfInstanceArtifact(userId, serviceUuid, Normalizers.normalizeComponentInstanceName(resourceInstanceName), blueprintArtifact, requestId); + //create mode + } else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Service {} does not consist {} ----> createMode", serviceUuid, normalizedArtifactLabel); + blueprintArtifact = SdcRestClientUtils.generateDeploymentArtifact(CREATE_DESC, generateBlueprintFileName(monitoringFlowType, vfcmtName), ArtifactType.DCAE_INVENTORY_BLUEPRINT.name(), normalizedArtifactLabel, bluePrint.getBytes()); + blueprintArtifact = baseBusinessLogic.getSdcRestClient().createVfInstanceArtifact(userId, serviceUuid, Normalizers.normalizeComponentInstanceName(resourceInstanceName), blueprintArtifact, requestId); + } + + //No need to check the service in in 1806 +// Asset blueprintAsJson = checkin(user_id, 
serviceUuid, AssetType.SERVICE); +// debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "service result after check-in: {}", blueprintAsJson.toString()); + + return blueprintArtifact; + + } catch (Exception e) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Error occurred while trying to save blueprint {}", e.toString()); + throw e; + } + } + + /** + * + * @param monitoringFlowType + * @param vfcmtName + * @return + */ + private String generateBlueprintFileName(String monitoringFlowType, String vfcmtName) { + StringBuffer sb = new StringBuffer(); + sb.append(monitoringFlowType); + sb.append("."); + sb.append(Normalizers.normalizeComponentName(vfcmtName)); + sb.append("."); + sb.append(DcaeBeConstants.Composition.fileNames.EVENT_PROC_BP_YAML); + return sb.toString(); + } + + private ResourceInstance findVfiOnService(ServiceDetailed service, String vfiName) { + return null == service ? null : CollectionUtils.isEmpty(service.getResources()) ? null : service.getResources().stream().filter(p -> vfiName.equals(p.getResourceInstanceName())).findAny().orElse(null); + } + + private Artifact findCdumpArtifactData(ResourceDetailed vfcmt) { + return null == vfcmt ? null : CollectionUtils.isEmpty(vfcmt.getArtifacts()) ? null : vfcmt.getArtifacts().stream() + .filter(p -> DcaeBeConstants.Composition.fileNames.COMPOSITION_YML.equals(p.getArtifactName())).findAny().orElse(null); + } + + private void certifyVfcmt(ResourceDetailed vfcmt, String requestId){ + String state = vfcmt.getLifecycleState(); + if(null == state) { + debugLogger.log(LogLevel.ERROR, this.getClass().getName(), "Couldn't read Vfcmt lifecycle state"); + return; + } + DcaeBeConstants.LifecycleStateEnum lifeCycleState = DcaeBeConstants.LifecycleStateEnum.findState(state); + if(null == lifeCycleState) { + debugLogger.log(LogLevel.ERROR, this.getClass().getName(), "Undefined lifecycle state: {}", state); + return; + } + try{ + switch (lifeCycleState){ + case NOT_CERTIFIED_CHECKOUT: + baseBusinessLogic.getSdcRestClient().changeResourceLifecycleState(vfcmt.getLastUpdaterUserId(), vfcmt.getUuid(), LifecycleOperationType.CHECKIN.name(), "check in VFCMT after blueprint successful submission", requestId); + case NOT_CERTIFIED_CHECKIN: + baseBusinessLogic.getSdcRestClient().changeResourceLifecycleState(vfcmt.getLastUpdaterUserId(), vfcmt.getUuid(), LifecycleOperationType.CERTIFY.name(), "certify VFCMT after blueprint successful submission", requestId); + } + } + catch (Exception e){ + //informative only. 
no message to user (TBA) + debugLogger.log(LogLevel.ERROR, this.getClass().getName(), "Error occurred during vfcmt lifecycle operation: {}", e.toString()); + } + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/CompositionController.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/CompositionController.java new file mode 100644 index 0000000..5cba14f --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/CompositionController.java @@ -0,0 +1,338 @@ +package org.onap.sdc.dcae.composition.controller; + +import org.json.JSONArray; +import org.json.JSONException; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.composition.restmodels.MessageResponse; +import org.onap.sdc.dcae.composition.restmodels.sdc.Artifact; +import org.onap.sdc.dcae.composition.restmodels.sdc.Asset; +import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed; +import org.onap.sdc.dcae.catalog.Catalog; +import org.onap.sdc.dcae.catalog.Catalog.*; +import org.onap.sdc.dcae.catalog.engine.*; +import org.onap.sdc.dcae.composition.util.DcaeBeConstants; +import org.onap.sdc.dcae.enums.ArtifactType; +import org.onap.sdc.dcae.enums.LifecycleOperationType; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.errormng.ErrConfMgr.ApiType; +import org.onap.sdc.dcae.utils.SdcRestClientUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.util.Base64Utils; +import org.springframework.util.CollectionUtils; +import org.springframework.web.bind.annotation.*; +import org.springframework.web.context.request.async.DeferredResult; + +import javax.annotation.PostConstruct; +import java.net.URI; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +@RestController +@EnableAutoConfiguration +@CrossOrigin +public class CompositionController extends BaseController{ + + @Autowired + private CatalogController catalogController; + + @PostConstruct + public void init() { + catalogController.setDefaultCatalog(URI.create(systemProperties.getProperties().getProperty(DcaeBeConstants.Config.ASDC_CATALOG_URL))); + } + + @RequestMapping(value = { "/utils/clone/{assetType}/{sourceId}/{targetId}" }, method = {RequestMethod.GET }, produces = { "application/json" }) + public ResponseEntity clone(@RequestHeader("USER_ID") String userId, @PathVariable("assetType") String theAssetType, @PathVariable("sourceId") String theSourceId, @PathVariable("targetId") String theTargetId, + @ModelAttribute("requestId") String requestId) { + MessageResponse response = new MessageResponse(); + + try { + // fetch the source and assert it is a vfcmt containing clone worthy artifacts (composition + rules) + ResourceDetailed sourceVfcmt = baseBusinessLogic.getSdcRestClient().getResource(theSourceId, requestId); + checkVfcmtType(sourceVfcmt); + List<Artifact> artifactsToClone = CollectionUtils.isEmpty(sourceVfcmt.getArtifacts()) ? 
null : sourceVfcmt.getArtifacts().stream() + .filter(p -> DcaeBeConstants.Composition.fileNames.COMPOSITION_YML.equals(p.getArtifactName()) || p.getArtifactName().endsWith(DcaeBeConstants.Composition.fileNames.MAPPING_RULE_POSTFIX)) + .collect(Collectors.toList()); + if(CollectionUtils.isEmpty(artifactsToClone)) { + response.setSuccessResponse("Nothing to clone"); + return new ResponseEntity<>(response ,HttpStatus.NO_CONTENT); + } + + // fetch the target + ResourceDetailed vfcmt = baseBusinessLogic.getSdcRestClient().getResource(theTargetId, requestId); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), vfcmt.toString()); + checkVfcmtType(vfcmt); + checkUserIfResourceCheckedOut(userId, vfcmt); + boolean isTargetNeed2Checkout = isNeedToCheckOut(vfcmt.getLifecycleState()); + if (isTargetNeed2Checkout) { + ResourceDetailed targetVfcmt = baseBusinessLogic.getSdcRestClient().changeResourceLifecycleState(userId, theTargetId, LifecycleOperationType.CHECKOUT.name(), "checking out VFCMT before clone", requestId); + if(null == targetVfcmt){ + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.GENERAL_ERROR); + } + theTargetId = targetVfcmt.getUuid(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "New targetVfcmt (for artifact clone) after checkout is: {}", theTargetId); + } + + Map<String, Artifact> currentArtifacts = CollectionUtils.isEmpty(vfcmt.getArtifacts()) ? new HashMap<>() : vfcmt.getArtifacts().stream() + .collect(Collectors.toMap(Artifact::getArtifactName, Function.identity())); + + //TODO target VFCMT rule artifacts should be removed + for(Artifact artifactToClone : artifactsToClone) { + String payload = baseBusinessLogic.getSdcRestClient().getResourceArtifact(theSourceId, artifactToClone.getArtifactUUID(), requestId); + baseBusinessLogic.cloneArtifactToTarget(userId, theTargetId, payload, artifactToClone, currentArtifacts.get(artifactToClone.getArtifactName()), requestId); + } + + baseBusinessLogic.getSdcRestClient().changeResourceLifecycleState(userId, theTargetId, LifecycleOperationType.CHECKIN.name(), "check in VFCMT after clone", requestId); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Cloning {} from {} has finished successfully", theSourceId, theTargetId); + response.setSuccessResponse("Clone VFCMT complete"); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + return handleException(e, ApiType.CLONE_VFCMT); + } + } + + @RequestMapping(value = "/elements", method = { RequestMethod.POST, RequestMethod.GET }, produces = "application/json") + public DeferredResult<CatalogResponse> items(@RequestBody(required = false) ItemsRequest theRequest) { + + final ItemsRequest request = (theRequest == null) ? 
ItemsRequest.EMPTY_REQUEST : theRequest; + + Catalog catalog = catalogController.getCatalog(request.getCatalog()); + DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout()); + + catalog.rootsByLabel(request.getStartingLabel()) + .setHandler(catalogController.new CatalogHandler<Folders>(request, result) { + public CatalogResponse handleData(Folders theFolders) { + JSONArray ja = new JSONArray(); + if (theFolders != null) { + for (Folder folder : theFolders) { + ja.put(catalogController.patchData(catalog, folder.data())); + } + } + CatalogResponse response = new CatalogResponse(this.request); + try { + response.data().put("elements", ja); + } catch (JSONException e) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "JSONException putting json elements to response {}", e); + } + return response; + } + }); + return result; + } + + @RequestMapping(value = "/{theItemId}/elements", method = { RequestMethod.POST, RequestMethod.GET }, produces = "application/json") + public DeferredResult<CatalogResponse> items(@RequestBody(required = false) ItemsRequest theRequest, @PathVariable String theItemId) { + + final ItemsRequest request = (theRequest == null) ? ItemsRequest.EMPTY_REQUEST : theRequest; + + Catalog catalog = catalogController.getCatalog(request.getCatalog()); + DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout()); + + catalog + // .fetchFolderByItemId(theItemId) + .folder(theItemId).withParts().withPartAnnotations().withItems().withItemAnnotations().withItemModels() + .execute().setHandler(catalogController.new CatalogHandler<Folder>(request, result) { + public CatalogResponse handleData(Folder theFolder) { + CatalogResponse response = new CatalogResponse(this.request); + if (theFolder == null) { + return response; + } + + try { + Elements folders = theFolder.elements("parts", Folders.class); + if (folders != null) { + for (Object folder : folders) { + catalogController.patchData(catalog, ((Element) folder).data()); + // lots of ephemere proxies created here .. + Elements annotations = ((Element) folder).elements("annotations", + Annotations.class); + if (annotations != null) { + for (Object a : annotations) { + catalogController.patchData(catalog, ((Annotation) a).data()); + } + } + } + } + Elements items = theFolder.elements("items", Items.class); + if (items != null) { + for (Object i : items) { + catalogController.patchData(catalog, ((Element) i).data()); + // lots of ephemere proxies created here .. + Elements annotations = ((Element) i).elements("annotations", Annotations.class); + if (annotations != null) { + for (Object a : annotations) { + catalogController.patchData(catalog, ((Annotation) a).data()); + } + } + } + } + } catch (Exception x) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Exception processing catalog {}", x); + return new CatalogError(this.request, "", x); + } + + try { + response.data().put("element", theFolder.data()); + } catch (JSONException e) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "JSONException putting element to response {}", e); + } + return response; + } + }); + + return result; + } + + @RequestMapping(value = "/{theItemId}/model", method = { RequestMethod.POST,RequestMethod.GET }, produces = "application/json") + public DeferredResult model(@RequestBody(required = false) ElementRequest theRequest, + @PathVariable String theItemId) { + final ElementRequest request = (theRequest == null) ? 
ElementRequest.EMPTY_REQUEST : theRequest; + + Catalog catalog = catalogController.getCatalog(request.getCatalog()); + DeferredResult<CatalogResponse> result = new DeferredResult<>(request.getTimeout()); + + catalog + .item(theItemId).withModels().execute() + .setHandler(catalogController.new CatalogHandler<Item>(request, result) { + public CatalogResponse handleData(Item theItem) { + if (theItem == null) { + return new CatalogError(this.request, "No such item"); + } + Templates models = null; + try { + models = (Templates) theItem.elements("models", Templates.class); + if (models == null || models.isEmpty()) { + return new CatalogError(this.request, "Item has no models"); + } + if (models.size() > 1) { + return new CatalogError(this.request, "Item has more than one model !?"); + } + catalog.template(models.get(0).id()).withInputs().withOutputs().withNodes() + .withNodeProperties().withNodePropertiesAssignments().withNodeRequirements() + .withNodeCapabilities().withNodeCapabilityProperties() + .withNodeCapabilityPropertyAssignments().withPolicies().withPolicyProperties() + .withPolicyPropertiesAssignments().execute().setHandler( + catalogController.new CatalogHandler<Template>(this.request, this.result) { + public CatalogResponse handleData(Template theTemplate) { + CatalogResponse response = new CatalogResponse(this.request); + if (theTemplate != null) { + try { + response.data().put("model", catalogController + .patchData(catalog, theTemplate.data())); + } catch (JSONException e) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "JSONException putting model to response {}", e); + } + } + return response; + } + }); + } catch (Exception e) { + handleException(e, ApiType.GET_MODEL, models.get(0).name()); + } + return null; + } + }); + + return result; + } + + @RequestMapping(value = "/{theItemId}/type/{theTypeName}", method = { RequestMethod.POST, RequestMethod.GET }, produces = "application/json") + public DeferredResult<CatalogResponse> model(@RequestBody(required = false) ElementRequest theRequest, @PathVariable String theItemId, @PathVariable String theTypeName) { + final ElementRequest request = (theRequest == null) ? 
ElementRequest.EMPTY_REQUEST : theRequest; + + Catalog catalog = catalogController.getCatalog(request.getCatalog()); + DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout()); + + catalog.type(theItemId, theTypeName).withHierarchy().withCapabilities().withRequirements().execute() + .setHandler(catalogController.new CatalogHandler<Type>(request, result) { + public CatalogResponse handleData(Type theType) { + CatalogResponse response = new CatalogResponse(this.request); + if (theType != null) { + try { + response.data().put("type", catalogController.patchData(catalog, theType.data())); + } catch (JSONException e) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Exception processing catalog {}", e); + } + } + return response; + } + }); + + return result; + } + + @RequestMapping(value = { "/getComposition/{vfcmtUuid}" }, method = { RequestMethod.GET }, produces = {"application/json" }) + public ResponseEntity getComposition(@PathVariable("vfcmtUuid") String vfcmtUuid, @ModelAttribute("requestId") String requestId) { + MessageResponse response = new MessageResponse(); + try { + ResourceDetailed vfcmt = baseBusinessLogic.getSdcRestClient().getResource(vfcmtUuid, requestId); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), vfcmt.toString()); + checkVfcmtType(vfcmt); + + Artifact compositionArtifact = CollectionUtils.isEmpty(vfcmt.getArtifacts()) ? null : vfcmt.getArtifacts().stream().filter(a -> DcaeBeConstants.Composition.fileNames.COMPOSITION_YML.equals(a.getArtifactName())).findAny().orElse(null); + + if(null == compositionArtifact){ + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Couldn't find {} in VFCMT artifacts", DcaeBeConstants.Composition.fileNames.COMPOSITION_YML); + response.setErrorResponse("No Artifacts"); + return new ResponseEntity<>(response, HttpStatus.NO_CONTENT); + } + + String artifact = baseBusinessLogic.getSdcRestClient().getResourceArtifact(vfcmtUuid, compositionArtifact.getArtifactUUID(), requestId); + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "ARTIFACT: {}", artifact); + response.setSuccessResponse(artifact); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + return handleException(e, ApiType.GET_CDUMP); + } + } + + @RequestMapping(value = "/saveComposition/{vfcmtUuid}", method = RequestMethod.POST) + public ResponseEntity saveComposition(@RequestHeader("USER_ID") String userId, @RequestBody String theCdump, @PathVariable("vfcmtUuid") String vfcmtUuid, @ModelAttribute("requestId") String requestId) { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "ARTIFACT CDUMP: {}", theCdump); + + try { + + ResourceDetailed vfcmt = baseBusinessLogic.getSdcRestClient().getResource(vfcmtUuid, requestId); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "VFCMT: {}", vfcmt); + + checkVfcmtType(vfcmt); + checkUserIfResourceCheckedOut(userId, vfcmt); + boolean isNeed2Checkout = isNeedToCheckOut(vfcmt.getLifecycleState()); + Artifact compositionArtifact = CollectionUtils.isEmpty(vfcmt.getArtifacts()) ? 
null : vfcmt.getArtifacts().stream().filter(a -> DcaeBeConstants.Composition.fileNames.COMPOSITION_YML.equals(a.getArtifactName())).findAny().orElse(null); + String resourceUuid = vfcmtUuid; // by default the resource is the original vfcmtId unless a checkout will be done + if (isNeed2Checkout) { + vfcmt = baseBusinessLogic.getSdcRestClient().changeResourceLifecycleState(userId, resourceUuid, LifecycleOperationType.CHECKOUT.name(), null, requestId); + if (vfcmt != null) { + resourceUuid = vfcmt.getUuid(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "New resource after checkout is: {}", resourceUuid); + } + } + boolean isUpdateMode = null != compositionArtifact; + if (isUpdateMode) { + compositionArtifact.setDescription("updating composition file"); + compositionArtifact.setPayloadData(Base64Utils.encodeToString(theCdump.getBytes())); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "VFCMT {} does consist {} ----> updateMode", resourceUuid, DcaeBeConstants.Composition.fileNames.COMPOSITION_YML); + baseBusinessLogic.getSdcRestClient().updateResourceArtifact(userId, resourceUuid, compositionArtifact, requestId); + + } else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "VFCMT {} does not consist {} ----> createMode", resourceUuid, DcaeBeConstants.Composition.fileNames.COMPOSITION_YML); + compositionArtifact = SdcRestClientUtils.generateDeploymentArtifact("creating composition file", DcaeBeConstants.Composition.fileNames.COMPOSITION_YML, ArtifactType.DCAE_TOSCA.name(), "composition", theCdump.getBytes()); + baseBusinessLogic.getSdcRestClient().createResourceArtifact(userId, resourceUuid, compositionArtifact, requestId); + } + Asset result = checkin(userId, resourceUuid, org.onap.sdc.dcae.enums.AssetType.RESOURCE, requestId); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "vfcmt check-in result: {}", result); + + return new ResponseEntity<>(result, HttpStatus.OK); + } catch (Exception e) { + return handleException(e, ApiType.SAVE_CDUMP); + } + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/ConfigurationController.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/ConfigurationController.java new file mode 100644 index 0000000..4f083ca --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/ConfigurationController.java @@ -0,0 +1,63 @@ +package org.onap.sdc.dcae.composition.controller; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.composition.CompositionConfig; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.ves.VesStructureLoader; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.CrossOrigin; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiResponse; +import io.swagger.annotations.ApiResponses; + +@RestController +@EnableAutoConfiguration +@CrossOrigin +@RequestMapping("/conf") +public class ConfigurationController extends 
BaseController{ + + @Autowired + private CompositionConfig compositionConfig; + + @ApiOperation(value = "Get a list of available flow types", response = CompositionConfig.class) + @ApiResponses(value = { + @ApiResponse(code = 200, message = "Successfully retrieved available flow types list"), + @ApiResponse(code = 500, message = "Flow types couldn't be fetched due to internal error")}) + @RequestMapping(value = "/composition", method = RequestMethod.GET) + public ResponseEntity getCompositionConfig() { + try { + return new ResponseEntity<>(compositionConfig, HttpStatus.OK); + }catch (Exception e) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(),"Exception:{}",e); + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.FLOW_TYPES_CONFIGURATION_ERROR); + } + } + + @RequestMapping(value = "/ves/schemaversions", method = RequestMethod.GET) + public ResponseEntity getCommonEventFormatVersion() { + try { + Set<String> availableVersionsSet = VesStructureLoader.getAvailableVersionsList(); + List<String> availableVersionsList = new ArrayList<>(availableVersionsSet.size()); + availableVersionsList.addAll(availableVersionsSet); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Got a request to return available ves schema versions: {}", availableVersionsSet); + return new ResponseEntity<>(availableVersionsList, HttpStatus.OK); + }catch (Exception e) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(),"Exception:{}",e); + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.VES_SCHEMA_NOT_FOUND); + } + } + + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/LifecycleController.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/LifecycleController.java new file mode 100644 index 0000000..3007335 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/LifecycleController.java @@ -0,0 +1,84 @@ +package org.onap.sdc.dcae.composition.controller; + +import org.onap.sdc.dcae.composition.restmodels.sdc.Asset; +import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed; +import org.onap.sdc.dcae.enums.AssetType; +import org.onap.sdc.dcae.enums.LifecycleOperationType; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; + +import java.util.UUID; + +@RestController +@EnableAutoConfiguration +@CrossOrigin +public class LifecycleController extends BaseController { + + private static final String VFCMT = "vfcmt"; + + @RequestMapping(value={"/checkin/{assetType}/{uuid}"}, method={RequestMethod.PUT}, produces={"application/json"}) + public ResponseEntity putCheckin( + @PathVariable("assetType") String assetType, + @PathVariable("uuid") UUID uuid, + @RequestHeader("USER_ID") String user_id, + @ModelAttribute("requestId") String requestId) { + + try { + switch (assetType) { + case VFCMT: + Asset res_checkin = checkin(user_id, uuid.toString(), AssetType.RESOURCE, requestId); + return new ResponseEntity<>(res_checkin, HttpStatus.OK); + + default: + return new ResponseEntity<>(HttpStatus.BAD_REQUEST); + } + } catch (Exception e) { + return handleException(e, ErrConfMgr.ApiType.CHECK_IN_RESOURCE); + } + } + + @RequestMapping(value={"/checkout/{assetType}/{uuid}"}, method={RequestMethod.PUT}, produces={"application/json"}) + public ResponseEntity putCheckout( + 
@PathVariable("assetType") String assetType, + @PathVariable("uuid") UUID uuid, + @RequestHeader("USER_ID") String user_id, + @ModelAttribute("requestId") String requestId) { + + try { + switch (assetType) { + case VFCMT: + Asset asset = checkout(user_id, uuid.toString(), AssetType.RESOURCE, requestId); + return new ResponseEntity<>(asset, HttpStatus.OK); + + default: + return new ResponseEntity<>(HttpStatus.BAD_REQUEST); + } + } catch (Exception e) { + return handleException(e, ErrConfMgr.ApiType.CHECK_OUT_RESOURCE); + } + } + + @RequestMapping(value={"/certify/{assetType}/{uuid}"}, method={RequestMethod.PUT}, produces={"application/json"}) + public ResponseEntity putCertify( + @PathVariable("assetType") String assetType, + @PathVariable("uuid") String uuid, + @RequestHeader("USER_ID") String user_id, + @ModelAttribute("requestId") String requestId) { + + try { + switch (assetType) { + case VFCMT: + ResourceDetailed vfcmt = baseBusinessLogic.getSdcRestClient().changeResourceLifecycleState(user_id, uuid, LifecycleOperationType.CERTIFY.name(), "certifying VFCMT", requestId); + return new ResponseEntity<>(vfcmt, HttpStatus.OK); + + default: + return new ResponseEntity<>(HttpStatus.BAD_REQUEST); + } + } catch (Exception e) { + return handleException(e, ErrConfMgr.ApiType.CHECK_OUT_RESOURCE); + } + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/RuleEditorController.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/RuleEditorController.java new file mode 100644 index 0000000..3f5ff1a --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/RuleEditorController.java @@ -0,0 +1,453 @@ +package org.onap.sdc.dcae.composition.controller; + +import com.google.gson.JsonParseException; +import org.apache.commons.collections.ListUtils; +import org.apache.commons.collections.MapUtils; +import org.apache.commons.lang3.StringUtils; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.composition.restmodels.sdc.Artifact; +import org.onap.sdc.dcae.composition.restmodels.sdc.Asset; +import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed; +import org.onap.sdc.dcae.composition.CompositionConfig; +import org.onap.sdc.dcae.utils.Normalizers; +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.*; +import org.onap.sdc.dcae.composition.util.DcaeBeConstants; +import org.onap.sdc.dcae.enums.ArtifactType; +import org.onap.sdc.dcae.enums.AssetType; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.errormng.ErrConfMgr.ApiType; +import org.onap.sdc.dcae.errormng.ServiceException; +import org.onap.sdc.dcae.rule.editor.impl.RulesBusinessLogic; +import org.onap.sdc.dcae.rule.editor.utils.RulesPayloadUtils; +import org.onap.sdc.dcae.utils.SdcRestClientUtils; +import org.onap.sdc.dcae.ves.VesDataItemsDefinition; +import org.onap.sdc.dcae.ves.VesDataTypeDefinition; +import org.onap.sdc.dcae.ves.VesSimpleTypesEnum; +import org.onap.sdc.dcae.ves.VesStructureLoader; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.util.Base64Utils; +import org.springframework.util.CollectionUtils; +import org.springframework.web.bind.annotation.*; + +import java.util.*; +import java.util.Map.Entry; +import java.util.stream.Collectors; 
+import java.util.stream.Stream; + +@RestController +@EnableAutoConfiguration +@CrossOrigin +@RequestMapping("/rule-editor") +public class RuleEditorController extends BaseController { + + private static final String EXCEPTION = "Exception {}"; + @Autowired + private CompositionConfig compositionConfig; + + @Autowired + private RulesBusinessLogic rulesBusinessLogic; + + @RequestMapping(value = "/list-events-by-versions", method = RequestMethod.GET) + public ResponseEntity getEventsByVersion() { + try { + + Map<String, Set<String>> eventsByVersions = VesStructureLoader.getAvailableVersionsAndEventTypes(); + + List<EventTypesByVersionUI> resBody = eventsByVersions.entrySet().stream().map(entry -> { + Set<String> events = entry.getValue().stream().filter(event -> !EventTypesByVersionUI.DEFAULT_EVENTS.contains(event)).collect(Collectors.toSet()); + return new EventTypesByVersionUI(entry.getKey(), events); + }).collect(Collectors.toList()); + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Got a request to return all ves event types by versions {}", eventsByVersions); + return new ResponseEntity<>(resBody, HttpStatus.OK); + + } catch (Exception e) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), EXCEPTION, e); + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.VES_SCHEMA_NOT_FOUND); + } + } + + @RequestMapping(value = { "/definition/{version:.*}/{eventType}" }, method = { RequestMethod.GET }, produces = { "application/json" }) + public ResponseEntity getDefinition(@PathVariable("version") String version, + @PathVariable("eventType") String eventType) { + + try { + List<EventTypeDefinitionUI> result = getEventTypeDefinitionUIs(version, eventType); + + return new ResponseEntity<>(result, HttpStatus.OK); + + } catch (Exception e) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), EXCEPTION, e); + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.VES_SCHEMA_NOT_FOUND); + } + } + + /** + * This endpoint functions as a 'create/update' service for the rule editor UI + * + * @param json - json representing the saved rule + * @param vfcmtUuid - VFCMT that the rule editor ui is saved in + * @param dcaeCompLabel - the name of the DCAE Component which the rule is applied to + * @param nid - A unique id of the DCAE Component which the rule is applied to - exists also in the cdump + * @param configParam - the name of the DCAE Component configuration property the rule is linked to + * @return json representing the rule editor UI + * Validations: + * 1. That the user is able to edit the VFCMT + * 2. That the cdump holds a dcae component with such nid (to avoid orphan rules) + * 3. 
Check that the fetched VFCMT is actually a VFCMT and not a regular VF + */ + @RequestMapping(value = "/rule/{vfcmtUuid}/{dcaeCompLabel}/{nid}/{configParam}", method = { RequestMethod.POST }, produces = "application/json") + public ResponseEntity saveRule(@RequestBody String json, @ModelAttribute("requestId") String requestId, + @RequestHeader("USER_ID") String userId, + @PathVariable("vfcmtUuid") String vfcmtUuid, + @PathVariable("dcaeCompLabel") String dcaeCompLabel, + @PathVariable("nid") String nid, + @PathVariable("configParam") String configParam) { + try { + Rule rule = RulesPayloadUtils.parsePayloadToRule(json); + if (null == rule) { + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.INVALID_RULE_FORMAT); + } + + List<ServiceException> errors = rulesBusinessLogic.validateRule(rule); + if(!errors.isEmpty()){ + return ErrConfMgr.INSTANCE.buildErrorArrayResponse(errors); + } + + ResourceDetailed vfcmt = baseBusinessLogic.getSdcRestClient().getResource(vfcmtUuid, requestId); + checkVfcmtType(vfcmt); + + if (CollectionUtils.isEmpty(vfcmt.getArtifacts())) { + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.SAVE_RULE_FAILED); + } + + String artifactLabel = Normalizers.normalizeArtifactLabel(dcaeCompLabel + nid + configParam); + + // check for MappingRules artifact in existing artifacts + Artifact artifactFound = vfcmt.getArtifacts().stream() + .filter(a -> artifactLabel.equals(Normalizers.normalizeArtifactLabel(a.getArtifactLabel()))) + .findAny().orElse(null); + + // exception thrown if vfcmt is checked out and current user is not its owner + // performs vfcmt checkout if required + String vfcmtId = assertOwnershipOfVfcmtId(userId, vfcmt, requestId); + // new mappingRules artifact, validate nid exists in composition before creating new artifact + if (null == artifactFound) { + if(cdumpContainsNid(vfcmt, nid, requestId)) { + return saveNewRulesArtifact(rule, vfcmtId, generateMappingRulesFileName(dcaeCompLabel, nid, configParam), artifactLabel , userId, requestId); + } + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.NODE_NOT_FOUND, "", dcaeCompLabel); + } + + //update artifact flow - append new rule or edit existing rule + return addOrEditRuleInArtifact(rule, vfcmtId, userId, artifactFound, requestId); + + } catch (JsonParseException je) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Error: Rule format is invalid: {}", je); + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.INVALID_RULE_FORMAT, "", je.getMessage()); + } catch (Exception e) { + return handleException(e, ErrConfMgr.ApiType.SAVE_RULE_ARTIFACT); + } + + } + + + /** + * This endpoint functions as a 'fetch' service for the rule editor UI + * + * @param vfcmtUuid - VFCMT that the rule editor ui is saved in + * @param dcaeCompLabel - the name of the DCAE Component which the rule is applied to + * @param nid - A unique id of the DCAE Component which the rule is applied to - exists also in the cdump + * @param configParam - the name of the DCAE Component configuration property the rule is linked to + * @return json representing the rule editor UI + */ + @RequestMapping(value = "/rule/{vfcmtUuid}/{dcaeCompLabel}/{nid}/{configParam}", method = { RequestMethod.GET }, produces = "application/json") + public ResponseEntity getRules( + @PathVariable("vfcmtUuid") String vfcmtUuid, + @PathVariable("dcaeCompLabel") String dcaeCompLabel, + @PathVariable("nid") String nid, + @PathVariable("configParam") String configParam, + @ModelAttribute("requestId") String requestId) { + + try { + 
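// fetch flow (sketch): the VFCMT is fetched from SDC, the artifact label is rebuilt from dcaeCompLabel + nid + configParam and normalized, and the artifacts are scanned for a matching mapping-rules file; an empty JSON object is returned when no rules were saved yet
+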
ResourceDetailed vfcmt = baseBusinessLogic.getSdcRestClient().getResource(vfcmtUuid, requestId); + if (CollectionUtils.isEmpty(vfcmt.getArtifacts())) { + return new ResponseEntity<>("{}", HttpStatus.OK); + } + String artifactLabel = Normalizers.normalizeArtifactLabel(dcaeCompLabel + nid + configParam); + + // check for MappingRules artifact in existing artifacts + Artifact artifactListed = vfcmt.getArtifacts().stream().filter(a -> artifactLabel.equals(Normalizers.normalizeArtifactLabel(a.getArtifactLabel()))).findAny().orElse(null); + if (null == artifactListed) { + return new ResponseEntity<>("{}", HttpStatus.OK); + } + String ruleFile = baseBusinessLogic.getSdcRestClient().getResourceArtifact(vfcmtUuid, artifactListed.getArtifactUUID(), requestId); + + // To avoid opening the file for reading we search for the eventType and SchemaVer from the artifact metadata's description + SchemaInfo schemainfo = RulesPayloadUtils.extractInfoFromDescription(artifactListed); + List<EventTypeDefinitionUI> schema = null == schemainfo? new ArrayList<>() : getEventTypeDefinitionUIs(schemainfo.getVersion(), schemainfo.getEventType()); + return new ResponseEntity<>(RulesPayloadUtils.buildSchemaAndRulesResponse(ruleFile, schema), HttpStatus.OK); + } catch (Exception e) { + return handleException(e, ApiType.GET_RULE_ARTIFACT); + } + + } + + /** + * This endpoint functions as a 'delete' service for the rule editor UI + * + * @param vfcmtUuid - VFCMT that the rule editor ui is saved in + * @param dcaeCompLabel - the name of the DCAE Component which the rule is applied to + * @param nid - A unique id of the DCAE Component which the rule is applied to - exists also in the cdump + * @param configParam - the name of the DCAE Component configuration property the rule is linked to + * @param ruleUid - the unique id of the rule to delete + * @return operation result + */ + @RequestMapping(value = "/rule/{vfcmtUuid}/{dcaeCompLabel}/{nid}/{configParam}/{ruleUid}", method = { RequestMethod.DELETE }, produces = "application/json") + public ResponseEntity deleteRule( + @RequestHeader("USER_ID") String userId, + @PathVariable("vfcmtUuid") String vfcmtUuid, + @PathVariable("dcaeCompLabel") String dcaeCompLabel, + @PathVariable("nid") String nid, + @PathVariable("configParam") String configParam, + @PathVariable("ruleUid") String ruleUid, + @ModelAttribute("requestId") String requestId){ + + try { + ResourceDetailed vfcmt = baseBusinessLogic.getSdcRestClient().getResource(vfcmtUuid, requestId); + if (null == vfcmt.getArtifacts()) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "VFCMT {} doesn't have artifacts", vfcmtUuid); + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.DELETE_RULE_FAILED); + } + String artifactLabel = Normalizers.normalizeArtifactLabel(dcaeCompLabel + nid + configParam); + + // check for MappingRules artifact in existing artifacts + Artifact mappingRuleFile = vfcmt.getArtifacts().stream() + .filter(a -> artifactLabel.equals(Normalizers.normalizeArtifactLabel(a.getArtifactLabel()))) + .findAny().orElse(null); + + if (null == mappingRuleFile) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "{} doesn't exist for VFCMT {}", artifactLabel, vfcmtUuid); + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.DELETE_RULE_FAILED); + } + + String vfcmtId = assertOwnershipOfVfcmtId(userId, vfcmt, requestId); + String payload = baseBusinessLogic.getSdcRestClient().getResourceArtifact(vfcmtId, mappingRuleFile.getArtifactUUID(), requestId); + MappingRules rules = 
RulesPayloadUtils.parseMappingRulesArtifactPayload(payload); + Rule removedRule = rulesBusinessLogic.deleteRule(rules, ruleUid); + if(null == removedRule){ + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Rule {} not found.", ruleUid); + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.DELETE_RULE_FAILED); + } + if(rules.isEmpty()){ // if file doesn't contain any rules after last deletion -> let's delete the file + baseBusinessLogic.getSdcRestClient().deleteResourceArtifact(userId, vfcmtId, mappingRuleFile.getArtifactUUID(), requestId); + } else { + updateRulesArtifact(vfcmtId, userId, mappingRuleFile, rules, requestId); + } + return checkInAndReturnSaveArtifactResult(removedRule, vfcmtId, userId, requestId); + } catch (Exception e) { + return handleException(e, ApiType.SAVE_RULE_ARTIFACT); + } + + } + + /** + * This endpoint functions as a 'translate' service for the rule editor UI + * + * @param vfcmtUuid - VFCMT that the rule editor ui is saved in + * @param dcaeCompLabel - the name of the DCAE Component which the rule is applied to + * @param nid - A unique id of the DCAE Component which the rule is applied to - exists also in the cdump + * @param configParam - the name of the DCAE Component configuration property the rule is linked to + * @param flowType - the mapping rules flow type (SNMP,Syslog,FOI) + * @return translateJson representing the translated Rules + * Validations: + * 1. That the user is able to edit the VFCMT + * 2. That the cdump holds a dcae component with such nid (to avoid orphan rules) + * 3. Check that the fetched VFCMT is actually a VFCMT and not a regular VF + * @throws Exception + */ + @RequestMapping(value = "/rule/translate/{vfcmtUuid}/{dcaeCompLabel}/{nid}/{configParam}", method = { RequestMethod.GET }, produces = "application/json") + public ResponseEntity translateRules(@PathVariable("vfcmtUuid") String vfcmtUuid, @ModelAttribute("requestId") String requestId, + @PathVariable("dcaeCompLabel") String dcaeCompLabel, + @PathVariable("nid") String nid, + @PathVariable("configParam") String configParam, + @RequestParam("flowType") String flowType) throws Exception { + + try { + + if (StringUtils.isBlank(flowType) || MapUtils.isEmpty(compositionConfig.getFlowTypesMap()) || null == compositionConfig.getFlowTypesMap().get(flowType)) { + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.TRANSLATE_FAILED, "", "Flow type " + flowType + " not found"); + } + + // extract entry phase name and last phase name from configuration: + String entryPointPhaseName = compositionConfig.getFlowTypesMap().get(flowType).getEntryPointPhaseName(); + String lastPhaseName = compositionConfig.getFlowTypesMap().get(flowType).getLastPhaseName(); + + ResourceDetailed vfcmt = baseBusinessLogic.getSdcRestClient().getResource(vfcmtUuid, requestId); + checkVfcmtType(vfcmt); + + if (CollectionUtils.isEmpty(vfcmt.getArtifacts())) { + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.TRANSLATE_FAILED, "", "No rules found on VFCMT " + vfcmtUuid); + } + String artifactLabel = Normalizers.normalizeArtifactLabel(dcaeCompLabel + nid + configParam); + + // check for MappingRules artifact in existing artifacts + Artifact rulesArtifact = vfcmt.getArtifacts().stream().filter(a -> artifactLabel.equals(Normalizers.normalizeArtifactLabel(a.getArtifactLabel()))).findAny().orElse(null); + + if (rulesArtifact == null) { + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.TRANSLATE_FAILED, "", artifactLabel + " doesn't exist on VFCMT " + vfcmtUuid); + } + + String 
payload = baseBusinessLogic.getSdcRestClient().getResourceArtifact(vfcmtUuid, rulesArtifact.getArtifactUUID(), requestId); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Retrieved mapping rules artifact {}, start parsing rules...", artifactLabel); + MappingRules rules = RulesPayloadUtils.parseMappingRulesArtifactPayload(payload); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Finished parsing rules, calling validator..."); + List<ServiceException> errors = rulesBusinessLogic.validateRules(rules); + if (!errors.isEmpty()) { + return ErrConfMgr.INSTANCE.buildErrorArrayResponse(errors); + } + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Validation completed successfully, calling translator..."); + String translateJson = rulesBusinessLogic.translateRules(rules, entryPointPhaseName, lastPhaseName, vfcmt.getName()); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Translation completed successfully"); + return new ResponseEntity<>(translateJson, HttpStatus.OK); + } catch (Exception e) { + return handleException(e, ApiType.SAVE_RULE_ARTIFACT); + } + } + + + ///////////////////PRIVATE METHODS//////////////////////////////////////////////////////////////////////// + + private String assertOwnershipOfVfcmtId(String userId, ResourceDetailed vfcmt, String requestId) throws Exception { + checkUserIfResourceCheckedOut(userId, vfcmt); + String newVfcmtId = vfcmt.getUuid(); // may change after checking out a certified vfcmt + if (isNeedToCheckOut(vfcmt.getLifecycleState())) { + Asset result = checkout(userId, newVfcmtId, AssetType.RESOURCE, requestId); + if (result != null) { + newVfcmtId = result.getUuid(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "New resource after checkout is: {}", newVfcmtId); + } + } + return newVfcmtId; + } + + + + // called after validating vfcmt.getArtifacts() is not null + private boolean cdumpContainsNid(ResourceDetailed vfcmt, String nid, String requestId) { + Artifact cdump = vfcmt.getArtifacts().stream() + .filter(a -> DcaeBeConstants.Composition.fileNames.COMPOSITION_YML.equalsIgnoreCase(a.getArtifactName())) + .findAny().orElse(null); + if (null == cdump || null == cdump.getArtifactUUID()) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "No {} found on vfcmt {}", DcaeBeConstants.Composition.fileNames.COMPOSITION_YML, vfcmt.getUuid()); + return false; + } + try { + String artifact = baseBusinessLogic.getSdcRestClient().getResourceArtifact(vfcmt.getUuid(), cdump.getArtifactUUID(), requestId); + if (!artifact.contains("\"nid\":\""+nid)) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "{} doesn't contain nid {}. 
Cannot save mapping rule file", DcaeBeConstants.Composition.fileNames.COMPOSITION_YML, nid); + return false; + } + } catch (Exception e) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), EXCEPTION, e); + return false; + } + return true; + } + + private ResponseEntity<String> saveNewRulesArtifact(Rule rule, String vfcmtUuid, String artifactFileName, String artifactLabel, String userId, String requestId) throws Exception { + MappingRules body = new MappingRules(rule); + Artifact artifact = SdcRestClientUtils.generateDeploymentArtifact(body.describe(), artifactFileName, ArtifactType.OTHER.name(), artifactLabel, body.convertToPayload()); + baseBusinessLogic.getSdcRestClient().createResourceArtifact(userId, vfcmtUuid, artifact, requestId); + return checkInAndReturnSaveArtifactResult(rule, vfcmtUuid, userId, requestId); + } + + private ResponseEntity addOrEditRuleInArtifact(Rule rule, String vfcmtUuid, String userId, Artifact rulesArtifact, String requestId) throws Exception { + String payload = baseBusinessLogic.getSdcRestClient().getResourceArtifact(vfcmtUuid, rulesArtifact.getArtifactUUID(), requestId); + MappingRules rules = RulesPayloadUtils.parseMappingRulesArtifactPayload(payload); + + // in case the rule id is passed but the rule doesn't exist on the mapping rule file: + if(!rulesBusinessLogic.addOrEditRule(rules, rule)) { + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.SAVE_RULE_FAILED); + } + updateRulesArtifact(vfcmtUuid, userId, rulesArtifact, rules, requestId); + return checkInAndReturnSaveArtifactResult(rule, vfcmtUuid, userId, requestId); + } + + // regardless of check in result, return save artifact success + private ResponseEntity<String> checkInAndReturnSaveArtifactResult(Rule rule, String vfcmtUuid, String userId, String requestId) { + try { + checkin(userId, vfcmtUuid, AssetType.RESOURCE, requestId); + } catch (Exception e) { + // swallowing the exception intentionally since it is on the check in action + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Error occurred while performing check in on VFCMT {}:{}", vfcmtUuid, e); + } + return new ResponseEntity<>(rule.toJson(), HttpStatus.OK); + } + + private void updateRulesArtifact(String vfcmtUuid, String userId, Artifact artifactInfo, MappingRules rules, String requestId) throws Exception { + artifactInfo.setPayloadData(Base64Utils.encodeToString(rules.convertToPayload())); + // POST must contain 'description' while GET returns 'artifactDescription' + artifactInfo.setDescription(artifactInfo.getArtifactDescription()); + baseBusinessLogic.getSdcRestClient().updateResourceArtifact(userId, vfcmtUuid, artifactInfo, requestId); + } + + + /** + * @param eventMapStream + * @param parent + * @param path + * @return + */ + private List<EventTypeDefinitionUI> convertToEventTypeDefinition(Stream<Entry<String, VesDataTypeDefinition>> eventMapStream, VesDataTypeDefinition parent, String path) { + + return eventMapStream.map(entry -> { + Map<String, VesDataTypeDefinition> properties = entry.getValue().getProperties(); + VesDataItemsDefinition items = entry.getValue().getItems(); + String newPath = path + "." + entry.getKey(); + List<EventTypeDefinitionUI> children = (properties == null) ? 
null : convertToEventTypeDefinition(properties.entrySet().stream(), entry.getValue(), newPath); + if(VesSimpleTypesEnum.ARRAY.getType().equals(entry.getValue().getType())) { + newPath += "[]"; + if(innerTypeIsComplex(items)) { + children = convertComplexArrayType(items, newPath); + } else if(innerTypeIsArray(items)) { + newPath += "[]"; + } + } + + boolean isRequired = (parent != null) ? parent.getRequired().contains(entry.getKey()) : false; + return new EventTypeDefinitionUI(entry.getKey(), children, isRequired, newPath); + }).collect(Collectors.toList()); + } + + private boolean innerTypeIsComplex(VesDataItemsDefinition items){ + return items != null && items.stream().anyMatch(p -> p.getProperties() != null); + } + + private boolean innerTypeIsArray(VesDataItemsDefinition items){ + return items != null && items.stream().anyMatch(p -> p.getItems() != null); + } + + private List<EventTypeDefinitionUI> convertComplexArrayType(VesDataItemsDefinition items, String path){ + return items.stream().map(item -> item.getProperties() != null ? convertToEventTypeDefinition(item.getProperties().entrySet().stream(), item, path) : new ArrayList<EventTypeDefinitionUI>()) + .flatMap(List::stream).collect(Collectors.toList()); + } + + + private String generateMappingRulesFileName(String dcaeCompLabel, String nid, String configParam) { + return dcaeCompLabel + "_" + nid + "_" + configParam + DcaeBeConstants.Composition.fileNames.MAPPING_RULE_POSTFIX; + } + + private List<EventTypeDefinitionUI> getEventTypeDefinitionUIs(String version, String eventType) { + List<String> eventNamesToReturn = ListUtils.union(EventTypesByVersionUI.DEFAULT_EVENTS, Arrays.asList(eventType)); + Map<String, VesDataTypeDefinition> eventDefs = VesStructureLoader.getEventListenerDefinitionByVersion(version); + Stream<Entry<String, VesDataTypeDefinition>> filteredEvents = eventDefs.entrySet().stream().filter(entry -> eventNamesToReturn.contains(entry.getKey())); + + return convertToEventTypeDefinition(filteredEvents, null, "event"); + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/ServicesController.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/ServicesController.java new file mode 100644 index 0000000..257d1a9 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/ServicesController.java @@ -0,0 +1,230 @@ +package org.onap.sdc.dcae.composition.controller; + +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.composition.restmodels.AttachVFCMTServiceRequest; +import org.onap.sdc.dcae.composition.restmodels.DcaeMinimizedService; +import org.onap.sdc.dcae.composition.restmodels.MessageResponse; +import org.onap.sdc.dcae.composition.restmodels.sdc.*; +import org.onap.sdc.dcae.composition.util.DcaeBeConstants; +import org.onap.sdc.dcae.composition.util.DcaeBeConstants.LifecycleStateEnum; +import org.onap.sdc.dcae.enums.ArtifactType; +import org.onap.sdc.dcae.enums.LifecycleOperationType; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.DcaeException; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.errormng.ResponseFormat; +import org.onap.sdc.dcae.utils.SdcRestClientUtils; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.util.Base64Utils; +import org.springframework.util.CollectionUtils; +import 
org.springframework.web.bind.annotation.*;
+
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.stream.Collectors;
+
+@RestController
+@EnableAutoConfiguration
+@CrossOrigin
+public class ServicesController extends BaseController {
+
+ /***
+ * GET services list by VFCMT
+ * @param userId the id of the requesting user
+ * @param vfcmtUuid the uuid of the VFCMT
+ * @return ResponseEntity
+ */
+ @RequestMapping(value = { "/services/{vfcmtUuid}" }, method = { RequestMethod.GET }, produces = {"application/json" })
+ public ResponseEntity services(@RequestHeader("USER_ID") String userId, @PathVariable String vfcmtUuid, @ModelAttribute("requestId") String requestId) {
+ try {
+ ResourceDetailed vfcmt = baseBusinessLogic.getSdcRestClient().getResource(vfcmtUuid, requestId);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "GET ({}) Vfcmt from SDC: {}", vfcmtUuid, vfcmt);
+ checkVfcmtType(vfcmt);
+ checkUserIfResourceCheckedOut(userId, vfcmt);
+
+ List<Service> services = baseBusinessLogic.getSdcRestClient().getServices(requestId);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "GET services data from SDC: {}", services);
+ List<Service> filteredServices = filterServicesByUser(services, userId);
+ return new ResponseEntity<>(filteredServices, HttpStatus.OK);
+ } catch (Exception e) {
+ return handleException(e, ErrConfMgr.ApiType.GET_SERVICE);
+ }
+ }
+
+ /***
+ * GET a single service
+ * @param theServiceId the uuid of the requested service
+ * @return ResponseEntity
+ */
+ @RequestMapping(value = { "/service/{theServiceId}" }, method = { RequestMethod.GET }, produces = {"application/json" })
+ public ResponseEntity service(@PathVariable String theServiceId, @ModelAttribute("requestId") String requestId) {
+ try {
+ ServiceDetailed service = baseBusinessLogic.getSdcRestClient().getService(theServiceId, requestId);
+ if (service != null) {
+ if(service.getResources()!=null){
+ // note: getResoucreType (sic) is the accessor name exposed by the SDC rest model
+ List<ResourceInstance> vfResourcesOnly = service.getResources().stream().filter(vfi -> vfi.getResoucreType().equals("VF")).collect(Collectors.toList());
+ service.setResources(vfResourcesOnly);
+ }else{
+ errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Service {} doesn't have any resources (e.g. VFIs)", theServiceId);
+ }
+ } else {
+ errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Couldn't fetch service with uuid {} from SDC", theServiceId);
+ }
+ return new ResponseEntity<>(service, HttpStatus.OK);
+ } catch (Exception e) {
+ return handleException(e, ErrConfMgr.ApiType.GET_SERVICE);
+ }
+ }
+
+
+ /***
+ * Attach service and service instance to VFCMT
+ * @param userId the id of the modifying user
+ * @param request the attach request holding the service uuid and the vfi instance name
+ * @return ResponseEntity
+ */
+ @RequestMapping(value = "/{vfcmtUuid}/attachment", method = RequestMethod.POST, produces = {"application/json" })
+ public ResponseEntity attachService(
+ @PathVariable("vfcmtUuid") String vfcmtUuid,
+ @RequestHeader("USER_ID") String userId,
+ @RequestBody AttachVFCMTServiceRequest request,
+ @ModelAttribute("requestId") String requestId) {
+
+ String serviceUuid = request.getServiceUuid();
+ String vfiName = request.getInstanceName();
+ String resourceUuid = vfcmtUuid;
+ MessageResponse response = new MessageResponse();
+
+ try {
+ ResourceDetailed vfcmt = baseBusinessLogic.getSdcRestClient().getResource(vfcmtUuid, requestId);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), vfcmt.toString());
+
+ checkVfcmtType(vfcmt);
+ verifyVfiExists(serviceUuid, vfiName, requestId);
+
+ boolean isUpdateMode = false;
+ Artifact artifactObj = null;
+
+ String reference = serviceUuid + "/resources/" + vfiName;
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "*****************************************");
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Reference between service and vfi {}", reference);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "*****************************************");
+
+ if(!CollectionUtils.isEmpty(vfcmt.getArtifacts())){
+ artifactObj = vfcmt.getArtifacts().stream().filter(a -> DcaeBeConstants.Composition.fileNames.SVC_REF.equals(a.getArtifactName())).findAny().orElse(null);
+ isUpdateMode = null != artifactObj;
+ }
+
+ if (isNeedToCheckOut(vfcmt.getLifecycleState())) {
+ vfcmt = baseBusinessLogic.getSdcRestClient().changeResourceLifecycleState(userId, vfcmtUuid, LifecycleOperationType.CHECKOUT.name(), null, requestId);
+ if (vfcmt != null) {
+ resourceUuid = vfcmt.getUuid();
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "New vfcmt uuid after checkout is: {}", resourceUuid);
+ }
+ }
+
+ if(isUpdateMode){
+ updateReferenceArtifact(userId, resourceUuid, artifactObj, reference, requestId);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Artifact {} updated with content: {}", DcaeBeConstants.Composition.fileNames.SVC_REF, reference);
+ }else{
+ Artifact artifact = SdcRestClientUtils.generateDeploymentArtifact("createReferenceArtifact", DcaeBeConstants.Composition.fileNames.SVC_REF, ArtifactType.DCAE_TOSCA.name(), "servicereference", reference.getBytes());
+ baseBusinessLogic.getSdcRestClient().createResourceArtifact(userId, resourceUuid, artifact, requestId);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Artifact {} created with content: {}", DcaeBeConstants.Composition.fileNames.SVC_REF, reference);
+ }
+ checkin(userId, resourceUuid, org.onap.sdc.dcae.enums.AssetType.RESOURCE, requestId);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Attachment of reference={} in VFCMT {} has finished successfully", reference, resourceUuid);
+
+ response.setSuccessResponse("Artifact updated");
+ return new ResponseEntity<>(response, HttpStatus.OK);
+ } catch (Exception e) {
+ return handleException(e, ErrConfMgr.ApiType.ATTACH_TO_SERVICE);
+ }
+ }
+
+ @RequestMapping(value = { "/{vfcmtUuid}/attachment" }, method = { RequestMethod.GET }, produces = {"application/json" })
+ public ResponseEntity getAttachedService(@PathVariable("vfcmtUuid") String vfcmtUuid, @ModelAttribute("requestId") String requestId) {
+
+ MessageResponse response = new MessageResponse();
+
+ try {
+ ResourceDetailed vfcmt = baseBusinessLogic.getSdcRestClient().getResource(vfcmtUuid, requestId);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), vfcmt.toString());
+ checkVfcmtType(vfcmt);
+ String artifact = "No Artifacts";
+
+ if (!CollectionUtils.isEmpty(vfcmt.getArtifacts())) {
+ Artifact artifactObj = vfcmt.getArtifacts().stream().filter(a -> DcaeBeConstants.Composition.fileNames.SVC_REF.equals(a.getArtifactName())).findAny().orElse(null);
+ if (null != artifactObj)
+ artifact = baseBusinessLogic.getSdcRestClient().getResourceArtifact(vfcmtUuid, artifactObj.getArtifactUUID(), requestId);
+ }
+ response.setSuccessResponse(artifact);
+ return new ResponseEntity<>(response, HttpStatus.OK);
+ } catch (Exception e) {
+ return handleException(e, ErrConfMgr.ApiType.GET_VFCMT);
+ }
+ }
+
+ /**** PRIVATE METHODS ****/
+
+ private void updateReferenceArtifact(String userId, String VFCMTUuid, Artifact artifactObj, String reference, String requestId) throws Exception {
+
artifactObj.setDescription("updateReferenceArtifact"); + artifactObj.setPayloadData(Base64Utils.encodeToString(reference.getBytes())); + baseBusinessLogic.getSdcRestClient().updateResourceArtifact(userId, VFCMTUuid, artifactObj, requestId); + } + + + /** + * + * @param lastUpdaterUserId + * @param services + * @param userId + * @return + */ + + //TODO move method to ci tests + public List<DcaeMinimizedService> parseAndFilterServicesByUser(String lastUpdaterUserId, List<LinkedHashMap<String, String>> services, String userId) { + List<DcaeMinimizedService> uuids = null; + if (services != null) { + //services.stream().filter(predicate) + uuids = services.stream() + .map(x -> new DcaeMinimizedService(x.get("uuid"), x.get("name"), x.get("lastUpdaterUserId"), x.get("lifecycleState"), x.get("version"), x.get("invariantUUID"))) + .collect(Collectors.groupingBy(DcaeMinimizedService::getInvariantUUID)).values().stream() + .map(p -> p.stream() + .sorted(Comparator.comparing(DcaeMinimizedService::getVersionAsFloat).reversed())).map(p -> p.collect(Collectors.toList())).map(p -> p.get(0)) + .filter(x -> (!(!x.getLastUpdaterUserId().equals(userId) && x.getLifeCycleState().equals(LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT.name())))) + .sorted(Comparator.comparing(DcaeMinimizedService::getName)).collect(Collectors.toList()); + } + return uuids; + } + + private List<Service> filterServicesByUser(List<Service> services, String userId) { + return CollectionUtils.isEmpty(services) ? new ArrayList<>() : services.stream() + .collect(Collectors.groupingBy(Service::getInvariantUUID)).values().stream() + .map(p -> p.stream() + .sorted(Comparator.comparing(Service::versionAsFloat).reversed())).map(p -> p.collect(Collectors.toList())).map(p -> p.get(0)) + .filter(x -> (!(!x.getLastUpdaterUserId().equals(userId) && x.getLifecycleState().equals(LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT.name())))) + .sorted(Comparator.comparing(Service::getName)).collect(Collectors.toList()); + } + + /** + * + * @param serviceUuid + * @param vfiName + * @param requestId + * @throws Exception + */ + private void verifyVfiExists(String serviceUuid, String vfiName, String requestId) throws Exception { + ServiceDetailed service = baseBusinessLogic.getSdcRestClient().getService(serviceUuid, requestId); + boolean isServiceContainsVfi = null != service && !CollectionUtils.isEmpty(service.getResources()) && service.getResources().stream() + .filter(vfi -> "VF".equals(vfi.getResoucreType())) + .anyMatch(vfi -> vfiName.equals(vfi.getResourceInstanceName())); + if (!isServiceContainsVfi) { + ResponseFormat responseFormat = ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.VFI_FETCH_ERROR, null, serviceUuid, vfiName); + throw new DcaeException(HttpStatus.NOT_FOUND, responseFormat.getRequestError()); + } + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/VfcmtController.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/VfcmtController.java new file mode 100644 index 0000000..0e1b209 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/VfcmtController.java @@ -0,0 +1,183 @@ +package org.onap.sdc.dcae.composition.controller; + +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.composition.impl.ReferenceBusinessLogic; +import org.onap.sdc.dcae.composition.impl.VfcmtBusinessLogic; +import org.onap.sdc.dcae.composition.restmodels.CreateVFCMTRequest; +import org.onap.sdc.dcae.composition.restmodels.ImportVFCMTRequest; +import 
org.onap.sdc.dcae.composition.restmodels.sdc.ExternalReferencesMap; +import org.onap.sdc.dcae.composition.restmodels.sdc.Resource; +import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed; +import org.onap.sdc.dcae.errormng.ErrConfMgr.ApiType; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; + +import java.util.List; + +@RestController +@EnableAutoConfiguration +@CrossOrigin +public class VfcmtController extends BaseController{ + + + @Autowired + private VfcmtBusinessLogic vfcmtBusinessLogic; + @Autowired + private ReferenceBusinessLogic referenceBusinessLogic; + + private static final String VFCMT = "VFCMT"; + private static final String TEMPLATE = "Template"; + private static final String BASE_MONITORING_TEMPLATE = "Base Monitoring Template"; + + + + /*** + * Get one resource information + * @param theResourceId retrieved resource id + * @return ResponseEntity + */ + @RequestMapping(value = { "/resource/{theResourceId}" }, method = { RequestMethod.GET }, produces = {"application/json" }) + public ResponseEntity resource(@PathVariable String theResourceId, @ModelAttribute("requestId") String requestId) { + try { + ResourceDetailed resource = baseBusinessLogic.getSdcRestClient().getResource(theResourceId, requestId); + return new ResponseEntity<>(resource, HttpStatus.OK); + }catch (Exception e) { + return handleException(e, ApiType.GET_VFCMT); + } + } + + /*** + * Get All resources + * @return ResponseEntity + */ + @RequestMapping(value = { "/getResourcesByCategory" }, method = { RequestMethod.GET }, produces = {"application/json" }) + public ResponseEntity getResourcesByCategory(@ModelAttribute("requestId") String requestId) { + try { + List<Resource> resources = baseBusinessLogic.getSdcRestClient().getResources(VFCMT, null, null, requestId); + return new ResponseEntity<>(resources, HttpStatus.OK); + } catch (Exception e) { + return handleException(e, ApiType.GET_ALL_VFCMTS); + } + } + + /*** + * Get All resources by Service + * @return ResponseEntity + */ + + @RequestMapping(value = { "/{contextType}/{uuid}/{version}/getVfcmtsForMigration" }, method = { RequestMethod.GET }, produces = {"application/json" }) + public ResponseEntity getVfcmtsForMigration(@RequestHeader("USER_ID") String userId, + @PathVariable String contextType, + @PathVariable String uuid, + @PathVariable String version, + @ModelAttribute("requestId") String requestId){ + + return vfcmtBusinessLogic.getVfcmtsForMigration(userId, contextType, uuid, version, requestId); + } + + /*** + * Get All resources by Monitoring Template Category + * @return ResponseEntity + */ + @RequestMapping(value = { "/getResourcesByMonitoringTemplateCategory" }, method = { RequestMethod.GET }, produces = {"application/json" }) + public ResponseEntity getResourcesByMonitoringTemplateCategory(@ModelAttribute("requestId") String requestId) { + try { + List<Resource> resources = baseBusinessLogic.getSdcRestClient().getResources(VFCMT, TEMPLATE, BASE_MONITORING_TEMPLATE, requestId); + return new ResponseEntity<>(resources, HttpStatus.OK); + } catch (Exception e) { + return handleException(e, ApiType.GET_ALL_VFCMTS); + } + } + + /*** + * Create new Vfcmt + * @param userId retrieved user ID + * @param request retrieved request + * @return ResponseEntity + */ + @RequestMapping(value = "/createVFCMT", method = 
RequestMethod.POST, produces = {"application/json" }) + public ResponseEntity createVFCMT(@RequestHeader("USER_ID") String userId, @RequestBody CreateVFCMTRequest request, @ModelAttribute("requestId") String requestId) { + vfcmtBusinessLogic.addSdcMandatoryFields(request, userId); + try { + ResourceDetailed response = baseBusinessLogic.getSdcRestClient().createResource(userId, request, requestId); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "createVFCMT after post: {}", response); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + return handleException(e, ApiType.CREATE_NEW_VFCMT); + } + } + + /*** + * Create new Vfcmt from general screen + * @param userId retrieved user ID + * @param request retrieved request + * @return ResponseEntity + */ + @RequestMapping(value = "/createMC", method = RequestMethod.POST, produces = {"application/json" }) + public ResponseEntity createMC(@RequestHeader("USER_ID") String userId, @RequestBody CreateVFCMTRequest request, @ModelAttribute("requestId") String requestId) { + return vfcmtBusinessLogic.createMcFromTemplate(userId, request, requestId); + } + + + /*** + * Clone or import existing VFCMT and attach to selected service/resource + * @param userId + * @param request + * @return ResponseEntity + */ + @RequestMapping(value = "/importMC", method = RequestMethod.POST, produces = {"application/json" }) + public ResponseEntity importMC(@RequestHeader("USER_ID") String userId, @RequestBody ImportVFCMTRequest request, @ModelAttribute("requestId") String requestId) { + return vfcmtBusinessLogic.importMC(userId, request, requestId); + } + + /*** + * GET a list of Monitoring Components of a service by uuid and version + * @param context the context type of this request + * @param uuid the uuid of the type requested + * @param version the version of the entity requested + * @return ResponseEntity + */ + @RequestMapping(value = { "/{context}/{uuid}/{version}/monitoringComponents" }, method = { RequestMethod.GET }, produces = {"application/json" }) + public ResponseEntity getMonitoringComponents(@PathVariable String context, @PathVariable String uuid, @PathVariable String version, @ModelAttribute("requestId") String requestId) { + try { + ExternalReferencesMap mcRefs = baseBusinessLogic.getSdcRestClient().getMonitoringReferences(context, uuid, version, requestId); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Got monitoring references map from SDC: {}", mcRefs.values()); + return new ResponseEntity<>(referenceBusinessLogic.fetchMonitoringComponents(mcRefs, requestId), HttpStatus.OK); + } catch (Exception e) { + return handleException(e, ApiType.GET_SERVICE); + } + } + + @RequestMapping(value = { "/{context}/{serviceUuid}/{vfiName}/{vfcmtUuid}/deleteVfcmtReference" }, method = { RequestMethod.DELETE }, produces = {"application/json" }) + public ResponseEntity deleteVfcmtReference(@RequestHeader("USER_ID") String userId, @PathVariable String context, @PathVariable String serviceUuid, @PathVariable String vfiName, @PathVariable String vfcmtUuid, @ModelAttribute String requestId) { + try { + referenceBusinessLogic.deleteVfcmtReference(userId, context, serviceUuid, vfiName, vfcmtUuid, requestId); + return new ResponseEntity<>(HttpStatus.OK); + } catch (Exception e) { + return handleException(e, ApiType.DELETE_VFCMT_REFERENCE); + } + } + + @RequestMapping(value = { "/{context}/{monitoringComponentName}/{serviceUuid}/{vfiName}/{vfcmtUuid}/deleteVfcmtReference" }, method = { RequestMethod.DELETE }, produces 
= {"application/json" }) + public ResponseEntity deleteVfcmtReferenceWithBlueprint(@RequestHeader("USER_ID") String userId, @PathVariable String context, @PathVariable String monitoringComponentName, @PathVariable String serviceUuid, @PathVariable String vfiName, @PathVariable String vfcmtUuid, @ModelAttribute String requestId) { + try { + referenceBusinessLogic.deleteVfcmtReference(userId, context, serviceUuid, vfiName, vfcmtUuid, requestId); + } catch (Exception e) { + return handleException(e, ApiType.DELETE_VFCMT_REFERENCE); + } + return referenceBusinessLogic.deleteVfcmtReferenceBlueprint(userId, context, monitoringComponentName, serviceUuid, vfiName, vfcmtUuid, requestId); + } + + @RequestMapping(value = { "/getVfcmtReferenceData/{vfcmtUuid}" }, method = { RequestMethod.GET }, produces = {"application/json" }) + public ResponseEntity getVfcmtReferenceData(@PathVariable String vfcmtUuid, @ModelAttribute String requestId) { + try { + return vfcmtBusinessLogic.getVfcmtReferenceData(vfcmtUuid, requestId); + } catch (Exception e) { + return handleException(e, ApiType.GET_VFCMT); + } + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/health/HealthController.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/health/HealthController.java new file mode 100644 index 0000000..eaad1b0 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/health/HealthController.java @@ -0,0 +1,77 @@ +package org.onap.sdc.dcae.composition.controller.health; + +import java.util.ArrayList; +import java.util.List; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.composition.restmodels.health.ComponentsInfo; +import org.onap.sdc.dcae.composition.restmodels.health.HealthResponse; +import org.onap.sdc.dcae.composition.CompositionEngine; +import org.onap.sdc.dcae.composition.util.DcaeBeConstants; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.CrossOrigin; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import com.google.gson.Gson; + +@RestController +@EnableAutoConfiguration +@CrossOrigin +public class HealthController { + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + Gson gson = new Gson(); + + @Autowired + ToscaLabHealthState toscaLabHealthState; + + @RequestMapping(value = "/healthCheck", method = RequestMethod.GET) + public ResponseEntity<String> healthCheck() { + HttpStatus httpSts = HttpStatus.OK; + try{ + HealthResponse healthResponse = new HealthResponse(); + healthResponse.setHealthCheckComponent(DcaeBeConstants.Health.APP_NAME); + healthResponse.setHealthCheckStatus(DcaeBeConstants.Health.UP); + healthResponse.setSdcVersion(CompositionEngine.getDcaeVersion()); + healthResponse.setDescription(DcaeBeConstants.Health.OK); + + List<ComponentsInfo> componentsInfoList = new ArrayList<ComponentsInfo>(); + ComponentsInfo componentsInfo = new ComponentsInfo(); + 
componentsInfo.setHealthCheckComponent(DcaeBeConstants.Health.BE);
+ componentsInfo.setHealthCheckStatus(DcaeBeConstants.Health.UP);
+ componentsInfo.setVersion(CompositionEngine.getDcaeVersion());
+ componentsInfo.setDescription(DcaeBeConstants.Health.OK);
+ componentsInfoList.add(componentsInfo);
+
+ ComponentsInfo toscaLab = new ComponentsInfo();
+ ComponentsInfo toscaLabHealthRes = toscaLabHealthState.getToscaLabHealthResponse();
+ if(toscaLabHealthRes.getHealthCheckStatus().equals(DcaeBeConstants.Health.DOWN)){
+ healthResponse.setHealthCheckStatus(DcaeBeConstants.Health.DOWN);
+ healthResponse.setDescription(toscaLabHealthRes.getHealthCheckComponent()+" is down");
+ httpSts = HttpStatus.INTERNAL_SERVER_ERROR;
+ }
+ toscaLab.setHealthCheckComponent(toscaLabHealthRes.getHealthCheckComponent());
+ toscaLab.setHealthCheckStatus(toscaLabHealthRes.getHealthCheckStatus());
+ toscaLab.setVersion(toscaLabHealthRes.getVersion());
+ toscaLab.setDescription(toscaLabHealthRes.getDescription());
+ componentsInfoList.add(toscaLab);
+
+ healthResponse.setComponentsInfo(componentsInfoList);
+ String json = gson.toJson(healthResponse, HealthResponse.class);
+
+ return new ResponseEntity<String>(json, httpSts);
+ }
+ catch(Exception e){
+ errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Error occurred while performing HealthCheck: {}", e.getLocalizedMessage());
+ return new ResponseEntity<String>(e.getMessage(), HttpStatus.INTERNAL_SERVER_ERROR);
+ }
+ }
+
+}
\ No newline at end of file diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/health/HealthPoller.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/health/HealthPoller.java new file mode 100644 index 0000000..609480d --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/health/HealthPoller.java @@ -0,0 +1,97 @@ +package org.onap.sdc.dcae.composition.controller.health; + +import java.net.URI; +import java.util.Collections; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.composition.restmodels.health.ComponentsInfo; +import org.onap.sdc.dcae.catalog.commons.Future; +import org.onap.sdc.dcae.catalog.commons.Http; +import org.onap.sdc.dcae.composition.util.DcaeBeConstants; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.context.annotation.Configuration; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.scheduling.annotation.EnableAsync; +import org.springframework.scheduling.annotation.EnableScheduling; +import org.springframework.scheduling.annotation.Scheduled; + +import com.google.gson.Gson; + +@Configuration +@EnableAsync +@EnableScheduling +@ConfigurationProperties(prefix="blueprinter") +public class HealthPoller { + private URI hcuri; + private String hcretrynum; + private Gson gson; + + private OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + @Autowired + private ToscaLabHealthState toscaLabHealthState; + + public HealthPoller() { + super(); + gson = new Gson(); + } + + @Scheduled(fixedDelayString="${healthpoller.fixedDelay}") + public void pollToscaLabHealth() { + ComponentsInfo toscaLabHealthRes = null; + ResponseEntity<String> healthRes = null; + try { + for(int i=0; i<Integer.valueOf(hcretrynum); i++){ // 3 tries + healthRes = sendHealthCheck(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Try #{}: {}", i, healthRes); + if(healthRes.getStatusCode()==HttpStatus.OK){ + String result = (String) healthRes.getBody(); + toscaLabHealthRes = gson.fromJson(result, ComponentsInfo.class); + break; + } + } + } catch (Exception e) { + toscaLabHealthRes = getNegativeHealth(e.getMessage()); + } + if(toscaLabHealthRes == null){ + toscaLabHealthRes = getNegativeHealth(healthRes.getBody() + "-" + healthRes.getStatusCode()); + } + toscaLabHealthState.setToscaLabHealthResponse(toscaLabHealthRes); + } + + private ComponentsInfo getNegativeHealth(String msg) { + ComponentsInfo toscaLabHealthRes = new ComponentsInfo(); + String description = "DCAE-D BE failed while trying to fetch Tosca_Lab healthcheck. 
Exception: " +msg; + toscaLabHealthRes.setDescription(description); + toscaLabHealthRes.setHealthCheckComponent(DcaeBeConstants.Health.TOSCA_LAB); + toscaLabHealthRes.setHealthCheckStatus(DcaeBeConstants.Health.DOWN); + errLogger.log(LogLevel.ERROR, this.getClass().getName(), description); + return toscaLabHealthRes; + } + + public ResponseEntity<String> sendHealthCheck() { + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.setAccept(Collections.singletonList(MediaType.APPLICATION_JSON)); + HttpEntity<String> entity = new HttpEntity<String>(headers); + return Http.exchangeSync(hcuri.toString(), HttpMethod.GET, entity, String.class, 5000); + } + + public void setHcuri(URI hcuri) { + this.hcuri = hcuri; + } + + public void setHcretrynum(String hcretrynum) { + this.hcretrynum = hcretrynum; + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/health/ToscaLabHealthState.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/health/ToscaLabHealthState.java new file mode 100644 index 0000000..6fe469f --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/health/ToscaLabHealthState.java @@ -0,0 +1,29 @@ +package org.onap.sdc.dcae.composition.controller.health; + +import org.onap.sdc.dcae.composition.restmodels.health.ComponentsInfo; +import org.onap.sdc.dcae.composition.util.DcaeBeConstants; +import org.springframework.context.annotation.Scope; +import org.springframework.stereotype.Component; + +@Scope(value = "singleton") +@Component +public class ToscaLabHealthState { + private ComponentsInfo toscaLabHealthResponse; + + public ToscaLabHealthState() { + super(); + toscaLabHealthResponse = new ComponentsInfo(); + toscaLabHealthResponse.setDescription("Not up yet"); + toscaLabHealthResponse.setHealthCheckComponent(DcaeBeConstants.Health.TOSCA_LAB); + toscaLabHealthResponse.setHealthCheckStatus(DcaeBeConstants.Health.DOWN); + } + + public ComponentsInfo getToscaLabHealthResponse() { + return toscaLabHealthResponse; + } + + public void setToscaLabHealthResponse(ComponentsInfo toscaLabHealthResponse) { + this.toscaLabHealthResponse = toscaLabHealthResponse; + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/swagger/SwaggerConfig.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/swagger/SwaggerConfig.java new file mode 100644 index 0000000..48c7153 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/controller/swagger/SwaggerConfig.java @@ -0,0 +1,23 @@ +package org.onap.sdc.dcae.composition.controller.swagger; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +import springfox.documentation.builders.PathSelectors; +import springfox.documentation.builders.RequestHandlerSelectors; +import springfox.documentation.spi.DocumentationType; +import springfox.documentation.spring.web.plugins.Docket; +import springfox.documentation.swagger2.annotations.EnableSwagger2; + +@Configuration +@EnableSwagger2 +public class SwaggerConfig { + @Bean + public Docket productApi() { + return new Docket(DocumentationType.SWAGGER_2) + .select() + .apis(RequestHandlerSelectors.basePackage("org.onap.sdc.dcae.composition.controller")) + .paths(PathSelectors.regex("/*.*")) + .build(); + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/impl/BaseBusinessLogic.java 
b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/impl/BaseBusinessLogic.java new file mode 100644 index 0000000..f26c885 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/impl/BaseBusinessLogic.java @@ -0,0 +1,49 @@ +package org.onap.sdc.dcae.composition.impl; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.dcae.client.ISdcClient; +import org.onap.sdc.dcae.composition.restmodels.sdc.Artifact; +import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed; +import org.onap.sdc.dcae.utils.SdcRestClientUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; +import org.springframework.util.Base64Utils; +import org.springframework.util.CollectionUtils; + +@Component +public class BaseBusinessLogic { + @Autowired + protected ISdcClient sdcRestClient; + + protected static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + protected static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + public ISdcClient getSdcRestClient() { + return sdcRestClient; + } + + void setSdcRestClient(ISdcClient sdcRestClient) { + this.sdcRestClient = sdcRestClient; + } + + Artifact cloneArtifactToTarget(String userId, String targetId, String payload, Artifact artifactToClone, String requestId) throws Exception { + Artifact cloned = SdcRestClientUtils.generateDeploymentArtifact(artifactToClone.getArtifactDescription(), artifactToClone.getArtifactName(), artifactToClone.getArtifactType(), artifactToClone.getArtifactLabel(), payload.getBytes()); + return sdcRestClient.createResourceArtifact(userId, targetId, cloned, requestId); + } + + public void cloneArtifactToTarget(String userId, String targetId, String payload, Artifact artifactToClone, Artifact artifactToOverride, String requestId) throws Exception{ + if (null != artifactToOverride) { + artifactToOverride.setDescription(artifactToOverride.getArtifactDescription()); + artifactToOverride.setPayloadData(Base64Utils.encodeToString(payload.getBytes())); + sdcRestClient.updateResourceArtifact(userId, targetId, artifactToOverride, requestId); + } else { + cloneArtifactToTarget(userId, targetId, payload, artifactToClone, requestId); + } + } + + Artifact findArtifactDataByArtifactName(ResourceDetailed vfcmt, String artifactName) { + return null == vfcmt ? null : CollectionUtils.isEmpty(vfcmt.getArtifacts()) ? 
null : vfcmt.getArtifacts().stream()
+ .filter(p -> artifactName.equals(p.getArtifactName())).findAny().orElse(null);
+ }
+}
diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/impl/ReferenceBusinessLogic.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/impl/ReferenceBusinessLogic.java
new file mode 100644
index 0000000..d229b67
--- /dev/null
+++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/impl/ReferenceBusinessLogic.java
@@ -0,0 +1,74 @@
+package org.onap.sdc.dcae.composition.impl;
+
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.composition.restmodels.MonitoringComponent;
+import org.onap.sdc.dcae.composition.restmodels.sdc.Artifact;
+import org.onap.sdc.dcae.composition.restmodels.sdc.ExternalReferencesMap;
+import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceInstance;
+import org.onap.sdc.dcae.composition.restmodels.sdc.ServiceDetailed;
+import org.onap.sdc.dcae.errormng.ActionStatus;
+import org.onap.sdc.dcae.errormng.ErrConfMgr;
+import org.onap.sdc.dcae.utils.Normalizers;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Component;
+
+import java.util.*;
+
+import static org.springframework.util.CollectionUtils.isEmpty;
+
+@Component
+public class ReferenceBusinessLogic extends BaseBusinessLogic {
+
+ public ResponseEntity deleteVfcmtReferenceBlueprint(String userId, String context, String monitoringComponentName, String serviceUuid, String vfiName, String vfcmtUuid, String requestId) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Going to delete blueprint, monitoringComponentName = {}, vfiName = {}", monitoringComponentName, vfiName);
+ try {
+ String normalizedVfiName = Normalizers.normalizeComponentInstanceName(vfiName);
+ ServiceDetailed serviceDetailed = sdcRestClient.getService(serviceUuid, requestId);
+ Optional<ResourceInstance> resourceInstance = serviceDetailed.getResources().stream().filter(item -> item.getResourceInstanceName().equalsIgnoreCase(vfiName)).findAny();
+ if (resourceInstance.isPresent() && resourceInstance.get().getArtifacts() != null) {
+ Optional<Artifact> artifact = resourceInstance.get().getArtifacts().stream().filter(item -> item.getArtifactName().contains(monitoringComponentName)).findAny();
+ artifact.ifPresent(artifact1 -> sdcRestClient.deleteInstanceResourceArtifact(userId, context, serviceUuid, normalizedVfiName, artifact1.getArtifactUUID(), requestId));
+ }
+ } catch (Exception e) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(),"Failed to delete blueprint with serviceUuid {}, vfcmtUuid {}. message: {}", serviceUuid, vfcmtUuid, e);
+ return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.DELETE_BLUEPRINT_FAILED);
+ }
+ return new ResponseEntity<>(HttpStatus.OK);
+ }
+
+ public void deleteVfcmtReference(String userId, String context, String serviceUuid, String vfiName, String vfcmtUuid, String requestId) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Going to delete vfcmt reference, vfiName = {}", vfiName);
+ String normalizedVfiName = Normalizers.normalizeComponentInstanceName(vfiName);
+ sdcRestClient.deleteExternalMonitoringReference(userId, context, serviceUuid, normalizedVfiName, vfcmtUuid, requestId);
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Finished deleting vfcmt reference. 
serviceUuid {}, vfcmtUuid {}", serviceUuid, vfcmtUuid); + } + + // 1806 US381853 Return a list of monitoring components by external reference id. Support partial success + public Map<String, List<MonitoringComponent>> fetchMonitoringComponents(ExternalReferencesMap mcRefs, String requestId) { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Going to fetch monitoring components metadata for vfis {}", mcRefs.keySet()); + Map<String, List<MonitoringComponent>> result = new LinkedHashMap<>(); + List<MonitoringComponent> monitoringComponents = Collections.synchronizedList(new ArrayList<>()); + List<MonitoringComponent> unavailable = Collections.synchronizedList(new ArrayList<>()); + mcRefs.entrySet().parallelStream().forEach(entry -> + entry.getValue().parallelStream().forEach(id -> { + try{ + monitoringComponents.add(new MonitoringComponent(getSdcRestClient().getResource(id, requestId), entry.getKey())); + } catch (Exception e) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(),"Failed to fetch monitoring component with uuid {}. message: {} ", id, e); + unavailable.add(new MonitoringComponent(id, entry.getKey(), "unavailable")); + } + + }) + ); + result.put("monitoringComponents", monitoringComponents); + if(!isEmpty(unavailable)) { + result.put("unavailable", unavailable); + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Finished fetching monitoring components metadata for vfis {}", mcRefs.keySet()); + return result; + } + + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/impl/VfcmtBusinessLogic.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/impl/VfcmtBusinessLogic.java new file mode 100644 index 0000000..e68a8ee --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/composition/impl/VfcmtBusinessLogic.java @@ -0,0 +1,283 @@ +package org.onap.sdc.dcae.composition.impl; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang.StringUtils; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.composition.restmodels.*; +import org.onap.sdc.dcae.composition.restmodels.sdc.Artifact; +import org.onap.sdc.dcae.composition.restmodels.sdc.ExternalReferencesMap; +import org.onap.sdc.dcae.composition.restmodels.sdc.Resource; +import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed; +import org.onap.sdc.dcae.composition.util.DcaeBeConstants; +import org.onap.sdc.dcae.enums.ArtifactType; +import org.onap.sdc.dcae.enums.LifecycleOperationType; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.utils.SdcRestClientUtils; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.stereotype.Component; +import org.springframework.util.Base64Utils; + +import java.io.IOException; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.onap.sdc.dcae.composition.util.DcaeBeConstants.LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT; +import static org.onap.sdc.dcae.composition.util.DcaeBeConstants.LifecycleStateEnum.findState; + +@Component +public class VfcmtBusinessLogic extends BaseBusinessLogic { + + private static final String VFCMT = "VFCMT"; + private static final String TEMPLATE = "Template"; + private static final String MONITORING_TEMPLATE = "Monitoring Template"; + private static final String DEFAULTICON = "defaulticon"; + private static final String VENDOR_NAME = 
"vendorName"; + private static final String VENDOR_RELEASE = "vendorRelease"; + + public ResponseEntity createMcFromTemplate(String userId, CreateVFCMTRequest request, String requestId) { + if(!validateMCRequestFields(request)) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Missing information"); + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.INVALID_CONTENT); + } + return cloneMcAndAddServiceReference(userId, request, requestId); + } + + //1806 US388513 collect existing VFCMT data - flowType from cdump artifact and external reference from svc_reference artifact. If cdump not found - return error + + public ResponseEntity getVfcmtReferenceData(String vfcmtUuid, String requestId) throws Exception { + ResourceDetailed vfcmt = sdcRestClient.getResource(vfcmtUuid, requestId); + Artifact artifactData = findCdumpArtifactData(vfcmt); + if(null == artifactData) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(),"No composition found on vfcmt {}", vfcmtUuid); + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.MISSING_TOSCA_FILE, "", vfcmt.getName()); + } + VfcmtData vfcmtData = new VfcmtData(vfcmt); + //fetch cdump payload + String payload = getSdcRestClient().getResourceArtifact(vfcmtUuid, artifactData.getArtifactUUID(), requestId); + //extract and set flowType from cdump payload + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(),"Looking for flowType definition in cdump"); + vfcmtData.setFlowType(StringUtils.substringBetween(payload,"\"flowType\":\"","\"")); + //find svc_reference + artifactData = findArtifactDataByArtifactName(vfcmt, DcaeBeConstants.Composition.fileNames.SVC_REF); + if(null != artifactData) { + //fetch svc_reference payload + payload = getSdcRestClient().getResourceArtifact(vfcmtUuid, artifactData.getArtifactUUID(), requestId); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(),"Looking for attached service and vfi info in svc_reference"); + //extract and set serviceUuid from svc_reference payload + vfcmtData.setServiceUuid(StringUtils.substringBefore(payload, "/")); + //extract and set vfiName from svc_reference payload + vfcmtData.setVfiName(StringUtils.substringAfterLast(payload, "/")); + } + return new ResponseEntity<>(vfcmtData, HttpStatus.OK); + } + + + //1806 US388525 import or clone VFCMT - always pass the flowType - update will only take place if missing from cdump + public ResponseEntity importMC(String userId, ImportVFCMTRequest request, String requestId) { + if(!validateMCRequestFields(request)) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Missing information"); + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.INVALID_CONTENT); + } + // option 1 - clone + if(request.isCloneVFCMT()) { + return cloneMcAndAddServiceReference(userId, request, requestId); + } + + ResourceDetailed vfcmt = null; + boolean undoCheckoutOnFailure = false; + // fetch vfcmt and cdump + try { + vfcmt = sdcRestClient.getResource(request.getTemplateUuid(), requestId); + Artifact cdumpArtifactData = fetchCdumpAndSetFlowType(vfcmt, request.getFlowType(), requestId); + if (null == cdumpArtifactData) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "No cdump found for monitoring component {}", vfcmt.getUuid()); + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.MISSING_TOSCA_FILE, "", vfcmt.getName()); + } + String cdumpPayload = cdumpArtifactData.getPayloadData(); + + // option 2 - edit original cdump - requires check out + if(request.isUpdateFlowType()) { + 
if(DcaeBeConstants.LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT != DcaeBeConstants.LifecycleStateEnum.findState(vfcmt.getLifecycleState())) { + vfcmt = sdcRestClient.changeResourceLifecycleState(userId, vfcmt.getUuid(), LifecycleOperationType.CHECKOUT.name(), "checking out VFCMT", requestId); + undoCheckoutOnFailure = true; + } + cdumpArtifactData.setDescription("updating flowType on cdump"); + cdumpArtifactData.setPayloadData(Base64Utils.encodeToString(cdumpPayload.getBytes())); + sdcRestClient.updateResourceArtifact(userId, vfcmt.getUuid(), cdumpArtifactData, requestId); + } + // option 3 - update service reference only + updateReferenceToService(userId, request, vfcmt.getUuid(), requestId); + if(DcaeBeConstants.LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT == DcaeBeConstants.LifecycleStateEnum.findState(vfcmt.getLifecycleState())) { + // this will not throw an exception + checkinVfcmtAfterClone(userId, vfcmt, requestId); + } + return new ResponseEntity<>(buildVfcmtAndCdumpResponse(vfcmt, request.getVfiName(), request.getFlowType(), cdumpPayload), HttpStatus.OK); + } catch (Exception e) { + errLogger.log(LogLevel.ERROR,this.getClass().getName(),"Failed updating Monitoring Component:{}", e.getMessage()); + if(undoCheckoutOnFailure) { + rollBack(userId, vfcmt, requestId); + } + return ErrConfMgr.INSTANCE.handleException(e, ErrConfMgr.ApiType.CREATE_NEW_VFCMT); + } + + } + + private boolean validateMCRequestFields(CreateVFCMTRequest request) { + return Stream.of(request.getFlowType(), request.getTemplateUuid(), request.getName(), request.getDescription(), request.getContextType(), request.getServiceUuid(), request.getVfiName()) + .allMatch(StringUtils::isNotBlank); + } + + private void updateReferenceToService(String userId, CreateVFCMTRequest request, String newVfcmtUuid, String requestId) { + String serviceUuid = request.getServiceUuid(); + String vfiName = request.getVfiName(); + + debugLogger.log(LogLevel.INFO, this.getClass().getName(),"About to update service {}/{} to monitoring component {} ", serviceUuid, vfiName, request.getName()); + + sdcRestClient.addExternalMonitoringReference(userId, request, new ReferenceUUID(newVfcmtUuid), requestId); + + } + + private void rollBack(String userId, ResourceDetailed newVfcmt, String requestId) { + if (null != newVfcmt) { + try { + getSdcRestClient().changeResourceLifecycleState(userId, newVfcmt.getUuid(), LifecycleOperationType.UNDO_CHECKOUT.getValue(), "DCAE rollback", requestId); + } catch (Exception e) { + errLogger.log(LogLevel.ERROR,this.getClass().getName(),"Failed rolling back Monitoring Component. 
ID:{}", newVfcmt.getUuid()); + debugLogger.log(LogLevel.ERROR,this.getClass().getName(),"Failed rolling back Monitoring Component:{}", e); + } + } + } + + private ResponseEntity cloneMcAndAddServiceReference(String userId, CreateVFCMTRequest request, String requestId) { + addSdcMandatoryFields(request, userId); + ResourceDetailed newVfcmt = null; + try { + // Retrieve the Template VFCMT from SDC - use the template UUID provided from UI + ResourceDetailed templateMC = sdcRestClient.getResource(request.getTemplateUuid(), requestId); + // Download the CDUMP file from the template VFCMT + Artifact cdumpArtifactData = fetchCdumpAndSetFlowType(templateMC, request.getFlowType(), requestId); + if (null == cdumpArtifactData) { + errLogger.log(LogLevel.ERROR,this.getClass().getName(),"No cdump found for template {} while creating monitoring component", templateMC.getUuid()); + return ErrConfMgr.INSTANCE.buildErrorResponse(ActionStatus.MISSING_TOSCA_FILE, "", templateMC.getName()); + } + newVfcmt = sdcRestClient.createResource(userId, request, requestId); + // The cdump has the original template id. we need to replace it with the new vfcmt id + String newVfcmtUuid = newVfcmt.getUuid(); + String cdumpPayload = cdumpArtifactData.getPayloadData().replaceAll(templateMC.getUuid(), newVfcmtUuid); + + // Upload it to newly created VFCMT + cloneArtifactToTarget(userId, newVfcmtUuid, cdumpPayload, cdumpArtifactData, requestId); + cloneRuleArtifacts(userId, templateMC, newVfcmtUuid, requestId); + createReferenceArtifact(userId, request, newVfcmtUuid, requestId); + updateReferenceToService(userId, request, newVfcmtUuid, requestId); + + // this will not throw an exception + checkinVfcmtAfterClone(userId, newVfcmt, requestId); + return new ResponseEntity<>(buildVfcmtAndCdumpResponse(newVfcmt, request.getVfiName(), request.getFlowType(), cdumpPayload), HttpStatus.OK); + } catch (Exception e) { + errLogger.log(LogLevel.ERROR,this.getClass().getName(),"Failed creating Monitoring Component:{}", e.getMessage()); + rollBack(userId, newVfcmt, requestId); + return ErrConfMgr.INSTANCE.handleException(e, ErrConfMgr.ApiType.CREATE_NEW_VFCMT); + } + } + + private CreateMcResponse buildVfcmtAndCdumpResponse(ResourceDetailed vfcmt, String vfiName, String flowType, String cdumpPayload) throws IOException { + return new CreateMcResponse(new VfcmtData(vfcmt, vfiName, flowType), new ObjectMapper().readValue(cdumpPayload, Object.class)); + } + + private void checkinVfcmtAfterClone(String userId, ResourceDetailed vfcmt, String requestId) { + try { + vfcmt = sdcRestClient.changeResourceLifecycleState(userId, vfcmt.getUuid(), LifecycleOperationType.CHECKIN.getValue(), "check in after clone", requestId); + } catch (Exception e) { + errLogger.log(LogLevel.ERROR,this.getClass().getName(),"Failed to check in Monitoring Component: {}. 
message: {}", vfcmt.getUuid(), e); + } + } + + + private Artifact findCdumpArtifactData(ResourceDetailed vfcmt) { + return findArtifactDataByArtifactName(vfcmt, DcaeBeConstants.Composition.fileNames.COMPOSITION_YML); + } + + private void cloneRuleArtifacts(String userId, ResourceDetailed templateMC, String newVfcmtUuid, String requestId) throws Exception { + // handle rule artifacts using java 7 for-loop - exception propagation to calling method + for(Artifact artifact : templateMC.getArtifacts()) { + if(artifact.getArtifactName().endsWith(DcaeBeConstants.Composition.fileNames.MAPPING_RULE_POSTFIX)) { + cloneArtifactToTarget(userId, newVfcmtUuid, sdcRestClient.getResourceArtifact(templateMC.getUuid(), artifact.getArtifactUUID(), requestId), artifact, requestId); + } + } + } + + // fetch the vfcmt cdump artifact payload and insert the flowType. Return the artifact with updated payload or null (artifact doesn't exist) + private Artifact fetchCdumpAndSetFlowType(ResourceDetailed vfcmt, String flowType, String requestId) throws Exception { + Artifact cdumpArtifactData = findCdumpArtifactData(vfcmt); + if (null != cdumpArtifactData) { + String cdumpPayload = sdcRestClient.getResourceArtifact(vfcmt.getUuid(), cdumpArtifactData.getArtifactUUID(), requestId); + // Add flowType data to cdump if provided + if(!cdumpPayload.contains("\"flowType\":\"") && StringUtils.isNotBlank(flowType)) { + cdumpPayload = cdumpPayload.replaceFirst("\\{", "{\"flowType\":\"" + flowType + "\","); + } + cdumpArtifactData.setPayloadData(cdumpPayload); + } + return cdumpArtifactData; + } + + // backward compatibility (very backward) + private void createReferenceArtifact(String userId, CreateVFCMTRequest request, String newVfcmtUuid, String requestId) throws Exception { + String referencePayload = request.getServiceUuid() + "/resources/" + request.getVfiName(); + Artifact refArtifact = SdcRestClientUtils.generateDeploymentArtifact("createReferenceArtifact", DcaeBeConstants.Composition.fileNames.SVC_REF, ArtifactType.DCAE_TOSCA.name(), "servicereference", referencePayload.getBytes()); + sdcRestClient.createResourceArtifact(userId, newVfcmtUuid, refArtifact, requestId); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Artifact {} created with content: {}", DcaeBeConstants.Composition.fileNames.SVC_REF, referencePayload); + } + + public ResponseEntity getVfcmtsForMigration(String userId, String contextType, String uuid, String version, + String requestId) { + List<Resource> resources; + ExternalReferencesMap connectedVfcmts; + try { + connectedVfcmts = getSdcRestClient().getMonitoringReferences(contextType, uuid, version, requestId); + resources = getSdcRestClient().getResources(VFCMT, TEMPLATE, MONITORING_TEMPLATE, requestId); + } catch (Exception e) { + errLogger.log(LogLevel.ERROR,this.getClass().getName(),"Exception getVfcmtsForMigration {}", e); + debugLogger.log(LogLevel.DEBUG,this.getClass().getName(),"Exception getVfcmtsForMigration {}", e); + return ErrConfMgr.INSTANCE.handleException(e, ErrConfMgr.ApiType.GET_ALL_VFCMTS); + } + + List<Resource> vfcmts = resources.stream() + .filter(resource -> notCheckedOutOrMine(userId, resource)) + .filter(resource -> !connected(resource, connectedVfcmts)) + .collect(Collectors.toList()); + return new ResponseEntity<>(vfcmts, HttpStatus.OK); + } + + private boolean connected(Resource resource, ExternalReferencesMap connectedVfcmts){ + return connectedVfcmts.values().stream().anyMatch(p -> p.contains(resource.getUuid())); + } + + private boolean 
notCheckedOutOrMine(String userId, Resource resource) { + // the resource is eligible if it belongs to this user + // otherwise it must not be checked out by anyone else + + return resource.getLastUpdaterUserId().equalsIgnoreCase(userId) || + NOT_CERTIFIED_CHECKOUT != findState(resource.getLifecycleState()); + } + + + public void addSdcMandatoryFields(CreateVFCMTRequest createRequest, String user) { + createRequest.setContactId(user); + createRequest.setIcon(DEFAULTICON); + createRequest.setResourceType(VFCMT); + createRequest.setVendorName(VENDOR_NAME); + createRequest.setVendorRelease(VENDOR_RELEASE); + if (StringUtils.isBlank(createRequest.getCategory())) { + createRequest.setCategory(TEMPLATE); + } + if (StringUtils.isBlank(createRequest.getSubcategory())) { + createRequest.setSubcategory(MONITORING_TEMPLATE); + } + createRequest.setTags(new String[]{createRequest.getName()}); + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ActionStatus.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ActionStatus.java new file mode 100644 index 0000000..cac92f5 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ActionStatus.java @@ -0,0 +1,48 @@ +package org.onap.sdc.dcae.errormng; + +public enum ActionStatus { + + OK, + CREATED, + NO_CONTENT, + NOT_ALLOWED, + GENERAL_ERROR, + INVALID_CONTENT, + NOT_FOUND, + CONFIGURATION_ERROR, + VES_SCHEMA_NOT_FOUND, + VES_SCHEMA_INVALID, + FLOW_TYPES_CONFIGURATION_ERROR, + CLONE_FAILED, + EMPTY_SERVICE_LIST, + MONITORING_TEMPLATE_ATTACHMENT_ERROR, + MISSING_TOSCA_FILE, + VALIDATE_TOSCA_ERROR, + SUBMIT_BLUEPRINT_ERROR, + GENERATE_BLUEPRINT_ERROR, + INVALID_RULE_FORMAT, + SAVE_RULE_FAILED, + RESOURCE_NOT_VFCMT_ERROR, + VFI_FETCH_ERROR, + USER_CONFLICT, + MISSING_RULE_DESCRIPTION, + MISSING_ACTION, + MISSING_ACTION_FIELD, + MISSING_CONCAT_VALUE, + INVALID_GROUP_CONDITION, + MISSING_CONDITION_ITEM, + MISSING_OPERAND, + INVALID_OPERATOR, + MISSING_ENTRY, + MISSING_DEFAULT_VALUE, + DUPLICATE_KEY, + ACTION_DEPENDENCY, + RULE_DEPENDENCY, + NODE_NOT_FOUND, + DELETE_RULE_FAILED, + TRANSLATE_FAILED, + CATALOG_NOT_AVAILABLE, + AUTH_ERROR, + DELETE_BLUEPRINT_FAILED, + AS_IS +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/BasicConfiguration.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/BasicConfiguration.java new file mode 100644 index 0000000..001109e --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/BasicConfiguration.java @@ -0,0 +1,5 @@ +package org.onap.sdc.dcae.errormng; + +public class BasicConfiguration { + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/DcaeException.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/DcaeException.java new file mode 100644 index 0000000..60b8e0e --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/DcaeException.java @@ -0,0 +1,15 @@ +package org.onap.sdc.dcae.errormng; + +import org.springframework.http.HttpStatus; +import org.springframework.web.client.HttpClientErrorException; + +public class DcaeException extends BaseException { + +// public DcaeException(HttpClientErrorException theError) { +// super(theError); +// } + + public DcaeException(HttpStatus status, RequestError re){ + super(status, re); + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ErrConfMgr.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ErrConfMgr.java new file mode 100644 index 0000000..de1d06b --- /dev/null +++ 
b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ErrConfMgr.java @@ -0,0 +1,306 @@ +package org.onap.sdc.dcae.errormng; + +import org.onap.sdc.dcae.catalog.asdc.ASDCException; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; + +import java.util.*; + +public enum ErrConfMgr { + INSTANCE; + + private static EnumMap<ApiType, Map<String, String>> sdcDcaeMsgIdMap; + public static final String AS_IS = "AS_IS"; + private ResponseFormatManager responseFormatManager; + + ErrConfMgr() { + responseFormatManager = ResponseFormatManager.getInstance(); + populateSdcDcaeMsgIdMap(); + } + + private void setSdcCatalogPolicyMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("POL5000", AS_IS); + map.put("POL5001", "POL5500"); + map.put("POL5002", "POL5501"); + sdcDcaeMsgIdMap.put(ApiType.ALL_SDC_CATALOG, map); + } + + private void setGetVfcmtMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("SVC4063", AS_IS); + map.put("SVC4505", AS_IS); + sdcDcaeMsgIdMap.put(ApiType.GET_VFCMT, map); + } + + private void setCreateNewVfcmtMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("SVC4050", AS_IS); + map.put("SVC4126", AS_IS); + map.put("SVC4500", AS_IS); + map.put("SVC4062", AS_IS); + map.put("SVC4064", AS_IS); + map.put("SVC4065", AS_IS); + map.put("SVC4066", AS_IS); + map.put("SVC4067", AS_IS); + map.put("SVC4068", AS_IS); + map.put("SVC4069", AS_IS); + map.put("SVC4070", AS_IS); + map.put("SVC4071", AS_IS); + map.put("SVC4072", AS_IS); + map.put("SVC4073", AS_IS); + map.put("SVC4053", AS_IS); + map.put("POL5003", AS_IS); + // adding service referencing error handling to create scenario + map.put("SVC4063", AS_IS); + map.put("SVC4122", AS_IS); + map.put("SVC4124", AS_IS); + map.put("SVC4128", AS_IS); + map.put("SVC4125", AS_IS); + map.put("SVC4127", AS_IS); + map.put("SVC4086", AS_IS); + map.put("SVC4301", AS_IS); + sdcDcaeMsgIdMap.put(ApiType.CREATE_NEW_VFCMT, map); + } + + private void setCloneVfcmtMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("SVC4063", AS_IS); + map.put("SVC4505", AS_IS); + map.put("SVC4085", AS_IS); + map.put("SVC4080", AS_IS); + map.put("SVC4122", "SVC6010"); + map.put("SVC4124", "SVC6010"); + map.put("SVC4128", "SVC6010"); + map.put("SVC4125", AS_IS); + map.put("SVC4127", "SVC6010"); + map.put("SVC4086", AS_IS); + map.put("SVC4301", AS_IS); + sdcDcaeMsgIdMap.put(ApiType.CLONE_VFCMT, map); + } + + + private void setGetServiceMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("SVC4503", AS_IS); + map.put("SVC4642", "200"); + sdcDcaeMsgIdMap.put(ApiType.GET_SERVICE, map); + } + + private void setAttachToServiceMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("SVC4063", "SVC6021"); + map.put("SVC4122", "SVC6021"); + map.put("SVC4124", "SVC6021"); + map.put("SVC4128", "SVC6021"); + map.put("SVC4125", AS_IS); + map.put("SVC4127", "SVC6021"); + map.put("SVC4086", AS_IS); + map.put("SVC4301", AS_IS); + map.put("SVC4503", AS_IS); + sdcDcaeMsgIdMap.put(ApiType.ATTACH_TO_SERVICE, map); + } + + private void setGetCdumpMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("SVC4063", AS_IS); + map.put("SVC4505", AS_IS); + sdcDcaeMsgIdMap.put(ApiType.GET_CDUMP, map); + } + + private void setGetModelMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("SVC4063", AS_IS); + map.put("SVC4505", "SVC6031"); + sdcDcaeMsgIdMap.put(ApiType.GET_MODEL, map); + } + + private void setCheckoutResourceMapping(){ 
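+ // as with all the mappings here, SDC ids not mapped for this flow fall back to the ALL_SDC_CATALOG mapping and then to GENERAL_ERROR (see convertToDcaeActionStatus) +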
Map<String, String> map = new HashMap<>(); + map.put("SVC4063", "SVC6021"); + map.put("SVC4085", AS_IS); + map.put("SVC4080", AS_IS); + map.put("SVC4002", AS_IS); + sdcDcaeMsgIdMap.put(ApiType.CHECK_OUT_RESOURCE, map); + } + + private void setCheckinResourceMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("SVC4063", "SVC6021"); + map.put("SVC4086", AS_IS); + map.put("SVC4301", AS_IS); + map.put("SVC4084", AS_IS); + map.put("SVC4085", AS_IS); + map.put("SVC4002", AS_IS); + sdcDcaeMsgIdMap.put(ApiType.CHECK_IN_RESOURCE, map); + } + + private void setSaveCdumpMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("SVC4063", "SVC6021"); + map.put("SVC4122", "SVC6021"); + map.put("SVC4124", "SVC6021"); + map.put("SVC4128", "SVC6021"); + map.put("SVC4125", AS_IS); + map.put("SVC4127", "SVC6021"); + map.put("SVC4086", AS_IS); + map.put("SVC4301", AS_IS); + sdcDcaeMsgIdMap.put(ApiType.SAVE_CDUMP, map); + } + + private void setSubmitBlueprintMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("SVC4063", AS_IS); + map.put("SVC4505", "SVC6031"); + map.put("SVC4503", AS_IS); + map.put("SVC4085", AS_IS); + map.put("SVC4080", AS_IS); + map.put("SVC4122", "SVC6033"); + map.put("SVC4124", "SVC6033"); + map.put("SVC4128", "SVC6033"); + map.put("SVC4125", AS_IS); + map.put("SVC4127", "SVC6033"); + map.put("SVC4086", AS_IS); + map.put("SVC4301", AS_IS); + sdcDcaeMsgIdMap.put(ApiType.SUBMIT_BLUEPRINT, map); + } + + private void setGetRuleMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("SVC4063", AS_IS); + sdcDcaeMsgIdMap.put(ApiType.GET_RULE_ARTIFACT, map); + } + + private void setSaveRuleMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("SVC4063", "SVC6036"); + map.put("SVC4122", "SVC6036"); + map.put("SVC4124", "SVC6036"); + map.put("SVC4128", "SVC6036"); + map.put("SVC4125", AS_IS); + map.put("SVC4127", "SVC6036"); + map.put("SVC4086", AS_IS); + map.put("SVC4301", AS_IS); + map.put("SVC4000", "SVC6036"); + sdcDcaeMsgIdMap.put(ApiType.SAVE_RULE_ARTIFACT, map); + } + + private void setGetAllVfcmtMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("SVC4642", "200"); + sdcDcaeMsgIdMap.put(ApiType.GET_ALL_VFCMTS, map); + } + + + private void setDeleteReferenceMapping(){ + Map<String, String> map = new HashMap<>(); + map.put("POL5003", AS_IS); + map.put("SVC4063", AS_IS); + map.put("POL4050", AS_IS); + map.put("SVC4086", AS_IS); + map.put("SVC4301", AS_IS); + map.put("SVC4687", AS_IS); + sdcDcaeMsgIdMap.put(ApiType.DELETE_VFCMT_REFERENCE, map); + } + + private void populateSdcDcaeMsgIdMap() { + sdcDcaeMsgIdMap = new EnumMap<>(ApiType.class); + setAttachToServiceMapping(); + setCheckinResourceMapping(); + setCheckoutResourceMapping(); + setCloneVfcmtMapping(); + setGetAllVfcmtMapping(); + setGetRuleMapping(); + setCreateNewVfcmtMapping(); + setGetCdumpMapping(); + setGetModelMapping(); + setSaveCdumpMapping(); + setSaveRuleMapping(); + setSubmitBlueprintMapping(); + setGetServiceMapping(); + setGetVfcmtMapping(); + setSdcCatalogPolicyMapping(); + setDeleteReferenceMapping(); + } + + public enum ApiType { + CREATE_NEW_VFCMT, + GET_ALL_VFCMTS, + CLONE_VFCMT, + GET_VFCMT, + GET_SERVICE, + ATTACH_TO_SERVICE, + GET_CDUMP, + GET_MODEL, + CHECK_OUT_RESOURCE, + CHECK_IN_RESOURCE, + SAVE_CDUMP, + SUBMIT_BLUEPRINT, + GET_RULE_ARTIFACT, + SAVE_RULE_ARTIFACT, + ALL_SDC_CATALOG, + DELETE_VFCMT_REFERENCE + } + + public ResponseFormat getResponseFormat(ActionStatus actionStatus, String notes, String... 
variables) { + return responseFormatManager.getResponseFormat(actionStatus, notes, variables); + } + + public ResponseEntity buildErrorResponse(ActionStatus actionStatus, String notes, String... variables) { + ResponseFormat response = responseFormatManager.getResponseFormat(actionStatus, notes, variables); + return new ResponseEntity<>(response, HttpStatus.valueOf(response.getStatus())); + } + + public ResponseEntity buildErrorResponse(ActionStatus actionStatus) { + ResponseFormat response = responseFormatManager.getResponseFormat(actionStatus, ""); + return new ResponseEntity<>(response, HttpStatus.valueOf(response.getStatus())); + } + + public ResponseEntity buildErrorResponse(BaseException baseException) { + ResponseFormat response = responseFormatManager.getResponseFormat(baseException); + return new ResponseEntity<>(response, HttpStatus.valueOf(response.getStatus())); + } + + public ResponseEntity buildErrorArrayResponse(List<ServiceException> errors) { + ResponseFormat response = responseFormatManager.getResponseFormat(errors); + return new ResponseEntity<>(response, HttpStatus.valueOf(response.getStatus())); + } + + // ActionStatus determined by sdc to dcae mapping + public ActionStatus convertToDcaeActionStatus(String messageId, ApiType apiType) { + // try the apiType's specific mapping from SDC messageId to dcaeMessageId + String dcaeMessageId = sdcDcaeMsgIdMap.get(apiType).get(messageId); + // if no specific mapping found try the general mapping + if(null == dcaeMessageId) + dcaeMessageId = sdcDcaeMsgIdMap.get(ApiType.ALL_SDC_CATALOG).get(messageId); + // if no mapping found return general error + if(null == dcaeMessageId) + return ActionStatus.GENERAL_ERROR; + // if mapped to 'AS_IS' return 'AS_IS' + if(AS_IS.equals(dcaeMessageId)) + return ActionStatus.AS_IS; + // for any other valid mapping fetch the ActionStatus by corresponding dcaeMessageId + return responseFormatManager.getMsgIdToActionStatusMap().get(dcaeMessageId); + } + + public ResponseEntity handleException(Exception e, ApiType apiType, String... variables){ + if (e instanceof ASDCException){ + ASDCException se = (ASDCException)e; + ActionStatus status = convertToDcaeActionStatus(se.getMessageId(), apiType); + switch (status) { + case AS_IS: + return buildErrorResponse(se); + case OK: + return new ResponseEntity<>(new ArrayList<>(), HttpStatus.OK); + default: + return buildErrorResponse(status, se.getMessage(), variables); + } + } + //TODO refactor - don't throw DcaeException + if (e instanceof DcaeException){ + return buildErrorResponse((DcaeException)e); + } + return buildErrorResponse(ActionStatus.GENERAL_ERROR, e.getMessage()); + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ErrorConfiguration.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ErrorConfiguration.java new file mode 100644 index 0000000..8f6f5af --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ErrorConfiguration.java @@ -0,0 +1,46 @@ +package org.onap.sdc.dcae.errormng; + +import java.util.Map; + +/** + * Example: + * VES_SCHEMA_INVALID: { + code: 500, + message: "Error – Failed to parse VES Schema file '%1'. 
[%2]", + messageId: "SVC6007" + } + + key will be "VES_SCHEMA_INVALID" + value is the json object containing code, message, messageId + */ + +import org.onap.sdc.dcae.errormng.BasicConfiguration; + +public class ErrorConfiguration extends BasicConfiguration { + + private Map<String, ErrorInfo> errors; + + public Map<String, ErrorInfo> getErrors() { + return errors; + } + + public void setErrors(Map<String, ErrorInfo> errors) { + this.errors = errors; + } + + public ErrorInfo getErrorInfo(String key) { + ErrorInfo clone = null; + ErrorInfo other = errors.get(key); + if (other != null) { + clone = new ErrorInfo(); + clone.cloneData(other); + } + return clone; + } + + @Override + public String toString() { + return "ErrorConfiguration [errors=" + errors + "]"; + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ErrorConfigurationLoader.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ErrorConfigurationLoader.java new file mode 100644 index 0000000..8b7ab44 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ErrorConfigurationLoader.java @@ -0,0 +1,121 @@ +package org.onap.sdc.dcae.errormng; + +import org.apache.commons.lang.ArrayUtils; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.yaml.snakeyaml.Yaml; + +import java.io.File; +import java.io.FilenameFilter; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Paths; + +import org.onap.sdc.common.onaplog.Enums.LogLevel; + +public class ErrorConfigurationLoader { + + private static ErrorConfigurationLoader instance; + private String jettyBase; + private ErrorConfiguration errorConfiguration = new ErrorConfiguration(); + private OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + public ErrorConfigurationLoader(String sourcePath) { + jettyBase = sourcePath; + loadErrorConfiguration(); + instance = this; + } + + private void loadErrorConfiguration(){ + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "ErrorConfigurationLoader: Trying to load error configuration"); + if (jettyBase == null) { + String msg = "Couldn't resolve jetty.base environmental variable"; + errLogger.log(LogLevel.ERROR, this.getClass().getName(), msg); + throw new ExceptionInInitializerError (msg + ". Failed to load error configuration files... aborting"); + } + + String path = jettyBase + "/config/dcae-be"; + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "jetty.base={}", jettyBase); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Configuration Path={}", path); + + File dir = new File(path); + File[] files = dir.listFiles(new FilenameFilter() { + @Override public boolean accept(File dir, String name) { + return name.equals("error-configuration.yaml"); + } + }); + + if (ArrayUtils.isEmpty(files)) { + String msg = "No error configuration files found"; + errLogger.log(LogLevel.ERROR, this.getClass().getName(), msg); + throw new ExceptionInInitializerError (msg); + }else if (files.length>1){ + String msg = "Multiple configuration files found. Make sure only one file exists. 
Path: "+ path; + errLogger.log(LogLevel.ERROR, this.getClass().getName(), msg); + throw new ExceptionInInitializerError (msg); + } + else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Loading error configuration file: {}", files[0].getName()); + try { + errorConfiguration = parseErrConfFileAndSaveToMap(files[0].getCanonicalPath()); +// convertToUsefulMaps(errorConfiguration); + } catch (IOException e) { + String msg = "Exception thrown while trying to read the error configuration file path. File="+files[0].getName(); + errLogger.log(LogLevel.ERROR, this.getClass().getName(), msg); + throw new ExceptionInInitializerError (msg); + } + if(errorConfiguration == null){ + String msg = "Error configuration file couldn't be parsed"; + errLogger.log(LogLevel.ERROR, this.getClass().getName(), msg); + throw new ExceptionInInitializerError (msg); + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Error Configuration: {}", errorConfiguration.toString()); + } + } + + + private ErrorConfiguration parseErrConfFileAndSaveToMap(String fullFileName) { + + Yaml yaml = new Yaml(); + + InputStream in = null; + ErrorConfiguration errorConfiguration = null; + try { + + File f = new File(fullFileName); + if (false == f.exists()) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "The file {} cannot be found. Ignore reading configuration.", fullFileName); + return null; + } + in = Files.newInputStream(Paths.get(fullFileName)); + + errorConfiguration = yaml.loadAs(in, ErrorConfiguration.class); + + } catch (Exception e) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Failed to convert yaml file {} to object. {}", fullFileName, e); + return null; + } + finally { + if (in != null) { + try { + in.close(); + } catch (IOException e) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Failed to close input stream {}", e.getMessage()); + } + } + } + + return errorConfiguration; + } + + ErrorConfiguration getErrorConfiguration() { + return errorConfiguration; + } + + public static ErrorConfigurationLoader getErrorConfigurationLoader() { + return instance; + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ErrorInfo.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ErrorInfo.java new file mode 100644 index 0000000..3ec3cef --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ErrorInfo.java @@ -0,0 +1,99 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.dcae.errormng; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; + +public class ErrorInfo { + + private Integer code; + private String message; + private String messageId; + private ErrorInfoType errorInfoType; + + private static final String SVC_PREFIX = "SVC"; + private static final String POL_PREFIX = "POL"; + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + + public ErrorInfo() { + this.errorInfoType = ErrorInfoType.OK; + } + + public Integer getCode() { + return code; + } + + public void setCode(Integer code) { + this.code = code; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + public String getMessageId() { + return messageId; + } + + public void setMessageId(String messageId) { + // Determining the type of error + if (messageId == null || "200".equals(messageId) || "201".equals(messageId) || "204".equals(messageId)) { + this.errorInfoType = ErrorInfoType.OK; + } else if (messageId.startsWith(SVC_PREFIX)) { + this.errorInfoType = ErrorInfoType.SERVICE_EXCEPTION; + } else if (messageId.startsWith(POL_PREFIX)) { + this.errorInfoType = ErrorInfoType.POLICY_EXCEPTION; + } else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Error: unexpected error message ID {}, should start with {} or {}", messageId, SVC_PREFIX, POL_PREFIX); + } + this.messageId = messageId; + } + + public ErrorInfoType getErrorInfoType() { + return this.errorInfoType; + } + + public void cloneData(ErrorInfo other) { + this.code = other.getCode(); + this.message = other.getMessage(); + this.messageId = other.getMessageId(); + this.errorInfoType = other.errorInfoType; + } + + @Override + public String toString() { + return "ErrorInfo [code=" + code + ", messageId=" + messageId + ", message=" + message + "]"; + } + + public enum ErrorInfoType { + OK, POLICY_EXCEPTION, SERVICE_EXCEPTION + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ResponseFormatManager.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ResponseFormatManager.java new file mode 100644 index 0000000..ada790f --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/errormng/ResponseFormatManager.java @@ -0,0 +1,103 @@ +package org.onap.sdc.dcae.errormng; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.errormng.ErrorInfo.ErrorInfoType; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class ResponseFormatManager { + + private volatile static ResponseFormatManager instance; + private static ErrorConfiguration errorConfiguration; + private static Map<String, ActionStatus> msgIdToActionStatusMap = new HashMap<>(); + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + + public static ResponseFormatManager getInstance() { + if (instance == null) { + instance = init(); + } + return instance; + } + + private static synchronized ResponseFormatManager init() { + if (instance == null) { + instance = new ResponseFormatManager(); 
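+ // cache the error configuration parsed from error-configuration.yaml and build the reverse messageId -> ActionStatus index used by ErrConfMgr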
+ errorConfiguration = ErrorConfigurationLoader.getErrorConfigurationLoader().getErrorConfiguration(); + convertToActionMap(); + } + return instance; + } + + ResponseFormat getResponseFormat(ActionStatus actionStatus, String notes, String... variables) { + ErrorInfo errorInfo = errorConfiguration.getErrorInfo(actionStatus.name()); + if (errorInfo == null) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "failed to locate {} in error configuration", actionStatus.name()); + errorInfo = errorConfiguration.getErrorInfo(ActionStatus.GENERAL_ERROR.name()); + } + + ResponseFormat responseFormat = new ResponseFormat(errorInfo.getCode()); + String errorMessage = errorInfo.getMessage(); + String errorMessageId = errorInfo.getMessageId(); + ErrorInfoType errorInfoType = errorInfo.getErrorInfoType(); + responseFormat.setNotes(notes); + + if (errorInfoType==ErrorInfoType.SERVICE_EXCEPTION) { + responseFormat.setServiceException(new ServiceException(errorMessageId, errorMessage, variables)); + } + else if (errorInfoType==ErrorInfoType.POLICY_EXCEPTION) { + responseFormat.setPolicyException(new PolicyException(errorMessageId, errorMessage, variables)); + } + else if (errorInfoType==ErrorInfoType.OK) { + responseFormat.setOkResponseInfo(new OkResponseInfo(errorMessageId, errorMessage, variables)); + } + return responseFormat; + } + + ResponseFormat getResponseFormat(BaseException baseException) { + + ResponseFormat responseFormat = new ResponseFormat(baseException.getRawStatusCode()); + AbstractSdncException e = baseException.getRequestError().getError(); + + if (e instanceof ServiceException) { + responseFormat.setServiceException((ServiceException)e); + } + else if (e instanceof PolicyException) { + responseFormat.setPolicyException((PolicyException)e); + } + else { + responseFormat.setOkResponseInfo((OkResponseInfo)e); + } + return responseFormat; + } + + ResponseFormat getResponseFormat(List<ServiceException> errors) { + ResponseFormat responseFormat = new ResponseFormat(400); + responseFormat.setServiceExceptions(errors); + return responseFormat; + } + + public Map<String, ActionStatus> getMsgIdToActionStatusMap() { + return msgIdToActionStatusMap; + } + + private static void convertToActionMap() { + Map<String, ErrorInfo> errors = errorConfiguration.getErrors(); + + if(errors!=null){ + errors.forEach((k, v) -> { + debugLogger.log(LogLevel.DEBUG, ResponseFormatManager.class.getName(), "{}, {}", v.getMessageId(), k); + msgIdToActionStatusMap.put(v.getMessageId(), ActionStatus.valueOf(k)); + }); + } + } + + public ResponseFormatManager(){ + + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/filter/LoggingFilter.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/filter/LoggingFilter.java new file mode 100644 index 0000000..919d244 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/filter/LoggingFilter.java @@ -0,0 +1,247 @@ +package org.onap.sdc.dcae.filter; + +import static java.net.HttpURLConnection.HTTP_BAD_METHOD; +import static java.net.HttpURLConnection.HTTP_BAD_REQUEST; +import static java.net.HttpURLConnection.HTTP_CLIENT_TIMEOUT; +import static java.net.HttpURLConnection.HTTP_CONFLICT; +import static java.net.HttpURLConnection.HTTP_ENTITY_TOO_LARGE; +import static java.net.HttpURLConnection.HTTP_FORBIDDEN; +import static java.net.HttpURLConnection.HTTP_GONE; +import static java.net.HttpURLConnection.HTTP_LENGTH_REQUIRED; +import static java.net.HttpURLConnection.HTTP_NOT_ACCEPTABLE; +import static java.net.HttpURLConnection.HTTP_NOT_FOUND; +import static 
java.net.HttpURLConnection.HTTP_PAYMENT_REQUIRED; +import static java.net.HttpURLConnection.HTTP_PRECON_FAILED; +import static java.net.HttpURLConnection.HTTP_PROXY_AUTH; +import static java.net.HttpURLConnection.HTTP_REQ_TOO_LONG; +import static java.net.HttpURLConnection.HTTP_UNAUTHORIZED; +import static java.net.HttpURLConnection.HTTP_UNSUPPORTED_TYPE; + +import java.io.IOException; +import java.util.Locale; +import java.util.UUID; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.http.impl.EnglishReasonPhraseCatalog; +import org.onap.sdc.common.onaplog.OnapLoggerAudit; +import org.onap.sdc.common.onaplog.OnapMDCWrapper; +import org.onap.sdc.common.onaplog.Enums.OnapLoggerErrorCode; +import org.onap.sdc.common.onaplog.Enums.LogLevel; + +public class LoggingFilter implements Filter { + + private static final String serviceName = "DCAE-D-BE"; + + private OnapMDCWrapper commonLoggerArgs = OnapMDCWrapper.getInstance(); + private OnapLoggerAudit auditLogger = OnapLoggerAudit.getInstance(); + + public LoggingFilter() { + super(); + } + + + @Override + public void destroy() {} + + + @Override + public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain) + throws IOException, ServletException { + + boolean shouldLogRequest = true; + + try { + if (request instanceof HttpServletRequest) { + HttpServletRequest httpRequest = (HttpServletRequest) request; + if (httpRequest.getServletPath().equals("/healthCheck")) { + shouldLogRequest = false; + } + + if (shouldLogRequest) { + beforeHandle(httpRequest); + } + } + } catch (Exception e) { + // TODO: log problem with extracting parameters or writing to log + } + + filterChain.doFilter(request, response); // handle request + + try { + if (response instanceof HttpServletResponse && shouldLogRequest) { + afterHandle((HttpServletResponse) response); + } + } catch (Exception e) { + // TODO: log problem with extracting parameters or writing to log + } + } + + + private void beforeHandle(HttpServletRequest request) { + + String requestId = getRequestId(request); + request.setAttribute("requestId", requestId); // making requestId available for the API controllers + commonLoggerArgs + .clear() + .startTimer() + .setRemoteHost(request.getRemoteAddr()) + .setServiceName(serviceName) + .setPartnerName(getPartnerName(request.getHeader("USER_ID"), request.getHeader("user-agent"))) + .setKeyRequestId(requestId) + .setAutoServerIPAddress(request.getLocalAddr()) + .setOptCustomField1(request.getProtocol()) + .setOptCustomField2(request.getMethod()) + .setOptCustomField3(request.getServletPath()); + + } + + + private static String getRequestId(HttpServletRequest request) { + String requestId = request.getHeader("X-ECOMP-RequestID"); + return isNullOrEmpty(requestId) + ? 
UUID.randomUUID().toString() + : requestId; + } + + + private void afterHandle(HttpServletResponse response) { + String responseDesc = EnglishReasonPhraseCatalog.INSTANCE.getReason(response.getStatus(), Locale.ENGLISH); + commonLoggerArgs + .stopTimer() + .setResponseCode(getLoggingErrorCode(response.getStatus()).getErrorCode()) + .setResponseDesc(responseDesc) + .setOptCustomField4(Integer.toString(response.getStatus())); + + auditLogger + .setStatusCode(Integer.toString(response.getStatus())) + .log(LogLevel.INFO, this.getClass().getName(), responseDesc); + } + + + private OnapLoggerErrorCode getLoggingErrorCode(int httpResponseCode) { + if (isSuccessError(httpResponseCode)) { + return OnapLoggerErrorCode.SUCCESS; + } + else if (isSchemaError(httpResponseCode)) { + return OnapLoggerErrorCode.SCHEMA_ERROR; + } + else if (isDataError(httpResponseCode)) { + return OnapLoggerErrorCode.DATA_ERROR; + } + else if (isPermissionsError(httpResponseCode)) { + return OnapLoggerErrorCode.PERMISSION_ERROR; + } + else if (isTimeoutOrAvailabilityError(httpResponseCode)) { + return OnapLoggerErrorCode.AVAILABILITY_TIMEOUTS_ERROR; + } + else if (isBusinessProcessError(httpResponseCode)) { + return OnapLoggerErrorCode.BUSINESS_PROCESS_ERROR; + } + else { + return OnapLoggerErrorCode.UNKNOWN_ERROR; + } + } + + + private boolean isTimeoutOrAvailabilityError(int httpResponseCode) { + + switch (httpResponseCode) { + case HTTP_BAD_REQUEST: + case HTTP_UNAUTHORIZED: + case HTTP_NOT_FOUND: + case HTTP_CLIENT_TIMEOUT: + case HTTP_GONE: + return true; + } + + return false; + } + + private boolean isPermissionsError(int httpResponseCode) { + + switch (httpResponseCode) { + case HTTP_PAYMENT_REQUIRED: + case HTTP_FORBIDDEN: + case HTTP_BAD_METHOD: + case HTTP_PROXY_AUTH: + return true; + } + + return false; + } + + private boolean isDataError(int httpResponseCode) { + + switch (httpResponseCode) { + case HTTP_NOT_ACCEPTABLE: + case HTTP_LENGTH_REQUIRED: + case HTTP_PRECON_FAILED: + case HTTP_REQ_TOO_LONG: + case HTTP_ENTITY_TOO_LARGE: + case HTTP_UNSUPPORTED_TYPE: + return true; + } + + return false; + } + + private boolean isSchemaError(int httpResponseCode) { + + switch (httpResponseCode) { + case HTTP_CONFLICT: + return true; + } + + return false; + } + + private boolean isSuccessError(int httpResponseCode) { + return httpResponseCode < 400; + } + + private boolean isBusinessProcessError(int httpResponseCode) { + return httpResponseCode > 499; + } + + private String getPartnerName(String userId, String userAgent) { + return (isNullOrEmpty(userId)) + ? 
getClientApplication(userAgent) + : userId; + } + + private String getClientApplication(String userAgent) { + if (userAgent != null && userAgent.length() > 0) { + if (userAgent.toLowerCase().contains("firefox")) { + return "fireFox_FE"; + } + + if (userAgent.toLowerCase().contains("msie")) { + return "explorer_FE"; + } + + if (userAgent.toLowerCase().contains("chrome")) { + return "chrome_FE"; + } + + return userAgent; + } + return ""; + } + + + private static boolean isNullOrEmpty(String str) { + return (str == null || str.isEmpty()); + } + + + @Override + public void init(FilterConfig config) throws ServletException {} +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/enums/ConditionTypeEnum.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/enums/ConditionTypeEnum.java new file mode 100644 index 0000000..e4921e2 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/enums/ConditionTypeEnum.java @@ -0,0 +1,22 @@ +package org.onap.sdc.dcae.rule.editor.enums; + +import java.util.Arrays; + +public enum ConditionTypeEnum { + ALL("And"), ANY("Or"); + + public String getFilterClass() { + return filterClass; + } + + private String filterClass; + + ConditionTypeEnum(String filterClass) { + + this.filterClass = filterClass; + } + + public static ConditionTypeEnum getTypeByName(String name) { + return Arrays.stream(ConditionTypeEnum.values()).filter(type -> name.equalsIgnoreCase(type.name())).findAny().orElse(null); + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/enums/OperatorTypeEnum.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/enums/OperatorTypeEnum.java new file mode 100644 index 0000000..2cd03a7 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/enums/OperatorTypeEnum.java @@ -0,0 +1,32 @@ +package org.onap.sdc.dcae.rule.editor.enums; + +import java.util.Arrays; + +public enum OperatorTypeEnum { + EQUALS("Equals", "OneOf"), + NOT_EQUAL("NotEqual", "NotOneOf"), + CONTAINS("Contains", null), + ENDS_WITH("EndsWith", null), + STARTS_WITH("StartsWith", null); + + private String type; + private String modifiedType; + + OperatorTypeEnum(String type, String modifiedType) { + this.type = type; + this.modifiedType = modifiedType; + } + + public String getType() { + return type; + } + + public String getModifiedType() { + return modifiedType; + } + + public static OperatorTypeEnum getTypeByName(String name) { + return Arrays.stream(OperatorTypeEnum.values()).filter(type -> name.replaceAll(" ", "").equalsIgnoreCase(type.getType())).findAny().orElse(null); + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/enums/RuleEditorElementType.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/enums/RuleEditorElementType.java new file mode 100644 index 0000000..0bec7d8 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/enums/RuleEditorElementType.java @@ -0,0 +1,58 @@ +package org.onap.sdc.dcae.rule.editor.enums; + +import java.util.Arrays; + +import org.onap.sdc.dcae.rule.editor.translators.ConditionGroupTranslator; +import org.onap.sdc.dcae.rule.editor.translators.ConditionTranslator; +import org.onap.sdc.dcae.rule.editor.translators.CopyActionTranslator; +import org.onap.sdc.dcae.rule.editor.translators.DateFormatterTranslator; +import org.onap.sdc.dcae.rule.editor.translators.FieldConditionTranslator; +import org.onap.sdc.dcae.rule.editor.translators.IRuleElementTranslator; +import 
org.onap.sdc.dcae.rule.editor.translators.MapActionTranslator; +import org.onap.sdc.dcae.rule.editor.translators.MappingRulesTranslator; +import org.onap.sdc.dcae.rule.editor.translators.RegexActionTranslator; +import org.onap.sdc.dcae.rule.editor.translators.RuleTranslator; +import org.onap.sdc.dcae.rule.editor.validators.ActionValidator; +import org.onap.sdc.dcae.rule.editor.validators.ConcatActionValidator; +import org.onap.sdc.dcae.rule.editor.validators.ConditionGroupValidator; +import org.onap.sdc.dcae.rule.editor.validators.ConditionValidator; +import org.onap.sdc.dcae.rule.editor.validators.DateFormatterValidator; +import org.onap.sdc.dcae.rule.editor.validators.IRuleElementValidator; +import org.onap.sdc.dcae.rule.editor.validators.MapActionValidator; +import org.onap.sdc.dcae.rule.editor.validators.RuleValidator; + +public enum RuleEditorElementType { + COPY("Copy", ActionValidator.getInstance(), CopyActionTranslator.getInstance()), + CONCAT("Concat", ConcatActionValidator.getInstance(), CopyActionTranslator.getInstance()), + MAP("Map", MapActionValidator.getInstance(), MapActionTranslator.getInstance()), + REGEX("Regex", ActionValidator.getInstance(), RegexActionTranslator.getInstance()), + DATE_FORMATTER("DateFormatter", DateFormatterValidator.getInstance(), DateFormatterTranslator.getInstance()), + CONDITION("Condition", ConditionValidator.getInstance(), ConditionTranslator.getInstance()), + FIELD_CONDITION("FieldCondition", ConditionValidator.getInstance(), FieldConditionTranslator.getInstance()), + CONDITION_GROUP("ConditionGroup", ConditionGroupValidator.getInstance(), ConditionGroupTranslator.getInstance()), + RULE("Rule", RuleValidator.getInstance(), RuleTranslator.getInstance()), + MAPPING_RULES("MappingRules", null, MappingRulesTranslator.getInstance()); + + private String elementType; + private IRuleElementValidator validator; + private IRuleElementTranslator translator; + + public IRuleElementValidator getValidator() { + return validator; + } + + public IRuleElementTranslator getTranslator() { + return translator; + } + + RuleEditorElementType(String elementType, IRuleElementValidator validator, IRuleElementTranslator translator) { + this.elementType = elementType; + this.validator = validator; + this.translator = translator; + } + + public static RuleEditorElementType getElementTypeByName(String name) { + return Arrays.stream(RuleEditorElementType.values()).filter(p -> p.elementType.equalsIgnoreCase(name)) + .findAny().orElse(null); + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/impl/RulesBusinessLogic.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/impl/RulesBusinessLogic.java new file mode 100644 index 0000000..849ad42 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/impl/RulesBusinessLogic.java @@ -0,0 +1,149 @@ +package org.onap.sdc.dcae.rule.editor.impl; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import org.apache.commons.lang3.StringUtils; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.*; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.errormng.ResponseFormat; +import org.onap.sdc.dcae.errormng.ServiceException; +import org.onap.sdc.dcae.rule.editor.translators.MappingRulesTranslator; +import org.onap.sdc.dcae.rule.editor.utils.EmptyStringTranslationSerializer; +import 
org.onap.sdc.dcae.rule.editor.validators.RuleValidator; +import org.springframework.stereotype.Component; +import org.springframework.util.CollectionUtils; + +import java.util.*; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.stream.Collectors; + +@Component +public class RulesBusinessLogic { + + protected OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + private RuleValidator ruleValidator = RuleValidator.getInstance(); + private MappingRulesTranslator mappingRulesTranslator = MappingRulesTranslator.getInstance(); + private static Gson gsonTranslator = new GsonBuilder().registerTypeAdapter(String.class, new EmptyStringTranslationSerializer()).enableComplexMapKeySerialization().create(); + + public List<ServiceException> validateRule(Rule rule) { + List<ResponseFormat> errors = new ArrayList<>(); + if(ruleValidator.validate(rule, errors)) + detectAndResolveActionDependencies(rule, errors); + return errors.stream().map(r -> r.getRequestError().getServiceException()).collect(Collectors.toList()); + } + + public List<ServiceException> validateRules(MappingRules rules) { + List<ResponseFormat> errors = new ArrayList<>(); + detectAndResolveRuleDependencies(rules, errors); + return errors.stream().map(r -> r.getRequestError().getServiceException()).collect(Collectors.toList()); + } + + public String translateRules(MappingRules rules, String entryPointPhase, String lastPhase, String runPhase) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Start translating mapping rules"); + return gsonTranslator.toJson(mappingRulesTranslator.translateToHpJson(rules, entryPointPhase, lastPhase, runPhase)); + } + + public boolean addOrEditRule(MappingRules rules, Rule rule) { + // in case the rule id is passed but the rule doesn't exist on the mapping rule file: + if(StringUtils.isNotBlank(rule.getUid()) && !rules.ruleExists(rule)) + return false; + rules.addOrReplaceRule(rule); + return true; + } + + public Rule deleteRule(MappingRules rules, String ruleUid) { + return rules.removeRule(ruleUid); + } + + private <T> List<T> detectDependentItemsByDependencyDefinition(Collection<T> allItems, BiFunction<T, Collection<T>, Boolean> dependencyDefinition) { + return allItems.stream().filter(i -> dependencyDefinition.apply(i, allItems)).collect(Collectors.toList()); + } + + // if all dependencies are resolvable returns empty list + // else returns list of non resolvable items (circular dependent items) + // iterate through all dependentItems removing resolvable items each iteration. 
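+ // e.g. dependent items with dependencies A->B->C all become resolvable and the list empties, whereas A->B plus B->A never shrinks and is returned as circular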
+ + private <T> List<T> detectCircularDependenciesByDependencyDefinition(List<T> dependentItems, BiFunction<T, Collection<T>, Boolean> dependencyDetector) { + while(!CollectionUtils.isEmpty(dependentItems)) { + List<T> resolvable = dependentItems.stream() + .filter(i -> !dependencyDetector.apply(i, dependentItems)) + .collect(Collectors.toList()); + if(CollectionUtils.isEmpty(resolvable)) + break; + dependentItems.removeAll(resolvable); + } + return dependentItems; + } + + private <T> List<T> reorderItemsByDependencyDefinition(Collection<T> allItems, BiFunction<T, T, Boolean> dependencyDetector) { + List<T> ordered = new ArrayList<>(allItems); + allItems.forEach(i -> { + List<T> dependencies = allItems.stream().filter(o -> dependencyDetector.apply(i, o)).collect(Collectors.toList()); + dependencies.forEach(d -> { + if(ordered.indexOf(d) > ordered.indexOf(i)) { + ordered.remove(d); + ordered.add(ordered.indexOf(i), d); + } + }); + }); + return ordered; + } + + private void detectAndResolveActionDependencies(Rule rule, List<ResponseFormat> errors) { + List<BaseAction> dependentActions = detectDependentItemsByDependencyDefinition(rule.getActions(), BaseAction::hasDependencies); + if(!CollectionUtils.isEmpty(dependentActions)) { + List<BaseAction> nonResolvable = detectCircularDependenciesByDependencyDefinition(dependentActions, BaseAction::hasDependencies); + if (!CollectionUtils.isEmpty(nonResolvable)) { + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.ACTION_DEPENDENCY, null, nonResolvable.stream().map(BaseAction::getTarget).collect(Collectors.joining(", ")))); + return; + } + List<BaseAction> actions = reorderItemsByDependencyDefinition(rule.getActions(), BaseAction::referencesTarget); + rule.setActions(actions); + } + } + + // first identify dependent rules + // if no dependencies found return true + // if non resolvable dependencies found return false + // else reorder and return true + + private void detectAndResolveRuleDependencies(MappingRules rules, List<ResponseFormat> errors) { + List<Rule> dependentRules = detectDependentItemsByDependencyDefinition(rules.getRules().values(), Rule::referencesOtherRules); + if(!CollectionUtils.isEmpty(dependentRules)) { + List<Rule> nonResolvable = detectCircularDependenciesByDependencyDefinition(dependentRules, Rule::referencesOtherRules); + if (!CollectionUtils.isEmpty(nonResolvable)) { + String nonResolvableRuleIds = nonResolvable.stream().map(Rule::getUid).collect(Collectors.joining(", ")); + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.RULE_DEPENDENCY, null, nonResolvableRuleIds, extractDependentActionTargetsFromRules(nonResolvable))); + return; + } + reorderRulesByDependency(rules); + } + } + + private String extractDependentActionTargetsFromRules(List<Rule> dependentRules) { + List<BaseAction> allActions = dependentRules.stream().map(Rule::getActions).flatMap(List::stream).collect(Collectors.toList()); + // option 1: circular dependency between actions + List<BaseAction> nonResolvable = detectCircularDependenciesByDependencyDefinition(allActions, BaseAction::hasDependencies); + if(CollectionUtils.isEmpty(nonResolvable)) + // option 2: circular dependency between rules - collect dependent actions and condition dependencies + nonResolvable = dependentRules.stream() + .map(r -> r.findDependencies(dependentRules)) + .flatMap(List::stream) + .collect(Collectors.toList()); + return nonResolvable.stream() + .map(BaseAction::getTarget) + .collect(Collectors.joining(", ")); + } + + private void 
reorderRulesByDependency(MappingRules rules) { + List<Rule> ordered = reorderItemsByDependencyDefinition(rules.getRules().values(), Rule::referencesOtherRule); + Map<String, Rule> rulesMap = ordered.stream().collect(Collectors.toMap(Rule::getUid, Function.identity(), (u, v) -> { + throw new IllegalStateException(String.format("Duplicate key %s", u)); + }, LinkedHashMap::new)); + rules.setRules(rulesMap); + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/ConditionGroupTranslator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/ConditionGroupTranslator.java new file mode 100644 index 0000000..093c239 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/ConditionGroupTranslator.java @@ -0,0 +1,47 @@ +package org.onap.sdc.dcae.rule.editor.translators; + +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.*; +import org.onap.sdc.dcae.rule.editor.enums.ConditionTypeEnum; +import org.onap.sdc.dcae.rule.editor.enums.OperatorTypeEnum; +import org.onap.sdc.dcae.rule.editor.utils.ValidationUtils; + +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class ConditionGroupTranslator implements IRuleElementTranslator<ConditionGroup> { + + private static ConditionGroupTranslator conditionGroupTranslator = new ConditionGroupTranslator(); + + public static ConditionGroupTranslator getInstance() { + return conditionGroupTranslator; + } + + private ConditionGroupTranslator(){} + + public Translation translateToHpJson(ConditionGroup conditionGroup) { + String clazz = ConditionTypeEnum.getTypeByName(conditionGroup.getType()).getFilterClass(); + FiltersTranslation translation = new FiltersTranslation(clazz, conditionGroup.getChildren().stream() + .map(this::getTranslation) + .collect(Collectors.toList())); + flattenNestedFilters(translation, clazz); + return translation; + } + + + private IRuleElementTranslator getConditionTranslator(BaseCondition condition){ + return condition instanceof ConditionGroup ? ConditionGroupTranslator.getInstance() : + ValidationUtils.validateNotEmpty(OperatorTypeEnum.getTypeByName(((Condition)condition).getOperator()).getModifiedType()) ? 
FieldConditionTranslator.getInstance() : ConditionTranslator.getInstance(); + } + + private Translation getTranslation(BaseCondition condition) { + return getConditionTranslator(condition).translateToHpJson(condition); + } + + private void flattenNestedFilters(FiltersTranslation filtersTranslation, String clazz) { + Map<Boolean, List<Translation>> partitioned = filtersTranslation.filters.stream().collect(Collectors.partitioningBy(f -> clazz.equals(((ProcessorTranslation) f).clazz))); + filtersTranslation.filters.removeAll(partitioned.get(Boolean.TRUE)); + filtersTranslation.filters.addAll(partitioned.get(Boolean.TRUE).stream().map(f -> ((FiltersTranslation) f).filters).flatMap(List::stream).collect(Collectors.toList())); + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/ConditionTranslator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/ConditionTranslator.java new file mode 100644 index 0000000..f93101b --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/ConditionTranslator.java @@ -0,0 +1,40 @@ +package org.onap.sdc.dcae.rule.editor.translators; + +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.Condition; +import org.onap.sdc.dcae.rule.editor.enums.ConditionTypeEnum; +import org.onap.sdc.dcae.rule.editor.enums.OperatorTypeEnum; + +import java.util.stream.Collectors; + +public class ConditionTranslator implements IRuleElementTranslator<Condition> { + + private static ConditionTranslator conditionTranslator = new ConditionTranslator(); + + public static ConditionTranslator getInstance() { + return conditionTranslator; + } + + private ConditionTranslator(){} + + private class StringFilterTranslation extends ProcessorTranslation { + private String string; + private String value; + + private StringFilterTranslation(Condition condition, String value){ + this.clazz = OperatorTypeEnum.getTypeByName(condition.getOperator()).getType(); + this.string = condition.getLeft(); + this.value = value; + } + + private StringFilterTranslation(Condition condition){ + this(condition, condition.getRight().get(0)); + } + } + + public Translation translateToHpJson(Condition condition) { + return 1 == condition.getRight().size() ? 
new StringFilterTranslation(condition) : new FiltersTranslation(ConditionTypeEnum.ANY.getFilterClass(), condition.getRight().stream() + .map(r -> new StringFilterTranslation(condition, r)).collect(Collectors.toList())); + } + + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/CopyActionTranslator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/CopyActionTranslator.java new file mode 100644 index 0000000..9d02c8e --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/CopyActionTranslator.java @@ -0,0 +1,46 @@ +package org.onap.sdc.dcae.rule.editor.translators; + +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.BaseAction; + +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +public class CopyActionTranslator<A extends BaseAction> implements IRuleElementTranslator<A>{ + + private static CopyActionTranslator copyActionTranslator = new CopyActionTranslator(); + + public static CopyActionTranslator getInstance() { + return copyActionTranslator; + } + + CopyActionTranslator(){} + + public Translation translateToHpJson(A action) { + return new CopyActionSetTranslation(action.getTarget(), action.getFromValue()); + } + + void addToHpJsonProcessors(A action, List<Translation> processors) { + processors.add(translateToHpJson(action)); + } + + public boolean addToHpJsonProcessors(A action, List<Translation> processors, boolean asNewProcessor) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Translating {} action. New Processor: {}", action.getActionType(), asNewProcessor); + if(asNewProcessor) + addToHpJsonProcessors(action, processors); + else + ((CopyActionSetTranslation) processors.get(processors.size()-1)).updates.put(action.getTarget(), action.getFromValue()); + // always false: a consecutive copy action is folded into the same Set processor + return false; + } + + class CopyActionSetTranslation extends ProcessorTranslation { + Map<String, String> updates = new LinkedHashMap<>(); + CopyActionSetTranslation(String target, String from) { + clazz = "Set"; + updates.put(target, from); + } + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/DateFormatterTranslator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/DateFormatterTranslator.java new file mode 100644 index 0000000..89f0def --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/DateFormatterTranslator.java @@ -0,0 +1,49 @@ +package org.onap.sdc.dcae.rule.editor.translators; + +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import java.util.List; + +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.DateFormatterAction; + +public class DateFormatterTranslator extends CopyActionTranslator<DateFormatterAction> { + + private static DateFormatterTranslator dateFormatterTranslator = new DateFormatterTranslator(); + + public static DateFormatterTranslator getInstance() { + return dateFormatterTranslator; + } + + private DateFormatterTranslator(){} + + private class DateFormatterTranslation extends ProcessorTranslation { + private String fromFormat; + private String fromTz; + private String toField; + private String toFormat; + private String toTz; + private String value; + + private DateFormatterTranslation(DateFormatterAction action){ + clazz = "DateFormatter"; + fromFormat = action.getFromFormat(); + fromTz = action.getFromTz(); + toField =
action.getTarget(); + toFormat = action.getToFormat(); + toTz = action.getToTz(); + value = action.getFromValue(); + } + } + + @Override + public boolean addToHpJsonProcessors(DateFormatterAction action, List<Translation> processors, boolean asNewProcessor) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Translating date formatter action"); + addToHpJsonProcessors(action, processors); + return true; + } + + @Override + public Translation translateToHpJson(DateFormatterAction action){ + return new DateFormatterTranslation(action); + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/FieldConditionTranslator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/FieldConditionTranslator.java new file mode 100644 index 0000000..ef2949e --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/FieldConditionTranslator.java @@ -0,0 +1,43 @@ +package org.onap.sdc.dcae.rule.editor.translators; + +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.Condition; +import org.onap.sdc.dcae.rule.editor.enums.OperatorTypeEnum; + +import java.util.List; + +public class FieldConditionTranslator implements IRuleElementTranslator<Condition> { + + private static FieldConditionTranslator fieldConditionTranslator = new FieldConditionTranslator(); + + public static FieldConditionTranslator getInstance() { + return fieldConditionTranslator; + } + + private FieldConditionTranslator(){} + + private class FieldFilterTranslation extends ProcessorTranslation { + private String field; + private String value; + + private FieldFilterTranslation(Condition condition) { + clazz = OperatorTypeEnum.getTypeByName(condition.getOperator()).getType(); + field = condition.getLeft(); + value = condition.getRight().get(0); + } + } + + private class MultiFieldFilterTranslation extends ProcessorTranslation { + private String field; + private List<String> values; + + private MultiFieldFilterTranslation(Condition condition) { + field = condition.getLeft(); + values = condition.getRight(); + clazz = OperatorTypeEnum.getTypeByName(condition.getOperator()).getModifiedType(); + } + } + + public Translation translateToHpJson(Condition condition) { + return 1 == condition.getRight().size() ? 
new FieldFilterTranslation(condition) : new MultiFieldFilterTranslation(condition); + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/IRuleElementTranslator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/IRuleElementTranslator.java new file mode 100644 index 0000000..dac818d --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/IRuleElementTranslator.java @@ -0,0 +1,50 @@ +package org.onap.sdc.dcae.rule.editor.translators; + +import com.google.gson.annotations.SerializedName; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; + +import java.util.ArrayList; +import java.util.List; + +public interface IRuleElementTranslator<T> { + + OnapLoggerError errLogger = OnapLoggerError.getInstance(); + OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + Translation translateToHpJson(T element); + + abstract class Translation { + } + + class ProcessorTranslation extends Translation { + @SerializedName("class") + protected String clazz; + } + + + class FiltersTranslation extends ProcessorTranslation { + protected List<Translation> filters; + + protected FiltersTranslation(String clazz, List<Translation> filters) { + this.clazz = clazz; + this.filters = filters; + } + } + + class RuleTranslation extends Translation { + protected String phase; + protected Translation filter; + protected List<Translation> processors = new ArrayList<>(); + } + + class RunPhaseProcessorsTranslation extends ProcessorTranslation { + protected String phase; + + protected RunPhaseProcessorsTranslation(String runPhase){ + clazz ="RunPhase"; + phase = runPhase; + } + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/MapActionTranslator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/MapActionTranslator.java new file mode 100644 index 0000000..922312e --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/MapActionTranslator.java @@ -0,0 +1,50 @@ +package org.onap.sdc.dcae.rule.editor.translators; + +import com.google.gson.annotations.SerializedName; + +import org.onap.sdc.common.onaplog.Enums.LogLevel; + +import java.util.List; +import java.util.Map; + +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.MapAction; + +public class MapActionTranslator extends CopyActionTranslator<MapAction> { + + private static MapActionTranslator mapActionTranslator = new MapActionTranslator(); + + public static MapActionTranslator getInstance() { + return mapActionTranslator; + } + + private MapActionTranslator(){} + + private class MapActionTranslation extends ProcessorTranslation { + + private Map<String, String> map; + private String field; + private String toField; + @SerializedName("default") + private String Default; + + private MapActionTranslation(MapAction action) { + clazz = "MapAlarmValues"; + Default = action.getMapDefaultValue(); + field = action.getFromValue(); + toField = action.getTarget(); + map = action.transformToMap(); + } + } + + @Override + public Translation translateToHpJson(MapAction action) { + return new MapActionTranslation(action); + } + + @Override + public boolean addToHpJsonProcessors(MapAction action, List<Translation> processors, boolean asNewProcessor) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Translating map action"); + addToHpJsonProcessors(action, processors); + return true; + } +} diff --git 
a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/MappingRulesTranslator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/MappingRulesTranslator.java new file mode 100644 index 0000000..0164446 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/MappingRulesTranslator.java @@ -0,0 +1,69 @@ +package org.onap.sdc.dcae.rule.editor.translators; + +import java.util.List; +import java.util.stream.Collectors; + +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.MappingRules; + +public class MappingRulesTranslator implements IRuleElementTranslator<MappingRules> { + + private static MappingRulesTranslator mappingRulesTranslator = new MappingRulesTranslator(); + + public static MappingRulesTranslator getInstance() { + return mappingRulesTranslator; + } + + private MappingRulesTranslator() { + } + + private RuleTranslator ruleTranslator = RuleTranslator.getInstance(); + + public Translation translateToHpJson(MappingRules mappingRules) { + return new MappingRulesTranslation(mappingRules); + } + + public Translation translateToHpJson(MappingRules mappingRules, String entryPointPhaseName, String lastPhaseName, String runPhase) { + // 1806 US349308 assign Vfcmt name as rule phaseName + mappingRules.getRules().forEach((k,v) -> v.setPhase(runPhase)); + return new MappingRulesTranslation(mappingRules, entryPointPhaseName, lastPhaseName, runPhase); + } + + private class MappingRulesTranslation extends Translation { + + private List<Translation> processing; + + private MappingRulesTranslation(MappingRules mappingRules) { + processing = mappingRules.getRules().values().stream().map(ruleTranslator::translateToHpJson).collect(Collectors.toList()); + } + + private MappingRulesTranslation(MappingRules mappingRules, String entryPointPhaseName, String lastPhaseName, String runPhase) { + this(mappingRules); + //hardcoded entry point processor + processing.add(0, new RunPhaseRuleTranslation(entryPointPhaseName, runPhase)); + //hardcoded map_publish processor + processing.add(new RunPhaseRuleTranslation(runPhase, lastPhaseName)); + } + } + + private class RunPhaseRuleTranslation extends RuleTranslation { + + private RunPhaseRuleTranslation(String phaseName, String runPhase) { + phase = phaseName; + if ("snmp_map".equals(phaseName)) + processors.add(new SnmpConvertor()); + processors.add(new RunPhaseProcessorsTranslation(runPhase)); + } + } + + // hardcoded SNMP processor + private class SnmpConvertor extends ProcessorTranslation { + private String array = "varbinds"; + private String datacolumn = "varbind_value"; + private String keycolumn = "varbind_oid"; + + private SnmpConvertor() { + clazz = "SnmpConvertor"; + } + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/RegexActionTranslator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/RegexActionTranslator.java new file mode 100644 index 0000000..c49a04e --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/RegexActionTranslator.java @@ -0,0 +1,44 @@ +package org.onap.sdc.dcae.rule.editor.translators; + +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import java.util.List; + +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.BaseAction; + +public class RegexActionTranslator extends CopyActionTranslator<BaseAction> { + + private static RegexActionTranslator regexActionTranslator = new RegexActionTranslator(); + + public static RegexActionTranslator getInstance() { + return 
regexActionTranslator; + } + + private RegexActionTranslator(){} + + private class RegexCopyActionTranslation extends ProcessorTranslation { + + private String regex; + private String field; + private String value; + + private RegexCopyActionTranslation(BaseAction action) { + clazz = "ExtractText"; + regex = action.getRegexValue(); + field = action.getTarget(); + value = action.getFromValue(); + } + } + + @Override + public boolean addToHpJsonProcessors(BaseAction action, List<Translation> processors, boolean asNewProcessor) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Translating copy action as regex action"); + addToHpJsonProcessors(action, processors); + return true; + } + + @Override + public Translation translateToHpJson(BaseAction action) { + return new RegexCopyActionTranslation(action); + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/RuleTranslator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/RuleTranslator.java new file mode 100644 index 0000000..f7dea47 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/translators/RuleTranslator.java @@ -0,0 +1,51 @@ +package org.onap.sdc.dcae.rule.editor.translators; + +import com.google.gson.Gson; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.*; +import org.onap.sdc.dcae.rule.editor.enums.OperatorTypeEnum; +import org.onap.sdc.dcae.rule.editor.enums.RuleEditorElementType; +import org.onap.sdc.dcae.rule.editor.utils.ValidationUtils; + +public class RuleTranslator implements IRuleElementTranslator<Rule> { + + private static RuleTranslator ruleTranslator = new RuleTranslator(); + + public static RuleTranslator getInstance() { + return ruleTranslator; + } + + private RuleTranslator() { + } + + private class ActionRuleTranslation extends RuleTranslation { + private ActionRuleTranslation(Rule rule) { + phase = rule.getPhase(); + filter = rule.isConditionalRule() ? getConditionTranslator(rule.getCondition()).translateToHpJson(rule.getCondition()) : null; + boolean asNewProcessor = true; + for (BaseAction action : rule.getActions()) { + // consecutive copy actions are aggregated into a single processor + asNewProcessor = getActionTranslator(action).addToHpJsonProcessors(action, processors, asNewProcessor); + } + } + } + + public Translation translateToHpJson(Rule rule) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Start translating rule {}", rule.getUid()); + Translation translation = new ActionRuleTranslation(rule); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Finished translation for rule {}. Result: {}", rule.getUid(), new Gson().toJson(translation)); + return translation; + } + + private IRuleElementTranslator getConditionTranslator(BaseCondition condition){ + return condition instanceof ConditionGroup ? ConditionGroupTranslator.getInstance() : + ValidationUtils.validateNotEmpty(OperatorTypeEnum.getTypeByName(((Condition)condition).getOperator()).getModifiedType()) ? 
FieldConditionTranslator.getInstance() : ConditionTranslator.getInstance(); + } + + private CopyActionTranslator getActionTranslator(BaseAction action) { + ActionTypeEnum type = ActionTypeEnum.getTypeByName(action.getActionType()); + if(ActionTypeEnum.COPY == type && ValidationUtils.validateNotEmpty(action.getRegexValue())) + return RegexActionTranslator.getInstance(); + return (CopyActionTranslator)RuleEditorElementType.getElementTypeByName(type.getType()).getTranslator(); + } +}
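+// Illustrative usage sketch (comment only; the method and class names exist in this
+// commit, while the literal phase values below are sample assumptions): the translator
+// chain above is driven by RulesBusinessLogic.translateRules(rules, entryPointPhase, lastPhase, runPhase), e.g.
+// MappingRules rules = RulesPayloadUtils.parseMappingRulesArtifactPayload(payload);
+// String hpJson = rulesBusinessLogic.translateRules(rules, "snmp_map", "map_publish", vfcmtName);
+// RuleTranslator emits one processor chain per rule, and MappingRulesTranslator prepends the
+// hardcoded entry point phase and appends a RunPhase step from runPhase to lastPhase.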
\ No newline at end of file diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/utils/EmptyStringTranslationSerializer.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/utils/EmptyStringTranslationSerializer.java new file mode 100644 index 0000000..c65076f --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/utils/EmptyStringTranslationSerializer.java @@ -0,0 +1,14 @@ +package org.onap.sdc.dcae.rule.editor.utils; + +import com.google.gson.*; + +import java.lang.reflect.Type; + +public class EmptyStringTranslationSerializer implements JsonSerializer<String> { + + public JsonElement serialize(String src, Type typeOfSrc, JsonSerializationContext context) { + if("\"\"".equals(src)) + return new JsonPrimitive(""); + return new JsonPrimitive(src); + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/utils/RulesPayloadUtils.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/utils/RulesPayloadUtils.java new file mode 100644 index 0000000..33f9e92 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/utils/RulesPayloadUtils.java @@ -0,0 +1,38 @@ +package org.onap.sdc.dcae.rule.editor.utils; + +import java.util.List; + +import org.onap.sdc.dcae.composition.restmodels.sdc.Artifact; +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.*; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonParseException; + +public class RulesPayloadUtils { + private static Gson gson = new GsonBuilder().serializeNulls() + .registerTypeAdapter(BaseAction.class, new ActionDeserializer()) + .registerTypeAdapter(BaseCondition.class, new ConditionDeserializer()).create(); + + public static Rule parsePayloadToRule(String payload) throws JsonParseException { + return gson.fromJson(payload, Rule.class); + } + + public static MappingRules parseMappingRulesArtifactPayload(String payload) throws JsonParseException { + return gson.fromJson(payload, MappingRules.class); + } + + public static SchemaInfo extractInfoFromDescription(Artifact rulesArtifact) { + try { + return gson.fromJson(rulesArtifact.getArtifactDescription(), SchemaInfo.class); + }catch (JsonParseException e) { + return null; + } + } + + public static String buildSchemaAndRulesResponse(String payload, List<EventTypeDefinitionUI> schema) { + return "{\"schema\":"+gson.toJson(schema)+","+payload.replaceFirst("\\{", ""); + } + + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/utils/ValidationUtils.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/utils/ValidationUtils.java new file mode 100644 index 0000000..7a3b206 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/utils/ValidationUtils.java @@ -0,0 +1,20 @@ +package org.onap.sdc.dcae.rule.editor.utils; + +import org.apache.commons.lang3.StringUtils; + + +public class ValidationUtils { + + private static final String EXPLICIT_EMPTY = "\"\""; + + public static boolean validateNotEmpty(String value){ + return StringUtils.isNoneBlank(value); + } + + public static boolean validateTargetField(String value) { + return validateNotEmpty(value) && !EXPLICIT_EMPTY.equals(value); + } + + + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/ActionValidator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/ActionValidator.java new file mode 100644 index 0000000..3eb0eb5 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/ActionValidator.java @@ 
-0,0 +1,40 @@ +package org.onap.sdc.dcae.rule.editor.validators; + +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.BaseAction; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.errormng.ResponseFormat; +import org.onap.sdc.dcae.rule.editor.utils.ValidationUtils; + +import java.util.List; + +public class ActionValidator<A extends BaseAction> implements IRuleElementValidator<A> { + + private static ActionValidator actionValidator = new ActionValidator(); + + public static ActionValidator getInstance() { + return actionValidator; + } + + ActionValidator(){} + + public boolean validate(A action, List<ResponseFormat> errors) { + + // validate from is populated + boolean valid = validateFromValue(action, errors); + //validate target is populated + if (!ValidationUtils.validateTargetField(action.getTarget())) { + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_ACTION_FIELD, null, "target", action.getActionType(), action.getTarget())); + } + return valid; + } + + protected boolean validateFromValue(A action, List<ResponseFormat> errors) { + if(!ValidationUtils.validateNotEmpty(action.getFromValue())) { + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_ACTION_FIELD, null, "from", action.getActionType(), action.getTarget())); + return false; + } + return true; + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/ConcatActionValidator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/ConcatActionValidator.java new file mode 100644 index 0000000..965c898 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/ConcatActionValidator.java @@ -0,0 +1,29 @@ +package org.onap.sdc.dcae.rule.editor.validators; + +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.BaseAction; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.errormng.ResponseFormat; +import org.onap.sdc.dcae.rule.editor.utils.ValidationUtils; + +import java.util.List; + +public class ConcatActionValidator extends ActionValidator<BaseAction> { + + private static ConcatActionValidator concatActionValidator = new ConcatActionValidator(); + + public static ConcatActionValidator getInstance() { + return concatActionValidator; + } + + private ConcatActionValidator(){} + + @Override + protected boolean validateFromValue(BaseAction action, List<ResponseFormat> errors) { + if(!ValidationUtils.validateNotEmpty(action.getFromValue()) || 2 > action.getFromValues().size()) { + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_CONCAT_VALUE, null, action.getTarget())); + return false; + } + return true; + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/ConditionGroupValidator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/ConditionGroupValidator.java new file mode 100644 index 0000000..995a817 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/ConditionGroupValidator.java @@ -0,0 +1,40 @@ +package org.onap.sdc.dcae.rule.editor.validators; + +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.ConditionGroup; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.errormng.ResponseFormat; +import org.onap.sdc.dcae.rule.editor.enums.ConditionTypeEnum; +import 
org.onap.sdc.dcae.rule.editor.enums.RuleEditorElementType; +import org.onap.sdc.dcae.rule.editor.utils.ValidationUtils; +import org.springframework.util.CollectionUtils; + +import java.util.List; + +public class ConditionGroupValidator implements IRuleElementValidator<ConditionGroup> { + + private static ConditionGroupValidator conditionGroupValidator = new ConditionGroupValidator(); + + public static ConditionGroupValidator getInstance() { + return conditionGroupValidator; + } + + private ConditionGroupValidator(){} + + public boolean validate(ConditionGroup condition, List<ResponseFormat> errors) { + boolean valid = true; + if(!ValidationUtils.validateNotEmpty(condition.getType()) || null == ConditionTypeEnum.getTypeByName(condition.getType())) { + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.INVALID_GROUP_CONDITION, null, condition.getType())); + } + if(CollectionUtils.isEmpty(condition.getChildren()) || 2 > condition.getChildren().size()) { + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_CONDITION_ITEM, null, null)); + } else { + valid = condition.getChildren().stream() + .map(c -> RuleEditorElementType.getElementTypeByName(c.getClass().getSimpleName()).getValidator().validate(c, errors)) + .reduce(true, (x,y) -> x && y) && valid; + } + return valid; + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/ConditionValidator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/ConditionValidator.java new file mode 100644 index 0000000..1b4ae94 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/ConditionValidator.java @@ -0,0 +1,40 @@ +package org.onap.sdc.dcae.rule.editor.validators; + +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.Condition; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.errormng.ResponseFormat; +import org.onap.sdc.dcae.rule.editor.enums.OperatorTypeEnum; +import org.onap.sdc.dcae.rule.editor.utils.ValidationUtils; +import org.springframework.util.CollectionUtils; + +import java.util.List; + +public class ConditionValidator implements IRuleElementValidator<Condition> { + + private static ConditionValidator conditionValidator = new ConditionValidator(); + + public static ConditionValidator getInstance() { + return conditionValidator; + } + + private ConditionValidator(){} + + public boolean validate(Condition condition, List<ResponseFormat> errors) { + boolean valid = true; + if(!ValidationUtils.validateNotEmpty(condition.getLeft())) { + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_OPERAND, null, "left")); + } + if(CollectionUtils.isEmpty(condition.getRight())) { + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_OPERAND, null, "right")); + } + if(!ValidationUtils.validateNotEmpty(condition.getOperator()) || null == OperatorTypeEnum.getTypeByName(condition.getOperator())) { + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.INVALID_OPERATOR, null, condition.getOperator())); + } + return valid; + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/DateFormatterValidator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/DateFormatterValidator.java new file mode 100644 index 0000000..d5ec0fc --- /dev/null +++ 
b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/DateFormatterValidator.java @@ -0,0 +1,41 @@ +package org.onap.sdc.dcae.rule.editor.validators; + +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.DateFormatterAction; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.errormng.ResponseFormat; +import org.onap.sdc.dcae.rule.editor.utils.ValidationUtils; + +import java.util.List; + +public class DateFormatterValidator extends ActionValidator<DateFormatterAction> { + private static DateFormatterValidator dateFormatterValidator = new DateFormatterValidator(); + + public static DateFormatterValidator getInstance() { + return dateFormatterValidator; + } + + private DateFormatterValidator(){} + + @Override + public boolean validate(DateFormatterAction action, List<ResponseFormat> errors) { + boolean valid = super.validate(action, errors); + if(!ValidationUtils.validateNotEmpty(action.getFromFormat())){ + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_ACTION_FIELD, null, "from format", action.getActionType(), action.getTarget())); + } + if(!ValidationUtils.validateNotEmpty(action.getFromTz())){ + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_ACTION_FIELD, null, "from timezone", action.getActionType(), action.getTarget())); + } + if(!ValidationUtils.validateNotEmpty(action.getToFormat())){ + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_ACTION_FIELD, null, "to format", action.getActionType(), action.getTarget())); + } + if(!ValidationUtils.validateNotEmpty(action.getToTz())){ + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_ACTION_FIELD, null, "to timezone", action.getActionType(), action.getTarget())); + } + return valid; + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/IRuleElementValidator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/IRuleElementValidator.java new file mode 100644 index 0000000..dd1eaf4 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/IRuleElementValidator.java @@ -0,0 +1,9 @@ +package org.onap.sdc.dcae.rule.editor.validators; + +import org.onap.sdc.dcae.errormng.ResponseFormat; + +import java.util.List; + +public interface IRuleElementValidator <T> { + boolean validate(T element, List<ResponseFormat> errors); +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/MapActionValidator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/MapActionValidator.java new file mode 100644 index 0000000..8cbcaa8 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/MapActionValidator.java @@ -0,0 +1,49 @@ +package org.onap.sdc.dcae.rule.editor.validators; + +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.MapAction; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.errormng.ResponseFormat; +import org.onap.sdc.dcae.rule.editor.utils.ValidationUtils; +import org.springframework.util.CollectionUtils; + +import java.util.List; + +public class MapActionValidator extends ActionValidator<MapAction> { + + private static MapActionValidator mapActionValidator = new MapActionValidator(); + + public static MapActionValidator getInstance() { + return mapActionValidator; + } + + private 
MapActionValidator(){} + + @Override + public boolean validate(MapAction action, List<ResponseFormat> errors) { + boolean valid = super.validate(action, errors); + if (action.getMap() == null || CollectionUtils.isEmpty(action.getMapValues())) { + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_ENTRY, null, action.getTarget())); + } else { + if (action.mapHasDefault() && !ValidationUtils.validateNotEmpty(action.getMapDefaultValue())) { + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_DEFAULT_VALUE, null, action.getTarget())); + } + try { + if (!validateMapValues(action)) { + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_ENTRY, null, action.getTarget())); + } + } catch (IllegalStateException err) { + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.DUPLICATE_KEY, null)); + } + } + return valid; + } + + private boolean validateMapValues(MapAction action) { + return action.transformToMap().entrySet().stream().noneMatch(p -> !ValidationUtils.validateNotEmpty(p.getKey()) || !ValidationUtils.validateNotEmpty(p.getValue())); + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/RuleValidator.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/RuleValidator.java new file mode 100644 index 0000000..371d1e9 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/rule/editor/validators/RuleValidator.java @@ -0,0 +1,56 @@ +package org.onap.sdc.dcae.rule.editor.validators; + +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.ActionTypeEnum; +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.BaseAction; +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.BaseCondition; +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.Rule; +import org.onap.sdc.dcae.errormng.ActionStatus; +import org.onap.sdc.dcae.errormng.ErrConfMgr; +import org.onap.sdc.dcae.errormng.ResponseFormat; +import org.onap.sdc.dcae.errormng.ServiceException; +import org.onap.sdc.dcae.rule.editor.enums.RuleEditorElementType; +import org.onap.sdc.dcae.rule.editor.utils.ValidationUtils; +import org.springframework.util.CollectionUtils; + +import java.util.List; + +public class RuleValidator implements IRuleElementValidator<Rule> { + + private static RuleValidator ruleValidator = new RuleValidator(); + + public static RuleValidator getInstance() { + return ruleValidator; + } + + private RuleValidator(){} + + + public boolean validate(Rule rule, List<ResponseFormat> errors) { + boolean valid = true; + if(rule.isConditionalRule()) + valid = getConditionValidator(rule.getCondition()).validate(rule.getCondition(), errors); + if(!ValidationUtils.validateNotEmpty(rule.getDescription())) { + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_RULE_DESCRIPTION, null, null)); + } + if(CollectionUtils.isEmpty(rule.getActions())) { + valid = false; + errors.add(ErrConfMgr.INSTANCE.getResponseFormat(ActionStatus.MISSING_ACTION, null, null)); + } else { + valid = rule.getActions().stream() + .map(a -> getActionValidator(a).validate(a, errors)) + .reduce(true, (x,y) -> x && y) && valid; + } + return valid; + } + + + private IRuleElementValidator getActionValidator(BaseAction action) { + ActionTypeEnum type = ActionTypeEnum.getTypeByName(action.getActionType()); + return RuleEditorElementType.getElementTypeByName(type.getType()).getValidator(); + } + + private IRuleElementValidator 
getConditionValidator(BaseCondition condition) { + return RuleEditorElementType.getElementTypeByName(condition.getClass().getSimpleName()).getValidator(); + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/EventListenerDefinition.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/EventListenerDefinition.java new file mode 100644 index 0000000..cc5bec1 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/EventListenerDefinition.java @@ -0,0 +1,101 @@ +package org.onap.sdc.dcae.ves; + +import com.google.gson.Gson; +import org.apache.commons.lang.StringUtils; + +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +public class EventListenerDefinition extends VesDataTypeDefinition { + + public static final String EVENT_ROOT = "event"; + private String $schema; + private Map<String, VesDataTypeDefinition> definitions; + + public String get$schema() { + return $schema; + } + + public void set$schema(String $schema) { + this.$schema = $schema; + } + + public Map<String, VesDataTypeDefinition> getDefinitions() { + return definitions; + } + + public void setDefinitions(Map<String, VesDataTypeDefinition> definitions) { + this.definitions = definitions; + } + + // returns error message detailing unresolvable types - or null (success) + public String resolveRefTypes() { + + Predicate<Map.Entry<String, VesDataTypeDefinition>> isFullyResolved = dt -> !dt.getValue().containsAnyReferenceItem(); + Map<String, VesDataTypeDefinition> resolved = definitions.entrySet().stream() + .filter(isFullyResolved) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + + int initialUnresolvedItems = -1; + int remainingUnresolvedItems = 0; + + while (resolved.size() != definitions.size() && initialUnresolvedItems != remainingUnresolvedItems) { + initialUnresolvedItems = definitions.size() - resolved.size(); + definitions.entrySet().forEach(definition -> { + if (!resolved.containsKey(definition.getKey()) && definition.getValue().isResolvable(resolved)) { + definition.getValue().resolveAllReferences(resolved); + resolved.put(definition.getKey(), definition.getValue()); + } + }); + remainingUnresolvedItems = definitions.size() - resolved.size(); + } + + if (resolved.size() != definitions.size()) { + definitions.keySet().removeAll(resolved.keySet()); + return constructErrorMessage(definitions.keySet()); + } + return resolveRootRefTypes(); + + } + + private String constructErrorMessage(Set<String> unresolvable) { + return "the following definitions contain unresolvable references: " + new Gson().toJson(unresolvable); + } + + private String resolveRootRefTypes() { + Set<String> unresolvable = new HashSet<>(); + getProperties().forEach((k, v) -> { + // resolve each root property against the (by now fully resolved) definitions map + if (v.isResolvable(definitions)) + v.resolveAllReferences(definitions); + else + unresolvable.add(k); + }); + return unresolvable.isEmpty() ? null : constructErrorMessage(unresolvable); + + } + + @Override + public String validate() { + String error = getProperties().containsKey(EVENT_ROOT) ?
null : "schema not containing property: event"; + if (StringUtils.isBlank(error)) + error = super.validate(); + if (StringUtils.isBlank(error)) + error = validateDefinitions(); + return error; + } + + private String validateDefinitions() { + String error = null; + for (VesDataTypeDefinition def : definitions.values()) { + if (StringUtils.isBlank(error)) + error = def.validate(); + else + break; + } + return error; + } + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesDataItemsDefinition.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesDataItemsDefinition.java new file mode 100644 index 0000000..ad1b2f9 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesDataItemsDefinition.java @@ -0,0 +1,9 @@ +package org.onap.sdc.dcae.ves; + +import java.util.ArrayList; + +// json 'items' value can be either a single object or an array. customized POJO will always be an array +public class VesDataItemsDefinition extends ArrayList<VesDataTypeDefinition> { + + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesDataTypeDefinition.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesDataTypeDefinition.java new file mode 100644 index 0000000..5465d62 --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesDataTypeDefinition.java @@ -0,0 +1,270 @@ +package org.onap.sdc.dcae.ves; + +import com.google.gson.JsonElement; +import com.google.gson.annotations.SerializedName; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections.MapUtils; +import org.apache.commons.lang.StringUtils; + +import java.util.*; + +public class VesDataTypeDefinition { + + private static final String jsonReferencePrefix = "#/definitions/"; + private String type; + private String description; + private String format; + private String title; + private Map<String, VesDataTypeDefinition> properties; + private List<String> required = new ArrayList<>(); + @SerializedName("enum") + private List<String> enums; + @SerializedName("default") + private JsonElement defaultValue; + private VesDataItemsDefinition items; + @SerializedName("$ref") + private String ref; + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getFormat() { + return format; + } + + public void setFormat(String format) { + this.format = format; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public Map<String, VesDataTypeDefinition> getProperties() { + return properties; + } + + public void setProperties(Map<String, VesDataTypeDefinition> properties) { + this.properties = properties; + } + + public List<String> getRequired() { + return required; + } + + public void setRequired(List<String> required) { + this.required = required; + } + + public List<String> getEnums() { + return enums; + } + + public void setEnums(List<String> enums) { + this.enums = enums; + } + + public JsonElement getDefaultValue() { + return defaultValue; + } + + public void setDefaultValue(JsonElement defaultValue) { + this.defaultValue = defaultValue; + } + + public VesDataItemsDefinition getItems() { + return items; + } + + public void setItems(VesDataItemsDefinition items) { + this.items = items; + } + + public String getRef() { + return ref; + } + + public void setRef(String ref) { + this.ref = 
ref; + } + + protected boolean hasReference() { + return StringUtils.isNotBlank(getRef()); + } + + protected boolean itemsContainReference() { + return CollectionUtils.isNotEmpty(getItems()) && getItems().stream().anyMatch(VesDataTypeDefinition::containsAnyReferenceItem); + } + + protected boolean propertiesContainReference() { + return MapUtils.isNotEmpty(getProperties()) && getProperties().values().stream().anyMatch(VesDataTypeDefinition::containsAnyReferenceItem); + } + + protected boolean containsAnyReferenceItem() { + return hasReference() || itemsContainReference() || propertiesContainReference(); + } + + protected String getJsonRefPointer() { + return getRef().replace(jsonReferencePrefix, ""); + } + + private void addReferenceItem(Set<String> allRefs) { + if (hasReference()) { + allRefs.add(getJsonRefPointer()); + } + } + + private Set<String> extractAllReferenceTokens() { + Set<String> allRefs = new HashSet<>(); + extractReferenceTokens(allRefs); + return allRefs; + } + + private void extractReferenceTokens(Set<String> allRefs) { + + addReferenceItem(allRefs); + if (itemsContainReference()) { + getItems().forEach(item -> item.extractReferenceTokens(allRefs)); + } + if (propertiesContainReference()) { + getProperties().values().forEach(property -> property.extractReferenceTokens(allRefs)); + } + } + + protected boolean isResolvable(Map<String, VesDataTypeDefinition> resolvedTypes) { + return resolvedTypes.keySet().containsAll(extractAllReferenceTokens()); + } + + private void resolveReference(Map<String, VesDataTypeDefinition> resolvedTypes) { + if (hasReference()) { + VesDataTypeDefinition other = resolvedTypes.get(getJsonRefPointer()); + setType(other.getType()); + setRef(other.getRef()); + setDefaultValue(other.getDefaultValue()); + setDescription(other.getDescription()); + setEnums(other.getEnums()); + setProperties(other.getProperties()); + setFormat(other.getFormat()); + setRequired(other.getRequired()); + setItems(other.getItems()); + setTitle(other.getTitle()); + } + } + + private void resolveItemReferences(Map<String, VesDataTypeDefinition> resolvedTypes) { + if (itemsContainReference()) { + for (VesDataTypeDefinition item : getItems()) { + item.resolveAllReferences(resolvedTypes); + } + } + } + + private void resolvePropertyReferences(Map<String, VesDataTypeDefinition> resolvedTypes) { + if (propertiesContainReference()) { + for (VesDataTypeDefinition property : getProperties().values()) { + property.resolveAllReferences(resolvedTypes); + } + } + } + + // the reference resolver is called on each VesDataTypeDefinition after it passes the 'isResolvable' validation, affirming that all its references(direct/properties/items) point to a resolved VesDataTypeDefinition (has no references) + protected void resolveAllReferences(Map<String, VesDataTypeDefinition> resolvedTypes) { + resolveReference(resolvedTypes); + resolveItemReferences(resolvedTypes); + resolvePropertyReferences(resolvedTypes); + } + + private String validateType() { + return null == type? null : VesSimpleTypesEnum.getSimpleTypes().contains(type) ? null : "invalid type declaration: " + type; + } + + private String validateRequired() { + String invalid = null == type? null : !type.equals(VesSimpleTypesEnum.OBJECT.getType()) ? null : required.stream().filter(r -> !properties.keySet().contains(r)).findAny().orElse(null); + return StringUtils.isBlank(invalid) ? 
invalid : "invalid required entry: " + invalid; + } + + // returns error message detailing invalid 'type' or 'required' fields (null for success) + protected String validate() { + String error = validateType(); + if (StringUtils.isBlank(error)) + error = validateRequired(); + if (StringUtils.isBlank(error) && CollectionUtils.isNotEmpty(items)) + error = validateItems(); + if(StringUtils.isBlank(error) && MapUtils.isNotEmpty(properties)) + error = validateProperties(); + return error; + } + + private String validateItems(){ + String error = null; + for (VesDataTypeDefinition def : items) { + if (StringUtils.isBlank(error)) + error = def.validate(); + else + break; + } + return error; + } + + private String validateProperties(){ + String error = null; + for (VesDataTypeDefinition def : properties.values()) { + if (StringUtils.isBlank(error)) + error = def.validate(); + else + break; + } + return error; + } + + + @Override + public boolean equals(Object obj) { + if (obj == this) + return true; + if (null == obj || getClass() != obj.getClass()) + return false; + VesDataTypeDefinition other = (VesDataTypeDefinition) obj; + return Objects.equals(type, other.type) && + Objects.equals(description, other.description) && + Objects.equals(format, other.format) && + Objects.equals(title, other.title) && + Objects.equals(required, other.required) && + Objects.equals(enums, other.enums) && + Objects.equals(defaultValue, other.defaultValue) && + Objects.equals(items, other.items) && + Objects.equals(properties, other.properties) && + Objects.equals(ref, other.ref); + } + + @Override public int hashCode() { + int result = type != null ? type.hashCode() : 0; + result = 31 * result + (description != null ? description.hashCode() : 0); + result = 31 * result + (format != null ? format.hashCode() : 0); + result = 31 * result + (title != null ? title.hashCode() : 0); + result = 31 * result + (properties != null ? properties.hashCode() : 0); + result = 31 * result + (required != null ? required.hashCode() : 0); + result = 31 * result + (enums != null ? enums.hashCode() : 0); + result = 31 * result + (defaultValue != null ? defaultValue.hashCode() : 0); + result = 31 * result + (items != null ? items.hashCode() : 0); + result = 31 * result + (ref != null ? ref.hashCode() : 0); + return result; + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesJsonDeserializer.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesJsonDeserializer.java new file mode 100644 index 0000000..f5cfd2a --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesJsonDeserializer.java @@ -0,0 +1,20 @@ +package org.onap.sdc.dcae.ves; + +import com.google.gson.*; + +import java.lang.reflect.Type; + +// json 'items' value can be either a single object or an array. 
customized POJO will always be an array +public class VesJsonDeserializer implements JsonDeserializer<VesDataItemsDefinition> { + @Override + public VesDataItemsDefinition deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException { + + if(json instanceof JsonArray){ + return new Gson().fromJson(json, VesDataItemsDefinition.class); + } + + VesDataItemsDefinition items = new VesDataItemsDefinition(); + items.add(new Gson().fromJson(json, VesDataTypeDefinition.class)); + return items; + } +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesSimpleTypesEnum.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesSimpleTypesEnum.java new file mode 100644 index 0000000..0606a6d --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesSimpleTypesEnum.java @@ -0,0 +1,26 @@ +package org.onap.sdc.dcae.ves; + +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; + +public enum VesSimpleTypesEnum { + + ARRAY("array"), BOOLEAN("boolean"), INTEGER("integer"), NULL("null"), NUMBER("number"), OBJECT("object"), STRING("string"); + + private String type; + + public String getType() { + return type; + } + + private VesSimpleTypesEnum(String type) { + this.type = type; + } + + public static Set<String> getSimpleTypes() { + return Arrays.stream(VesSimpleTypesEnum.values()).map(t -> t.getType()).collect(Collectors.toSet()); + } + + +} diff --git a/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesStructureLoader.java b/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesStructureLoader.java new file mode 100644 index 0000000..cb8a16c --- /dev/null +++ b/dcaedt_be/src/main/java/org/onap/sdc/dcae/ves/VesStructureLoader.java @@ -0,0 +1,115 @@ +package org.onap.sdc.dcae.ves; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonIOException; +import com.google.gson.JsonSyntaxException; +import com.google.gson.reflect.TypeToken; +import org.apache.commons.lang.ArrayUtils; +import org.apache.commons.lang.StringUtils; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.springframework.stereotype.Service; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; +import java.io.*; +import java.lang.reflect.Type; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +@Service("vesstructureloader") +public class VesStructureLoader { + + + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private static Map<String, EventListenerDefinition> eventListeners = new HashMap<>(); + private static final Type type = new TypeToken<VesDataItemsDefinition>(){}.getType(); + private static final Gson gson = new GsonBuilder().registerTypeAdapter(type, new VesJsonDeserializer()).create(); + private static final String SCHEMA_NAME_PREFIX = "CommonEventFormat_v"; + private static final String SCHEMA_NAME_SUFFIX = ".json"; + + private VesStructureLoader() { + } + + @PostConstruct public void init() { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "VesStructureLoader: Trying to load json schemas"); + String jettyBase = System.getProperty("jetty.base"); + if (jettyBase == null) { + String msg = "Couldn't resolve jetty.base environmental variable"; + errLogger.log(LogLevel.ERROR, 
this.getClass().getName(), msg); + throw new IllegalArgumentException(msg + ". Failed to load VES schema files... aborting"); + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "jetty.base={}", jettyBase); + + File dir = new File(jettyBase + "/config/dcae-be/ves-schema"); + File[] files = dir.listFiles((dir1, name) -> name.startsWith(SCHEMA_NAME_PREFIX) && name.endsWith(SCHEMA_NAME_SUFFIX)); + + if (ArrayUtils.isEmpty(files)) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Error – Failed to find VES Schema definitions."); + } else { + + for (File f : files) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Loading VES schema file: {}", f.getName()); + parseJsonFileAndSaveToMap(f); + } + } + + } + + private void parseJsonFileAndSaveToMap(File file) { + + try { + EventListenerDefinition eventListener = gson.fromJson(new FileReader(file), EventListenerDefinition.class); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), gson.toJson(eventListener)); + String validationError = getValidatorMessage(eventListener); + if (StringUtils.isEmpty(validationError)) { + eventListeners.put(getVersionFromFileName(file.getName()), eventListener); + } else { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Error: Failed to parse VES schema file {}. [{}]", file.getName(), validationError); + } + } catch (FileNotFoundException | JsonIOException | JsonSyntaxException e) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Error: Failed to parse VES schema file {}. [{}]", file.getName(), e); + } + } + + public static Map<String, VesDataTypeDefinition> getEventListenerDefinitionByVersion(String version) { + return eventListeners.get(version).getProperties().get(EventListenerDefinition.EVENT_ROOT).getProperties(); + } + + public static Set<String> getAvailableVersionsList() { + return eventListeners.keySet(); + } + + public static Map<String, Set<String>> getAvailableVersionsAndEventTypes() { + return eventListeners.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> getEventListenerDefinitionByVersion(e.getKey()).keySet())); + } + + public static Set<String> getEventTypeListByVersion(String version) { + return getEventListenerDefinitionByVersion(version).keySet(); + } + + private String getValidatorMessage(EventListenerDefinition eventListenerDefinition) { + String validationError = eventListenerDefinition.validate(); + if (StringUtils.isBlank(validationError)) { + validationError = eventListenerDefinition.resolveRefTypes(); + } + return validationError; + } + + private String getVersionFromFileName(String fileName) { + return fileName.replace(SCHEMA_NAME_PREFIX, "").replace(SCHEMA_NAME_SUFFIX, ""); + } + + @PreDestroy + public void preDestroy() { + // why is this method empty? + } +} diff --git a/dcaedt_be/src/main/resources/logback-spring.xml b/dcaedt_be/src/main/resources/logback-spring.xml new file mode 100644 index 0000000..62dc9c0 --- /dev/null +++ b/dcaedt_be/src/main/resources/logback-spring.xml @@ -0,0 +1,181 @@ +<!-- Copyright (c) 2016 AT&T Intellectual Property. All rights reserved. 
--> +<configuration scan="true" scanPeriod="3 seconds"> + <include resource="org/springframework/boot/logging/logback/base.xml"/> + <!--<jmxConfigurator /> --> + <!-- specify the component name --> + <property name="componentName" value="DCAE"></property> + <!-- log file names --> + <property name="combinedLogName" value="all" /> + <property name="generalLogName" value="application" /> + <property name="errorLogName" value="error" /> + <property name="metricsLogName" value="metrics" /> + <property name="auditLogName" value="audit" /> + <property name="debugLogName" value="debug" /> + <property name="defaultPattern" value="%date{ISO8601,UTC}|%X{RequestId}|%X{ServiceInstanceId}|%thread|%X{VirtualServerName}|%X{ServiceName}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{ServerFQDN}|%X{RemoteHost}|%X{ClassName}|%X{Timer}| %msg%n" /> + <property name="auditLoggerPattern" value="%X{BeginTimestamp}|%X{EndTimestamp}|%X{RequestId}|%X{ServiceInstanceId}|%thread|%X{VirtualServerName}|%X{ServiceName}|%X{PartnerName}|%X{StatusCode}|%X{ResponseCode}|%X{ResponseDescription}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{ElapsedTime}|%X{ServerFQDN}|%X{RemoteHost}|%X{ClassName}|%X{Unused}|%X{ProcessKey}|%X{CustomField1}|%X{CustomField2}|%X{CustomField3}|%X{CustomField4}| %msg%n" /> + <property name="metricsLoggerPattern" value="%X{BeginTimestamp}|%X{EndTimestamp}|%X{RequestId}|%X{ServiceInstanceId}|%thread|%X{VirtualServerName}|%X{ServiceName}|%X{PartnerName}|%X{TargetEntity}|%X{TargetServiceName}|%X{StatusCode}|%X{ResponseCode}|%X{ResponseDescription}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{ElapsedTime}|%X{ServerFQDN}|%X{RemoteHost}|%X{ClassName}|%X{Unused}|%X{ProcessKey}|%X{TargetVirtualEntity}|%X{CustomField1}|%X{CustomField2}|%X{CustomField3}|%X{CustomField4}| %msg%n" /> + <property name="errorLoggerPattern" value="%date{ISO8601,UTC}|%X{RequestId}|%thread|%X{ServiceName}|%X{PartnerName}|%X{TargetEntity}|%X{TargetServiceName}|%.-5level|%X{ErrorCode}|%X{ErrorDescription}| %msg%n" /> + <property name="debugLoggerPattern" value="%date{ISO8601,UTC}|%X{RequestId}| %msg%n" /> + <property name="logDirectory" value="${log.home}/${componentName}" /> + <!-- ============================================================================ --> + <!-- EELF Appenders --> + <!-- ============================================================================ --> + <!-- The EELFAppender is used to record events to the general application + log --> + <appender name="EELF" class="ch.qos.logback.core.rolling.RollingFileAppender"> + <file>${logDirectory}/${generalLogName}.log</file> + <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> + <fileNamePattern>${logDirectory}/${generalLogName}.%i.log.zip + </fileNamePattern> + <minIndex>1</minIndex> + <maxIndex>9</maxIndex> + </rollingPolicy> + <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> + <maxFileSize>5MB</maxFileSize> + </triggeringPolicy> + <encoder> + <pattern>${defaultPattern}</pattern> + </encoder> + </appender> + <appender name="asyncEELF" class="ch.qos.logback.classic.AsyncAppender"> + <queueSize>256</queueSize> + <appender-ref ref="EELF" /> + </appender> + + <appender name="ALL" class="ch.qos.logback.core.rolling.RollingFileAppender"> + <file>${logDirectory}/${combinedLogName}.log</file> + <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> + <fileNamePattern>${logDirectory}/${combinedLogName}.%i.log.zip + </fileNamePattern> 
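+		<!-- as configured just below, this appender keeps a window of up to 9 zipped archives and rolls
+		     whenever the active file exceeds 5MB, so roughly the last 45MB of combined logs are retained -->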
+ <minIndex>1</minIndex> + <maxIndex>9</maxIndex> + </rollingPolicy> + <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> + <maxFileSize>5MB</maxFileSize> + </triggeringPolicy> + <encoder> + <pattern>${defaultPattern}</pattern> + </encoder> + </appender> + <appender name="asyncALL" class="ch.qos.logback.classic.AsyncAppender"> + <queueSize>256</queueSize> + <appender-ref ref="ALL" /> + </appender> + <!-- EELF Audit Appender. This appender is used to record audit engine + related logging events. The audit logger and appender are specializations + of the EELF application root logger and appender. This can be used to segregate + Policy engine events from other components, or it can be eliminated to record + these events as part of the application root log. --> + <!-- Audit log --> + <appender name="EELFAudit" class="ch.qos.logback.core.rolling.RollingFileAppender"> + <file>${logDirectory}/${auditLogName}.log</file> + <!-- Audit messages filter - accept audit messages --> + <filter class="ch.qos.logback.core.filter.EvaluatorFilter"> + <evaluator class="ch.qos.logback.classic.boolex.OnMarkerEvaluator"> + <marker>AUDIT_MARKER</marker> + <marker>AUDIT</marker> + </evaluator> + <onMismatch>DENY</onMismatch> + <onMatch>ACCEPT</onMatch> + </filter> + <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> + <fileNamePattern>${logDirectory}/${auditLogName}.%i.log.zip</fileNamePattern> + <minIndex>1</minIndex> + <maxIndex>10</maxIndex> + </rollingPolicy> + <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> + <maxFileSize>20MB</maxFileSize> + </triggeringPolicy> + <encoder> + <pattern>${auditLoggerPattern}</pattern> + </encoder> + </appender> + <appender name="asyncEELFAudit" class="ch.qos.logback.classic.AsyncAppender"> + <queueSize>256</queueSize> + <appender-ref ref="EELFAudit" /> + </appender> + <appender name="EELFMetrics" class="ch.qos.logback.core.rolling.RollingFileAppender"> + <file>${logDirectory}/${metricsLogName}.log</file> + <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> + <fileNamePattern>${logDirectory}/${metricsLogName}.%i.log.zip + </fileNamePattern> + <minIndex>1</minIndex> + <maxIndex>9</maxIndex> + </rollingPolicy> + <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> + <maxFileSize>5MB</maxFileSize> + </triggeringPolicy> + <encoder> + <!-- <pattern>"%d{HH:mm:ss.SSS} [%thread] %-5level %logger{1024} - + %msg%n"</pattern> --> + <pattern>${metricsLoggerPattern}</pattern> + </encoder> + </appender> + <appender name="asyncEELFMetrics" class="ch.qos.logback.classic.AsyncAppender"> + <queueSize>256</queueSize> + <appender-ref ref="EELFMetrics"/> + </appender> + <appender name="EELFError" class="ch.qos.logback.core.rolling.RollingFileAppender"> + <file>${logDirectory}/${errorLogName}.log</file> + <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> + <fileNamePattern>${logDirectory}/${errorLogName}.%i.log.zip + </fileNamePattern> + <minIndex>1</minIndex> + <maxIndex>9</maxIndex> + </rollingPolicy> + <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> + <maxFileSize>5MB</maxFileSize> + </triggeringPolicy> + <encoder> + <pattern>${errorLoggerPattern}</pattern> + </encoder> + </appender> + <appender name="asyncEELFError" class="ch.qos.logback.classic.AsyncAppender"> + <queueSize>256</queueSize> + <appender-ref ref="EELFError"/> + </appender> + <appender name="EELFDebug" 
class="ch.qos.logback.core.rolling.RollingFileAppender"> + <file>${logDirectory}/${debugLogName}.log</file> + <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> + <fileNamePattern>${logDirectory}/${debugLogName}.%i.log.zip + </fileNamePattern> + <minIndex>1</minIndex> + <maxIndex>9</maxIndex> + </rollingPolicy> + <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> + <maxFileSize>5MB</maxFileSize> + </triggeringPolicy> + <encoder> + <pattern>${debugLoggerPattern}</pattern> + </encoder> + </appender> + <appender name="asyncEELFDebug" class="ch.qos.logback.classic.AsyncAppender"> + <queueSize>256</queueSize> + <appender-ref ref="EELFDebug" /> + <includeCallerData>true</includeCallerData> + </appender> + <!-- ============================================================================ --> + <!-- EELF loggers --> + <!-- ============================================================================ --> + <logger name="com.onap.eelf" level="info" additivity="true"> + <appender-ref ref="asyncEELF" /> + </logger> + <logger name="org.onap.sdc.common.onaplog" level="all" additivity="true"> + <appender-ref ref="asyncALL" /> + </logger> + <logger name="org.onap.sdc.common.onaplog.OnapLoggerAudit" level="info" additivity="true"> + <appender-ref ref="asyncEELFAudit" /> + </logger> + <logger name="org.onap.sdc.common.onaplog.OnapLoggerMetric" level="info" additivity="true"> + <appender-ref ref="asyncEELFMetrics" /> + </logger> + <logger name="org.onap.sdc.common.onaplog.OnapLoggerError" level="error" additivity="true"> + <appender-ref ref="asyncEELFError" /> + </logger> + <logger name="org.onap.sdc.common.onaplog.OnapLoggerDebug" level="debug" additivity="true"> + <appender-ref ref="asyncEELFDebug" /> + </logger> + <root level="INFO"> + <appender-ref ref="asyncEELF" /> + </root> +</configuration> diff --git a/dcaedt_be/src/main/resources/schema.sql b/dcaedt_be/src/main/resources/schema.sql new file mode 100644 index 0000000..487b5e5 --- /dev/null +++ b/dcaedt_be/src/main/resources/schema.sql @@ -0,0 +1,4 @@ +--DROP TABLE composition_status; +CREATE TABLE IF NOT EXISTS composition_status (id varchar(20) NOT NULL, revision varchar(20) DEFAULT 0, name blob(1024), is_changed varchar(2), PRIMARY KEY (id, revision)); +CREATE TABLE IF NOT EXISTS test (id varchar(20) NOT NULL, name varchar(20), is_changed varchar(2), PRIMARY KEY (id)); + diff --git a/dcaedt_be/src/main/webapp/META-INF/MANIFEST.MF b/dcaedt_be/src/main/webapp/META-INF/MANIFEST.MF new file mode 100644 index 0000000..67a9713 --- /dev/null +++ b/dcaedt_be/src/main/webapp/META-INF/MANIFEST.MF @@ -0,0 +1,8 @@ +Manifest-Version: 1.0 +Implementation-Title: dcae-be +Implementation-Version: APPLICATION_VERSION +Implementation-Vendor-Id: org.onap.sdc.dcae +Build-Jdk: 1.7.0_45 +Created-By: Apache Maven 3.2.1 +Archiver-Version: Plexus Archiver +Specification-Version: 1707.0.0-SNAPSHOT diff --git a/dcaedt_be/src/main/webapp/WEB-INF/config/dcae-be/application.properties b/dcaedt_be/src/main/webapp/WEB-INF/config/dcae-be/application.properties new file mode 100644 index 0000000..d5333c4 --- /dev/null +++ b/dcaedt_be/src/main/webapp/WEB-INF/config/dcae-be/application.properties @@ -0,0 +1,22 @@ +# Configure the server to run with SSL/TLS and using HTTPS +server.port=8446 +server.contextPath=/ +##ScheduleTimer +scheduled.timer.value=5000 +asdc_catalog_url=asdc +#uri=https://dcaeDesigner:Aa123456@zldcrdm2sdc2abe01.3f1a87.rdm2.tci.att.com:8443#demo 
+uri=https://dcaeDesigner:Aa123456@zldcrdm2sdc4cbe01.3f1a87.rdm2.tci.att.com:8443#demo +toscalab_url=http://localhost:8080/ +blueprinter.uri=${toscalab_url}translate +blueprinter.hcuri=${toscalab_url}healthcheck +blueprinter.hcretrynum=3 +asdc_rootPath=/sdc/v1/catalog/ +healthpoller.fixedDelay=15000 +compositionConfig.isRuleEditorActive=false +compositionConfig.flowTypes={"Syslog":{"entryPointPhaseName":"syslog_map","lastPhaseName":"map_publish"},\ + "SNMP":{"entryPointPhaseName":"snmp_map","lastPhaseName":"map_publish"},\ + "FOI":{"entryPointPhaseName":"foi_map","lastPhaseName":"map_publish"},\ + "Guest OS":{"entryPointPhaseName":"guest_os_map","lastPhaseName":"map_publish"},\ + "Status Poller":{"entryPointPhaseName":"status_poller_map","lastPhaseName":"map_publish"},\ + "SNMP Polling":{"entryPointPhaseName":"snmp_polling_map","lastPhaseName":"map_publish"},\ + "TCA Hi Lo":{"entryPointPhaseName":"tca_hi_lo_map","lastPhaseName":"map_publish"}} diff --git a/dcaedt_be/src/main/webapp/WEB-INF/config/dcae-be/error-configuration.yaml b/dcaedt_be/src/main/webapp/WEB-INF/config/dcae-be/error-configuration.yaml new file mode 100644 index 0000000..c60465c --- /dev/null +++ b/dcaedt_be/src/main/webapp/WEB-INF/config/dcae-be/error-configuration.yaml @@ -0,0 +1,286 @@ +# DCAE-D Errors +errors: + OK: { + code: 200, + message: "OK", + messageId: "200" + } + CREATED: { + code: 201, + message: "OK", + messageId: "201" + } + NO_CONTENT: { + code: 204, + message: "No Content", + messageId: "204" + } +#--------POL4050----------------------------- + NOT_ALLOWED: { + code: 405, + message: "Error: Method not allowed.", + messageId: "POL4050" + } +#--------POL5000----------------------------- + GENERAL_ERROR: { + code: 500, + message: "Error: Internal Server Error. Please try again later.", + messageId: "POL5000" + } +#--------POL5500----------------------------- + CATALOG_NOT_AVAILABLE: { + code: 502, + message: "Error - SDC Catalog currently not available. Please try again later", + messageId: "POL5500" + } +#--------POL5501----------------------------- + AUTH_ERROR: { + code: 502, + message: "Error – connection to SDC catalog could not be authenticated. Please contact your administrator", + messageId: "POL5501" + } + +#---------SVC4000----------------------------- + INVALID_CONTENT: { + code: 400, + message: "Error: Invalid content.", + messageId: "SVC4000" + } + +#---------SVC6001----------------------------- + NOT_FOUND: { + code: 404, + message: "No Monitoring Templates were found.", + messageId: "SVC6001" + } +#---------SVC6005----------------------------- + CONFIGURATION_ERROR: { + code: 500, + message: "Error – Failed to find configuration.", + messageId: "SVC6005" + } +#---------SVC6006----------------------------- + VES_SCHEMA_NOT_FOUND: { + code: 500, + message: "Error – Failed to find VES Schema definitions.", + messageId: "SVC6006" + } +#---------SVC6007----------------------------- +# %1 - VES Schema filename +# %2 - error reason + VES_SCHEMA_INVALID: { + code: 500, + message: "Error – Failed to parse VES Schema file '%1'. 
[%2]", + messageId: "SVC6007" + } +#---------SVC6008----------------------------- + FLOW_TYPES_CONFIGURATION_ERROR: { + code: 500, + message: "Error – Failed to read flow type definitions.", + messageId: "SVC6008" + } +#---------SVC6010----------------------------- + CLONE_FAILED: { + code: 409, + message: "Error – could not import Monitoring Template.", + messageId: "SVC6010" + } +#---------SVC6020----------------------------- + EMPTY_SERVICE_LIST: { + code: 404, + message: "Service List is not available.", + messageId: "SVC6020" + } +#---------SVC6021----------------------------- + MONITORING_TEMPLATE_ATTACHMENT_ERROR: { + code: 409, + message: "Error – Monitoring Template is not available for editing.", + messageId: "SVC6021" + } + +#---------SVC6031----------------------------- +# %1 - Component Name + MISSING_TOSCA_FILE: { + code: 404, + message: "Error – Could not read component %1 details.", + messageId: "SVC6031" + } +#---------SVC6032----------------------------- +# %1 - Component Name + VALIDATE_TOSCA_ERROR: { + code: 500, + message: "Error – Component %1 details could not be parsed.", + messageId: "SVC6032" + } +#---------SVC6033----------------------------- + SUBMIT_BLUEPRINT_ERROR: { + code: 500, + message: "Error –Failed to submit blueprint.", + messageId: "SVC6033" + } +#---------SVC6034----------------------------- +# %1 -VFCMT Name + GENERATE_BLUEPRINT_ERROR: { + code: 500, + message: "Error – Failed to generate blueprint file for Monitoring Template %1.", + messageId: "SVC6034" + } +#---------SVC6035----------------------------- +# %1 - the error reason + INVALID_RULE_FORMAT: { + code: 400, + message: "Error - Rule format is invalid: %1.", + messageId: "SVC6035" + } +#---------SVC6036----------------------------- + SAVE_RULE_FAILED: { + code: 409, + message: "Error - Failed to save rule. Internal persistence error", + messageId: "SVC6036" + } + +#---------SVC6038----------------------------- +# %1 - VFCMT uuid + RESOURCE_NOT_VFCMT_ERROR: { + code: 400, + message: "Resource with uuid %1 is either not of resourceType='VFCMT' or not of category='Template'", + messageId: "SVC6038" + } +#---------SVC6039----------------------------- +# %1 - serviceUuid +# %2 - vfiName + VFI_FETCH_ERROR: { + code: 404, + message: "Service %1 does not contain a VFI named %2", + messageId: "SVC6039" + } +#-----------SVC6085--------------------------- +# %1 - current user id +# %2 - resource/service name +# %3 - last modifier user id + USER_CONFLICT: { + code: 403, + message: "Error: User conflict. 
+    messageId: "SVC6085"
+  }
+
+#---------SVC6100-----------------------------
+  MISSING_RULE_DESCRIPTION: {
+    code: 400,
+    message: "Please enter a valid rule description.",
+    messageId: "SVC6100"
+  }
+#---------SVC6101-----------------------------
+  MISSING_ACTION: {
+    code: 400,
+    message: "Rule must have at least one action.",
+    messageId: "SVC6101"
+  }
+
+#---------SVC6104-----------------------------
+# %1 - action target
+  MISSING_CONCAT_VALUE: {
+    code: 400,
+    message: "Please fill in all 'from' fields of the concat action to %1",
+    messageId: "SVC6104"
+  }
+#---------SVC6105-----------------------------
+# %1 - condition group type
+  INVALID_GROUP_CONDITION: {
+    code: 400,
+    message: "Undefined condition group type: %1",
+    messageId: "SVC6105"
+  }
+
+#---------SVC6106-----------------------------
+  MISSING_CONDITION_ITEM: {
+    code: 400,
+    message: "Please enter all condition items",
+    messageId: "SVC6106"
+  }
+#---------SVC6107-----------------------------
+# %1 - left/right
+  MISSING_OPERAND: {
+    code: 400,
+    message: "Please enter the %1 operand field",
+    messageId: "SVC6107"
+  }
+#---------SVC6108-----------------------------
+# %1 - operator
+  INVALID_OPERATOR: {
+    code: 400,
+    message: "Undefined operator: %1",
+    messageId: "SVC6108"
+  }
+#---------SVC6109-----------------------------
+# %1 - action target
+  MISSING_ENTRY: {
+    code: 400,
+    message: "Please fill in all key-value pairs of the map action to %1",
+    messageId: "SVC6109"
+  }
+#---------SVC6110-----------------------------
+# %1 - action target
+  MISSING_DEFAULT_VALUE: {
+    code: 400,
+    message: "Please fill in the default value of the map action to %1",
+    messageId: "SVC6110"
+  }
+#---------SVC6111-----------------------------
+  DUPLICATE_KEY: {
+    code: 400,
+    message: "Error: Duplicate map keys exist; please modify the rule configuration",
+    messageId: "SVC6111"
+  }
+#---------SVC6112-----------------------------
+# %1 - dependent actions
+  ACTION_DEPENDENCY: {
+    code: 400,
+    message: "A circular dependency was detected between actions. The following fields should be resolved: %1",
+    messageId: "SVC6112"
+  }
+#---------SVC6113-----------------------------
+# %1 - dependent rule ids
+# %2 - dependent action targets
+  RULE_DEPENDENCY: {
+    code: 400,
+    message: "A circular dependency was detected between rules: %1 within fields: %2",
+    messageId: "SVC6113"
+  }
+#---------SVC6114-----------------------------
+# %1 - dcae component name
+  NODE_NOT_FOUND: {
+    code: 400,
+    message: "DCAE component %1 not found in composition",
+    messageId: "SVC6114"
+  }
+#---------SVC6115-----------------------------
+  DELETE_RULE_FAILED: {
+    code: 409,
+    message: "Delete rule failed. Internal persistence error.",
+    messageId: "SVC6115"
+  }
+#---------SVC6116-----------------------------
+# %1 - reason
+  TRANSLATE_FAILED: {
+    code: 400,
+    message: "Translation failed. Reason: %1",
+    messageId: "SVC6116"
+  }
+
+#---------SVC6117-----------------------------
+# %1 - field name
+# %2 - action type
+# %3 - action target
+  MISSING_ACTION_FIELD: {
+    code: 400,
+    message: "Please fill in the %1 field of the %2 action to %3",
+    messageId: "SVC6117"
+  }
+
+#---------SVC6118-----------------------------
+  DELETE_BLUEPRINT_FAILED: {
+    code: 400,
+    message: "The request was partially successful. Removing the attached Blueprint from the service has failed. You must manually delete the artifact.",
+    messageId: "SVC6118"
+  }
\ No newline at end of file diff --git a/dcaedt_be/src/main/webapp/WEB-INF/config/dcae-be/ves-schema/CommonEventFormat_v4.1.json b/dcaedt_be/src/main/webapp/WEB-INF/config/dcae-be/ves-schema/CommonEventFormat_v4.1.json new file mode 100644 index 0000000..2f86c38 --- /dev/null +++ b/dcaedt_be/src/main/webapp/WEB-INF/config/dcae-be/ves-schema/CommonEventFormat_v4.1.json @@ -0,0 +1,1165 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "definitions": { + "attCopyrightNotice": { + "description": "Copyright (c) <2016>, AT&T Intellectual Property. All other rights reserved", + "type": "object", + "properties": { + "useAndRedistribution": { + "description": "Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:", + "type": "string" + }, + "condition1": { + "description": "Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.", + "type": "string" + }, + "condition2": { + "description": "Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.", + "type": "string" + }, + "condition3": { + "description": "All advertising materials mentioning features or use of this software must display the following acknowledgement: This product includes software developed by the AT&T.", + "type": "string" + }, + "condition4": { + "description": "Neither the name of AT&T nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.", + "type": "string" + }, + "disclaimerLine1": { + "description": "THIS SOFTWARE IS PROVIDED BY AT&T INTELLECTUAL PROPERTY AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", + "type": "string" + }, + "disclaimerLine2": { + "description": "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL AT&T INTELLECTUAL PROPERTY BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", + "type": "string" + }, + "disclaimerLine3": { + "description": "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,", + "type": "string" + }, + "disclaimerLine4": { + "description": "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + "type": "string" + } + } + }, + "codecsInUse": { + "description": "number of times an identified codec was used over the measurementInterval", + "type": "object", + "properties": { + "codecIdentifier": { "type": "string" }, + "numberInUse": { "type": "number" } + }, + "required": [ "codecIdentifier", "numberInUse" ] + }, + "command": { + "description": "command from an event collector toward an event source", + "type": "object", + "properties": { + "commandType": { + "type": "string", + "enum": [ + "heartbeatIntervalChange", + "measurementIntervalChange", + "provideThrottlingState", + "throttlingSpecification" + ] + }, + "eventDomainThrottleSpecification": { "$ref": "#/definitions/eventDomainThrottleSpecification" }, + "measurementInterval": { "type": "number" } + }, + "required": [ "commandType" ] + }, + "commandList": { + "description": "array of commands from an event collector toward an event source", + "type": "array", + "items": { + "$ref": "#/definitions/commandListEntry" + }, + "minItems": 0 + }, + "commandListEntry": { + "description": "reference to a command object", + "type": "object", + "properties": { + "command": {"$ref": "#/definitions/command"} + }, + "required": [ "command" ] + }, + "commonEventHeader": { + "description": "fields common to all events", + "type": "object", + "properties": { + "domain": { + "description": "the eventing domain associated with the event", + "type": "string", + "enum": [ + "fault", + "heartbeat", + "measurementsForVfScaling", + "mobileFlow", + "other", + "stateChange", + "syslog", + "thresholdCrossingAlert" + ] + }, + "eventId": { + "description": "event key that is unique to the event source", + "type": "string" + }, + "eventType": { + "description": "unique event topic name", + "type": "string" + }, + "functionalRole": { + "description": "function of the event source e.g., eNodeB, MME, PCRF", + "type": "string" + }, + "internalHeaderFields": { "$ref": "#/definitions/internalHeaderFields" }, + "lastEpochMicrosec": { + "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "priority": { + "description": "processing priority", + "type": "string", + "enum": [ + "High", + "Medium", + "Normal", + "Low" + ] + }, + "reportingEntityId": { + "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process", + "type": "string" + }, + "reportingEntityName": { + "description": "name of the entity reporting the event, for example, an OAM VM", + "type": "string" + }, + "sequence": { + "description": "ordering of events communicated by an event source instance or 0 if not needed", + "type": "integer" + }, + "sourceId": { + "description": "UUID identifying the entity experiencing the event issue; must be populated by the 
ATT enrichment process", + "type": "string" + }, + "sourceName": { + "description": "name of the entity experiencing the event issue", + "type": "string" + }, + "startEpochMicrosec": { + "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "version": { + "description": "version of the event header", + "type": "number" + } + }, + "required": [ "domain", "eventId", "functionalRole", "lastEpochMicrosec", + "priority", "reportingEntityName", "sequence", + "sourceName", "startEpochMicrosec" ] + }, + "counter": { + "description": "performance counter", + "type": "object", + "properties": { + "criticality": { "type": "string", "enum": [ "CRIT", "MAJ" ] }, + "name": { "type": "string" }, + "thresholdCrossed": { "type": "string" }, + "value": { "type": "string"} + }, + "required": [ "criticality", "name", "thresholdCrossed", "value" ] + }, + "cpuUsage": { + "description": "percent usage of an identified CPU", + "type": "object", + "properties": { + "cpuIdentifier": { "type": "string" }, + "percentUsage": { "type": "number" } + }, + "required": [ "cpuIdentifier", "percentUsage" ] + }, + "errors": { + "description": "receive and transmit errors for the measurements domain", + "type": "object", + "properties": { + "receiveDiscards": { "type": "number" }, + "receiveErrors": { "type": "number" }, + "transmitDiscards": { "type": "number" }, + "transmitErrors": { "type": "number" } + }, + "required": [ "receiveDiscards", "receiveErrors", "transmitDiscards", "transmitErrors" ] + }, + "event": { + "description": "the root level of the common event format", + "type": "object", + "properties": { + "commonEventHeader": { "$ref": "#/definitions/commonEventHeader" }, + "faultFields": { "$ref": "#/definitions/faultFields" }, + "measurementsForVfScalingFields": { "$ref": "#/definitions/measurementsForVfScalingFields" }, + "mobileFlowFields": { "$ref": "#/definitions/mobileFlowFields" }, + "otherFields": { "$ref": "#/definitions/otherFields" }, + "stateChangeFields": { "$ref": "#/definitions/stateChangeFields" }, + "syslogFields": { "$ref": "#/definitions/syslogFields" }, + "thresholdCrossingAlertFields": { "$ref": "#/definitions/thresholdCrossingAlertFields" } + }, + "required": [ "commonEventHeader" ] + }, + "eventDomainThrottleSpecification": { + "description": "specification of what information to suppress within an event domain", + "type": "object", + "properties": { + "eventDomain": { + "description": "Event domain enum from the commonEventHeader domain field", + "type": "string" + }, + "suppressedFieldNames": { + "description": "List of optional field names in the event block that should not be sent to the Event Listener", + "type": "array", + "items": { + "type": "string" + } + }, + "suppressedNvPairsList": { + "description": "Optional list of specific NvPairsNames to suppress within a given Name-Value Field", + "type": "array", + "items": { + "$ref": "#/definitions/suppressedNvPairs" + } + } + }, + "required": [ "eventDomain" ] + }, + "eventDomainThrottleSpecificationList": { + "description": "array of eventDomainThrottleSpecifications", + "type": "array", + "items": { + "$ref": "#/definitions/eventDomainThrottleSpecification" + }, + "minItems": 0 + }, + "eventList": { + "description": "array of events", + "type": "array", + "items": { + "$ref": "#/definitions/event" + } + }, + "eventThrottlingState": { + "description": "reports the throttling in force at the event 
source", + "type": "object", + "properties": { + "eventThrottlingMode": { + "description": "Mode the event manager is in", + "type": "string", + "enum": [ + "normal", + "throttled" + ] + }, + "eventDomainThrottleSpecificationList": { "$ref": "#/definitions/eventDomainThrottleSpecificationList" } + }, + "required": [ "eventThrottlingMode" ] + }, + "faultFields": { + "description": "fields specific to fault events", + "type": "object", + "properties": { + "alarmAdditionalInformation": { + "description": "additional alarm information", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "alarmCondition": { + "description": "alarm condition reported by the device", + "type": "string" + }, + "alarmInterfaceA": { + "description": "card, port, channel or interface name of the device generating the alarm", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "faultFieldsVersion": { + "description": "version of the faultFields block", + "type": "number" + }, + "specificProblem": { + "description": "short description of the alarm or problem", + "type": "string" + }, + "vfStatus": { + "description": "virtual function status enumeration", + "type": "string", + "enum": [ + "Active", + "Idle", + "Preparing to terminate", + "Ready to terminate", + "Requesting termination" + ] + } + }, + "required": [ "alarmCondition", "eventSeverity", + "eventSourceType", "specificProblem", "vfStatus" ] + }, + "featuresInUse": { + "description": "number of times an identified feature was used over the measurementInterval", + "type": "object", + "properties": { + "featureIdentifier": { "type": "string" }, + "featureUtilization": { "type": "number" } + }, + "required": [ "featureIdentifier", "featureUtilization" ] + }, + "field": { + "description": "name value pair", + "type": "object", + "properties": { + "name": { "type": "string" }, + "value": { "type": "string" } + }, + "required": [ "name", "value" ] + }, + "filesystemUsage": { + "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second", + "type": "object", + "properties": { + "blockConfigured": { "type": "number" }, + "blockIops": { "type": "number" }, + "blockUsed": { "type": "number" }, + "ephemeralConfigured": { "type": "number" }, + "ephemeralIops": { "type": "number" }, + "ephemeralUsed": { "type": "number" }, + "filesystemName": { "type": "string" } + }, + "required": [ "blockConfigured", "blockIops", "blockUsed", "ephemeralConfigured", + "ephemeralIops", "ephemeralUsed", "filesystemName" ] + }, + "gtpPerFlowMetrics": { + "description": "Mobility GTP Protocol per flow metrics", + "type": "object", + "properties": { + "avgBitErrorRate": { + "description": "average bit error rate", + "type": "number" + }, + "avgPacketDelayVariation": { + "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "avgPacketLatency": { + "description": "average delivery latency", + "type": "number" + }, + "avgReceiveThroughput": { + "description": "average receive throughput", + "type": "number" + }, + 
"avgTransmitThroughput": { + "description": "average transmit throughput", + "type": "number" + }, + "durConnectionFailedStatus": { + "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval", + "type": "number" + }, + "durTunnelFailedStatus": { + "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval", + "type": "number" + }, + "flowActivatedBy": { + "description": "Endpoint activating the flow", + "type": "string" + }, + "flowActivationEpoch": { + "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available", + "type": "number" + }, + "flowActivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowActivationTime": { + "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowDeactivatedBy": { + "description": "Endpoint deactivating the flow", + "type": "string" + }, + "flowDeactivationEpoch": { + "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time", + "type": "number" + }, + "flowDeactivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowDeactivationTime": { + "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowStatus": { + "description": "connection status at reporting time as a working / inactive / failed indicator value", + "type": "string" + }, + "gtpConnectionStatus": { + "description": "Current connection state at reporting time", + "type": "string" + }, + "gtpTunnelStatus": { + "description": "Current tunnel state at reporting time", + "type": "string" + }, + "ipTosCountList": { + "description": "array of key: value pairs where the keys are drawn from the IP Type-of-Service identifiers which range from '0' to '255', and the values are the count of packets that had those ToS identifiers in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "ipTosList": { + "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'", + "type": "array", + "items": { + "type": "string" + } + }, + "largePacketRtt": { + "description": "large packet round trip time", + "type": "number" + }, + "largePacketThreshold": { + "description": "large packet threshold being applied", + "type": "number" + }, + "maxPacketDelayVariation": { + "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "maxReceiveBitRate": { + "description": "maximum receive bit rate", + "type": "number" + }, + "maxTransmitBitRate": { + "description": "maximum transmit bit rate", + "type": "number" + }, + "mobileQciCosCountList": { 
+ "description": "array of key: value pairs where the keys are drawn from LTE QCI or UMTS class of service strings, and the values are the count of packets that had those strings in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "mobileQciCosList": { + "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "numActivationFailures": { + "description": "Number of failed activation requests, as observed by the reporting node", + "type": "number" + }, + "numBitErrors": { + "description": "number of errored bits", + "type": "number" + }, + "numBytesReceived": { + "description": "number of bytes received, including retransmissions", + "type": "number" + }, + "numBytesTransmitted": { + "description": "number of bytes transmitted, including retransmissions", + "type": "number" + }, + "numDroppedPackets": { + "description": "number of received packets dropped due to errors per virtual interface", + "type": "number" + }, + "numGtpEchoFailures": { + "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2", + "type": "number" + }, + "numGtpTunnelErrors": { + "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1", + "type": "number" + }, + "numHttpErrors": { + "description": "Http error count", + "type": "number" + }, + "numL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, including retransmissions", + "type": "number" + }, + "numL7BytesTransmitted": { + "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions", + "type": "number" + }, + "numLostPackets": { + "description": "number of lost packets", + "type": "number" + }, + "numOutOfOrderPackets": { + "description": "number of out-of-order packets", + "type": "number" + }, + "numPacketErrors": { + "description": "number of errored packets", + "type": "number" + }, + "numPacketsReceivedExclRetrans": { + "description": "number of packets received, excluding retransmission", + "type": "number" + }, + "numPacketsReceivedInclRetrans": { + "description": "number of packets received, including retransmission", + "type": "number" + }, + "numPacketsTransmittedInclRetrans": { + "description": "number of packets transmitted, including retransmissions", + "type": "number" + }, + "numRetries": { + "description": "number of packet retries", + "type": "number" + }, + "numTimeouts": { + "description": "number of packet timeouts", + "type": "number" + }, + "numTunneledL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, excluding retransmissions", + "type": "number" + }, + "roundTripTime": { + "description": "round trip time", + "type": "number" + }, + "tcpFlagCountList": { + "description": "array of key: value pairs where the keys are drawn from TCP Flags and the values are the count of packets that had that TCP Flag in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "tcpFlagList": { + "description": "Array of unique TCP Flags observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "timeToFirstByte": { + "description": "Time in milliseconds between the connection activation and first byte received", + "type": 
"number" + } + }, + "required": [ "avgBitErrorRate", "avgPacketDelayVariation", "avgPacketLatency", + "avgReceiveThroughput", "avgTransmitThroughput", + "flowActivationEpoch", "flowActivationMicrosec", + "flowDeactivationEpoch", "flowDeactivationMicrosec", + "flowDeactivationTime", "flowStatus", + "maxPacketDelayVariation", "numActivationFailures", + "numBitErrors", "numBytesReceived", "numBytesTransmitted", + "numDroppedPackets", "numL7BytesReceived", + "numL7BytesTransmitted", "numLostPackets", + "numOutOfOrderPackets", "numPacketErrors", + "numPacketsReceivedExclRetrans", + "numPacketsReceivedInclRetrans", + "numPacketsTransmittedInclRetrans", + "numRetries", "numTimeouts", "numTunneledL7BytesReceived", + "roundTripTime", "timeToFirstByte" + ] + }, + "internalHeaderFields": { + "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources", + "type": "object" + }, + "latencyBucketMeasure": { + "description": "number of counts falling within a defined latency bucket", + "type": "object", + "properties": { + "countsInTheBucket": { "type": "number" }, + "highEndOfLatencyBucket": { "type": "number" }, + "lowEndOfLatencyBucket": { "type": "number" } + }, + "required": [ "countsInTheBucket" ] + }, + "measurementGroup": { + "description": "measurement group", + "type": "object", + "properties": { + "name": { "type": "string" }, + "measurements": { + "description": "array of name value pair measurements", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + } + }, + "required": [ "name", "measurements" ] + }, + "measurementsForVfScalingFields": { + "description": "measurementsForVfScaling fields", + "type": "object", + "properties": { + "additionalMeasurements": { + "description": "additional measurement fields", + "type": "array", + "items": { + "$ref": "#/definitions/measurementGroup" + } + }, + "aggregateCpuUsage": { + "description": "aggregate CPU usage of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "codecUsageArray": { + "description": "array of codecs in use", + "type": "array", + "items": { + "$ref": "#/definitions/codecsInUse" + } + }, + "concurrentSessions": { + "description": "peak concurrent sessions for the VM or VNF over the measurementInterval", + "type": "number" + }, + "configuredEntities": { + "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the VNF", + "type": "number" + }, + "cpuUsageArray": { + "description": "usage of an array of CPUs", + "type": "array", + "items": { + "$ref": "#/definitions/cpuUsage" + } + }, + "errors": { "$ref": "#/definitions/errors" }, + "featureUsageArray": { + "description": "array of features in use", + "type": "array", + "items": { + "$ref": "#/definitions/featuresInUse" + } + }, + "filesystemUsageArray": { + "description": "filesystem usage of the VM on which the VNFC reporting the event is running", + "type": "array", + "items": { + "$ref": "#/definitions/filesystemUsage" + } + }, + "latencyDistribution": { + "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-VNF configured ranges", + "type": "array", + "items": { + "$ref": "#/definitions/latencyBucketMeasure" + } + }, + "meanRequestLatency": { + "description": "mean seconds required to respond to each request for the VM on which the VNFC reporting the event is running", + "type": "number" + }, + 
"measurementInterval": { + "description": "interval over which measurements are being reported in seconds", + "type": "number" + }, + "measurementsForVfScalingVersion": { + "description": "version of the measurementsForVfScaling block", + "type": "number" + }, + "memoryConfigured": { + "description": "memory in MB configured in the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "memoryUsed": { + "description": "memory usage in MB of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "numberOfMediaPortsInUse": { + "description": "number of media ports in use", + "type": "number" + }, + "requestRate": { + "description": "peak rate of service requests per second to the VNF over the measurementInterval", + "type": "number" + }, + "vnfcScalingMetric": { + "description": "represents busy-ness of the VNF from 0 to 100 as reported by the VNFC", + "type": "number" + }, + "vNicUsageArray": { + "description": "usage of an array of virtual network interface cards", + "type": "array", + "items": { + "$ref": "#/definitions/vNicUsage" + } + } + }, + "required": [ "measurementInterval" ] + }, + "mobileFlowFields": { + "description": "mobileFlow fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional mobileFlow fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "applicationType": { + "description": "Application type inferred", + "type": "string" + }, + "appProtocolType": { + "description": "application protocol", + "type": "string" + }, + "appProtocolVersion": { + "description": "application protocol version", + "type": "string" + }, + "cid": { + "description": "cell id", + "type": "string" + }, + "connectionType": { + "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc", + "type": "string" + }, + "ecgi": { + "description": "Evolved Cell Global Id", + "type": "string" + }, + "flowDirection": { + "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow", + "type": "string" + }, + "gtpPerFlowMetrics": { "$ref": "#/definitions/gtpPerFlowMetrics" }, + "gtpProtocolType": { + "description": "GTP protocol", + "type": "string" + }, + "gtpVersion": { + "description": "GTP protocol version", + "type": "string" + }, + "httpHeader": { + "description": "HTTP request header, if the flow connects to a node referenced by HTTP", + "type": "string" + }, + "imei": { + "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "imsi": { + "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "ipProtocolType": { + "description": "IP protocol type e.g., TCP, UDP, RTP...", + "type": "string" + }, + "ipVersion": { + "description": "IP protocol version e.g., IPv4, IPv6", + "type": "string" + }, + "lac": { + "description": "location area code", + "type": "string" + }, + "mcc": { + "description": "mobile country code", + "type": "string" + }, + "mnc": { + "description": "mobile network code", + "type": "string" + }, + "mobileFlowFieldsVersion": { + "description": "version of the mobileFlowFields block", + "type": "number" + }, + "msisdn": { + "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device", + "type": "string" + }, + "otherEndpointIpAddress": { + "description": "IP address 
for the other endpoint, as used for the flow being reported on", + "type": "string" + }, + "otherEndpointPort": { + "description": "IP Port for the reporting entity, as used for the flow being reported on", + "type": "number" + }, + "otherFunctionalRole": { + "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...", + "type": "string" + }, + "rac": { + "description": "routing area code", + "type": "string" + }, + "radioAccessTechnology": { + "description": "Radio Access Technology e.g., 2G, 3G, LTE", + "type": "string" + }, + "reportingEndpointIpAddr": { + "description": "IP address for the reporting entity, as used for the flow being reported on", + "type": "string" + }, + "reportingEndpointPort": { + "description": "IP port for the reporting entity, as used for the flow being reported on", + "type": "number" + }, + "sac": { + "description": "service area code", + "type": "string" + }, + "samplingAlgorithm": { + "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied", + "type": "number" + }, + "tac": { + "description": "transport area code", + "type": "string" + }, + "tunnelId": { + "description": "tunnel identifier", + "type": "string" + }, + "vlanId": { + "description": "VLAN identifier used by this flow", + "type": "string" + } + }, + "required": [ "flowDirection", "gtpPerFlowMetrics", "ipProtocolType", + "ipVersion", "otherEndpointIpAddress", "otherEndpointPort", + "reportingEndpointIpAddr", "reportingEndpointPort" ] + }, + "otherFields": { + "description": "additional fields not reported elsewhere", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "required": [ "messageId", "text" ] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional stateChange fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "number" + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "required": [ "newState", "oldState", "stateInterface" ] + }, + "suppressedNvPairs": { + "description": "List 
of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling",
+      "type": "object",
+      "properties": {
+        "nvPairFieldName": {
+          "description": "Name of the field within which are the nvpair names to suppress",
+          "type": "string"
+        },
+        "suppressedNvPairNames": {
+          "description": "Array of nvpair names to suppress within the nvpairFieldName",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      },
+      "required": [ "nvPairFieldName", "suppressedNvPairNames" ]
+    },
+    "syslogFields": {
+      "description": "sysLog fields",
+      "type": "object",
+      "properties": {
+        "additionalFields": {
+          "description": "additional syslog fields if needed",
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/field"
+          }
+        },
+        "eventSourceHost": {
+          "description": "hostname of the device",
+          "type": "string"
+        },
+        "eventSourceType": {
+          "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction",
+          "type": "string"
+        },
+        "syslogFacility": {
+          "description": "numeric code from 0 to 23 for facility--see table in documentation",
+          "type": "number"
+        },
+        "syslogFieldsVersion": {
+          "description": "version of the syslogFields block",
+          "type": "number"
+        },
+        "syslogMsg": {
+          "description": "syslog message",
+          "type": "string"
+        },
+        "syslogPri": {
+          "description": "0-192 combined severity and facility",
+          "type": "number"
+        },
+        "syslogProc": {
+          "description": "identifies the application that originated the message",
+          "type": "string"
+        },
+        "syslogProcId": {
+          "description": "a change in the value of this field indicates a discontinuity in syslog reporting",
+          "type": "number"
+        },
+        "syslogSData": {
+          "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs",
+          "type": "string"
+        },
+        "syslogSdId": {
+          "description": "0-32 char in format name@number for example ourSDID@32473",
+          "type": "string"
+        },
+        "syslogSev": {
+          "description": "numerical code for severity derived from syslogPri as remainder of syslogPri / 8",
+          "type": "string"
+        },
+        "syslogTag": {
+          "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided",
+          "type": "string"
+        },
+        "syslogVer": {
+          "description": "IANA assigned version of the syslog protocol specification - typically 1",
+          "type": "number"
+        }
+      },
+      "required": [ "eventSourceType", "syslogMsg", "syslogTag" ]
+    },
+    "thresholdCrossingAlertFields": {
+      "description": "fields specific to threshold crossing alert events",
+      "type": "object",
+      "properties": {
+        "additionalFields": {
+          "description": "additional threshold crossing alert fields if needed",
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/field"
+          }
+        },
+        "additionalParameters": {
+          "description": "performance counters",
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/counter"
+          }
+        },
+        "alertAction": {
+          "description": "Event action",
+          "type": "string",
+          "enum": [
+            "CLEAR",
+            "CONT",
+            "SET"
+          ]
+        },
+        "alertDescription": {
+          "description": "Unique short alert description such as IF-SHUB-ERRDROP",
+          "type": "string"
+        },
+        "alertType": {
+          "description": "Event type",
+          "type": "string",
+          "enum": [
+            "CARD-ANOMALY",
+            "ELEMENT-ANOMALY",
+            "INTERFACE-ANOMALY",
+            "SERVICE-ANOMALY"
+          ]
+        },
+        "alertValue": {
+          "description": "Calculated API value (if applicable)",
+          "type": "string"
+        },
+        "associatedAlertIdList": {
+          "description": "List of
eventIds associated with the event being reported", + "type": "array", + "items": { "type": "string" } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": "string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + "description": "version of the thresholdCrossingAlertFields block", + "type": "number" + } + }, + "required": [ + "additionalParameters", + "alertAction", + "alertDescription", + "alertType", + "collectionTimestamp", + "eventSeverity", + "eventStartTimestamp" + ] + }, + "vNicUsage": { + "description": "usage of identified virtual network interface card", + "type": "object", + "properties": { + "broadcastPacketsIn": { "type": "number" }, + "broadcastPacketsOut": { "type": "number" }, + "bytesIn": { "type": "number" }, + "bytesOut": { "type": "number" }, + "multicastPacketsIn": { "type": "number" }, + "multicastPacketsOut": { "type": "number" }, + "packetsIn": { "type": "number" }, + "packetsOut": { "type": "number" }, + "unicastPacketsIn": { "type": "number" }, + "unicastPacketsOut": { "type": "number" }, + "vNicIdentifier": { "type": "string" } + }, + "required": [ "bytesIn", "bytesOut", "packetsIn", "packetsOut", "vNicIdentifier"] + } + }, + "title": "Event Listener", + "type": "object", + "properties": { + "event": {"$ref": "#/definitions/event"} + } +}
\ No newline at end of file diff --git a/dcaedt_be/src/main/webapp/WEB-INF/config/dcae-be/ves-schema/CommonEventFormat_v5.3.json b/dcaedt_be/src/main/webapp/WEB-INF/config/dcae-be/ves-schema/CommonEventFormat_v5.3.json new file mode 100644 index 0000000..fca55d9 --- /dev/null +++ b/dcaedt_be/src/main/webapp/WEB-INF/config/dcae-be/ves-schema/CommonEventFormat_v5.3.json @@ -0,0 +1,2004 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "definitions": { + "attCopyrightNotice": { + "description": "Copyright (c) <2016>, AT&T Intellectual Property. All other rights reserved", + "type": "object", + "properties": { + "useAndRedistribution": { + "description": "Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:", + "type": "string" + }, + "condition1": { + "description": "Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.", + "type": "string" + }, + "condition2": { + "description": "Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.", + "type": "string" + }, + "condition3": { + "description": "All advertising materials mentioning features or use of this software must display the following acknowledgement: This product includes software developed by the AT&T.", + "type": "string" + }, + "condition4": { + "description": "Neither the name of AT&T nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.", + "type": "string" + }, + "disclaimerLine1": { + "description": "THIS SOFTWARE IS PROVIDED BY AT&T INTELLECTUAL PROPERTY AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", + "type": "string" + }, + "disclaimerLine2": { + "description": "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL AT&T INTELLECTUAL PROPERTY BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", + "type": "string" + }, + "disclaimerLine3": { + "description": "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,", + "type": "string" + }, + "disclaimerLine4": { + "description": "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + "type": "string" + } + } + }, + "codecsInUse": { + "description": "number of times an identified codec was used over the measurementInterval", + "type": "object", + "properties": { + "codecIdentifier": { + "type": "string" + }, + "numberInUse": { + "type": "integer" + } + }, + "required": ["codecIdentifier", + "numberInUse"] + }, + "command": { + "description": "command from an event collector toward an event source", + "type": "object", + "properties": { + "commandType": { + "type": "string", + "enum": ["heartbeatIntervalChange", + "measurementIntervalChange", + "provideThrottlingState", + "throttlingSpecification"] + }, + "eventDomainThrottleSpecification": { + "$ref": "#/definitions/eventDomainThrottleSpecification" + }, + "heartbeatInterval": { + "type": "integer" + }, + "measurementInterval": { + "type": "integer" + } + }, + "required": ["commandType"] + }, + "commandList": { + "description": "array of commands from an event collector toward an event source", + "type": "array", + "items": { + "$ref": "#/definitions/command" + }, + "minItems": 0 + }, + "commonEventHeader": { + "description": "fields common to all events", + "type": "object", + "properties": { + "domain": { + "description": "the eventing domain associated with the event", + "type": "string", + "enum": ["fault", + "heartbeat", + "measurementsForVfScaling", + "mobileFlow", + "other", + "sipSignaling", + "stateChange", + "syslog", + "thresholdCrossingAlert", + "voiceQuality"] + }, + "eventId": { + "description": "event key that is unique to the event source", + "type": "string" + }, + "eventName": { + "description": "unique event name", + "type": "string" + }, + "eventType": { + "description": "for example - applicationVnf, guestOS, hostOS, platform", + "type": "string" + }, + "internalHeaderFields": { + "$ref": "#/definitions/internalHeaderFields" + }, + "lastEpochMicrosec": { + "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "nfcNamingCode": { + "description": "3 character network function component type, aligned with vfc naming standards", + "type": "string" + }, + "nfNamingCode": { + "description": "4 character network function type, aligned with vnf naming standards", + "type": "string" + }, + "priority": { + "description": "processing priority", + "type": "string", + "enum": ["High", + "Medium", + "Normal", + "Low"] + }, + "reportingEntityId": { + "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process", + "type": "string" + }, + "reportingEntityName": { + "description": "name of the entity reporting the event, for example, an EMS name; may be the same as sourceName", + "type": "string" + }, + "sequence": { + "description": "ordering of events communicated by an 
event source instance or 0 if not needed", + "type": "integer" + }, + "sourceId": { + "description": "UUID identifying the entity experiencing the event issue; must be populated by the ATT enrichment process", + "type": "string" + }, + "sourceName": { + "description": "name of the entity experiencing the event issue", + "type": "string" + }, + "startEpochMicrosec": { + "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "version": { + "description": "version of the event header", + "type": "number" + } + }, + "required": ["domain", + "eventId", + "eventName", + "lastEpochMicrosec", + "priority", + "reportingEntityName", + "sequence", + "sourceName", + "startEpochMicrosec", + "version"] + }, + "counter": { + "description": "performance counter", + "type": "object", + "properties": { + "criticality": { + "type": "string", + "enum": ["CRIT", + "MAJ"] + }, + "name": { + "type": "string" + }, + "thresholdCrossed": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "required": ["criticality", + "name", + "thresholdCrossed", + "value"] + }, + "cpuUsage": { + "description": "usage of an identified CPU", + "type": "object", + "properties": { + "cpuIdentifier": { + "description": "cpu identifier", + "type": "string" + }, + "cpuIdle": { + "description": "percentage of CPU time spent in the idle task", + "type": "number" + }, + "cpuUsageInterrupt": { + "description": "percentage of time spent servicing interrupts", + "type": "number" + }, + "cpuUsageNice": { + "description": "percentage of time spent running user space processes that have been niced", + "type": "number" + }, + "cpuUsageSoftIrq": { + "description": "percentage of time spent handling soft irq interrupts", + "type": "number" + }, + "cpuUsageSteal": { + "description": "percentage of time spent in involuntary wait which is neither user, system or idle time and is effectively time that went missing", + "type": "number" + }, + "cpuUsageSystem": { + "description": "percentage of time spent on system tasks running the kernel", + "type": "number" + }, + "cpuUsageUser": { + "description": "percentage of time spent running un-niced user space processes", + "type": "number" + }, + "cpuWait": { + "description": "percentage of CPU time spent waiting for I/O operations to complete", + "type": "number" + }, + "percentUsage": { + "description": "aggregate cpu usage of the virtual machine on which the VNFC reporting the event is running", + "type": "number" + } + }, + "required": ["cpuIdentifier", + "percentUsage"] + }, + "diskUsage": { + "description": "usage of an identified disk", + "type": "object", + "properties": { + "diskIdentifier": { + "description": "disk identifier", + "type": "string" + }, + "diskIoTimeAvg": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the average over the measurement interval", + "type": "number" + }, + "diskIoTimeLast": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskIoTimeMax": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; 
provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskIoTimeMin": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the minimum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadAvg": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadLast": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadMax": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadMin": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the minimum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteAvg": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteLast": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteMax": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteMin": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the minimum value measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadAvg": { + "description": "number of octets per second read from a disk or partition; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadLast": { + "description": "number of octets per second read from a disk or partition; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadMax": { + "description": "number of octets per second read from a disk or partition; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadMin": { + "description": "number of octets per second read from a disk or partition; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteAvg": { + "description": "number of octets per second written to a disk or partition; provide the average 
measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteLast": { + "description": "number of octets per second written to a disk or partition; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteMax": { + "description": "number of octets per second written to a disk or partition; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteMin": { + "description": "number of octets per second written to a disk or partition; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadAvg": { + "description": "number of read operations per second issued to the disk; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadLast": { + "description": "number of read operations per second issued to the disk; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadMax": { + "description": "number of read operations per second issued to the disk; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadMin": { + "description": "number of read operations per second issued to the disk; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteAvg": { + "description": "number of write operations per second issued to the disk; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteLast": { + "description": "number of write operations per second issued to the disk; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteMax": { + "description": "number of write operations per second issued to the disk; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteMin": { + "description": "number of write operations per second issued to the disk; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsAvg": { + "description": "queue size of pending I/O operations per second; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsLast": { + "description": "queue size of pending I/O operations per second; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsMax": { + "description": "queue size of pending I/O operations per second; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsMin": { + "description": "queue size of pending I/O operations per second; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadAvg": { + "description": "milliseconds a read operation took to complete; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadLast": { + "description": "milliseconds a read operation took to complete; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadMax": { + "description": "milliseconds a read operation took to complete; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadMin": { + "description": "milliseconds a read operation took to complete; provide the minimum 
measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteAvg": { + "description": "milliseconds a write operation took to complete; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteLast": { + "description": "milliseconds a write operation took to complete; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteMax": { + "description": "milliseconds a write operation took to complete; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteMin": { + "description": "milliseconds a write operation took to complete; provide the minimum measurement within the measurement interval", + "type": "number" + } + }, + "required": ["diskIdentifier"] + }, + "endOfCallVqmSummaries": { + "description": "provides end of call voice quality metrics", + "type": "object", + "properties": { + "adjacencyName": { + "description": " adjacency name", + "type": "string" + }, + "endpointDescription": { + "description": "Either Caller or Callee", + "type": "string", + "enum": ["Caller", + "Callee"] + }, + "endpointJitter": { + "description": "", + "type": "number" + }, + "endpointRtpOctetsDiscarded": { + "description": "", + "type": "number" + }, + "endpointRtpOctetsReceived": { + "description": "", + "type": "number" + }, + "endpointRtpOctetsSent": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsDiscarded": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsReceived": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsSent": { + "description": "", + "type": "number" + }, + "localJitter": { + "description": "", + "type": "number" + }, + "localRtpOctetsDiscarded": { + "description": "", + "type": "number" + }, + "localRtpOctetsReceived": { + "description": "", + "type": "number" + }, + "localRtpOctetsSent": { + "description": "", + "type": "number" + }, + "localRtpPacketsDiscarded": { + "description": "", + "type": "number" + }, + "localRtpPacketsReceived": { + "description": "", + "type": "number" + }, + "localRtpPacketsSent": { + "description": "", + "type": "number" + }, + "mosCqe": { + "description": "1-5 1dp", + "type": "number" + }, + "packetsLost": { + "description": "", + "type": "number" + }, + "packetLossPercent": { + "description": "Calculated percentage packet loss based on Endpoint RTP packets lost (as reported in RTCP) and Local RTP packets sent. Direction is based on Endpoint description (Caller, Callee). 
Decimal (2 dp)", + "type": "number" + }, + "rFactor": { + "description": "0-100", + "type": "number" + }, + "roundTripDelay": { + "description": "millisecs", + "type": "number" + } + }, + "required": ["adjacencyName", + "endpointDescription"] + }, + "event": { + "description": "the root level of the common event format", + "type": "object", + "properties": { + "commonEventHeader": { + "$ref": "#/definitions/commonEventHeader" + }, + "faultFields": { + "$ref": "#/definitions/faultFields" + }, + "heartbeatFields": { + "$ref": "#/definitions/heartbeatFields" + }, + "measurementsForVfScalingFields": { + "$ref": "#/definitions/measurementsForVfScalingFields" + }, + "mobileFlowFields": { + "$ref": "#/definitions/mobileFlowFields" + }, + "otherFields": { + "$ref": "#/definitions/otherFields" + }, + "sipSignalingFields": { + "$ref": "#/definitions/sipSignalingFields" + }, + "stateChangeFields": { + "$ref": "#/definitions/stateChangeFields" + }, + "syslogFields": { + "$ref": "#/definitions/syslogFields" + }, + "thresholdCrossingAlertFields": { + "$ref": "#/definitions/thresholdCrossingAlertFields" + }, + "voiceQualityFields": { + "$ref": "#/definitions/voiceQualityFields" + } + }, + "required": ["commonEventHeader"] + }, + "eventDomainThrottleSpecification": { + "description": "specification of what information to suppress within an event domain", + "type": "object", + "properties": { + "eventDomain": { + "description": "Event domain enum from the commonEventHeader domain field", + "type": "string" + }, + "suppressedFieldNames": { + "description": "List of optional field names in the event block that should not be sent to the Event Listener", + "type": "array", + "items": { + "type": "string" + } + }, + "suppressedNvPairsList": { + "description": "Optional list of specific NvPairsNames to suppress within a given Name-Value Field", + "type": "array", + "items": { + "$ref": "#/definitions/suppressedNvPairs" + } + } + }, + "required": ["eventDomain"] + }, + "eventDomainThrottleSpecificationList": { + "description": "array of eventDomainThrottleSpecifications", + "type": "array", + "items": { + "$ref": "#/definitions/eventDomainThrottleSpecification" + }, + "minItems": 0 + }, + "eventList": { + "description": "array of events", + "type": "array", + "items": { + "$ref": "#/definitions/event" + } + }, + "eventThrottlingState": { + "description": "reports the throttling in force at the event source", + "type": "object", + "properties": { + "eventThrottlingMode": { + "description": "Mode the event manager is in", + "type": "string", + "enum": ["normal", + "throttled"] + }, + "eventDomainThrottleSpecificationList": { + "$ref": "#/definitions/eventDomainThrottleSpecificationList" + } + }, + "required": ["eventThrottlingMode"] + }, + "faultFields": { + "description": "fields specific to fault events", + "type": "object", + "properties": { + "alarmAdditionalInformation": { + "description": "additional alarm information", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "alarmCondition": { + "description": "alarm condition reported by the device", + "type": "string" + }, + "alarmInterfaceA": { + "description": "card, port, channel or interface name of the device generating the alarm", + "type": "string" + }, + "eventCategory": { + "description": "Event category, for example: license, link, routing, security, signaling", + "type": "string" + }, + "eventSeverity": { + "description": "event severity", + "type": "string", + "enum": ["CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + 
"NORMAL"] + }, + "eventSourceType": { + "description": "type of event source; examples: card, host, other, port, portThreshold, router, slotThreshold, switch, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "faultFieldsVersion": { + "description": "version of the faultFields block", + "type": "number" + }, + "specificProblem": { + "description": "short description of the alarm or problem", + "type": "string" + }, + "vfStatus": { + "description": "virtual function status enumeration", + "type": "string", + "enum": ["Active", + "Idle", + "Preparing to terminate", + "Ready to terminate", + "Requesting termination"] + } + }, + "required": ["alarmCondition", + "eventSeverity", + "eventSourceType", + "faultFieldsVersion", + "specificProblem", + "vfStatus"] + }, + "featuresInUse": { + "description": "number of times an identified feature was used over the measurementInterval", + "type": "object", + "properties": { + "featureIdentifier": { + "type": "string" + }, + "featureUtilization": { + "type": "integer" + } + }, + "required": ["featureIdentifier", + "featureUtilization"] + }, + "field": { + "description": "name value pair", + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "required": ["name", + "value"] + }, + "filesystemUsage": { + "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second", + "type": "object", + "properties": { + "blockConfigured": { + "type": "number" + }, + "blockIops": { + "type": "number" + }, + "blockUsed": { + "type": "number" + }, + "ephemeralConfigured": { + "type": "number" + }, + "ephemeralIops": { + "type": "number" + }, + "ephemeralUsed": { + "type": "number" + }, + "filesystemName": { + "type": "string" + } + }, + "required": ["blockConfigured", + "blockIops", + "blockUsed", + "ephemeralConfigured", + "ephemeralIops", + "ephemeralUsed", + "filesystemName"] + }, + "gtpPerFlowMetrics": { + "description": "Mobility GTP Protocol per flow metrics", + "type": "object", + "properties": { + "avgBitErrorRate": { + "description": "average bit error rate", + "type": "number" + }, + "avgPacketDelayVariation": { + "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "avgPacketLatency": { + "description": "average delivery latency", + "type": "number" + }, + "avgReceiveThroughput": { + "description": "average receive throughput", + "type": "number" + }, + "avgTransmitThroughput": { + "description": "average transmit throughput", + "type": "number" + }, + "durConnectionFailedStatus": { + "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval", + "type": "number" + }, + "durTunnelFailedStatus": { + "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval", + "type": "number" + }, + "flowActivatedBy": { + "description": "Endpoint activating the flow", + "type": "string" + }, + "flowActivationEpoch": { + "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available", + "type": "number" + }, + 
"flowActivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowActivationTime": { + "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowDeactivatedBy": { + "description": "Endpoint deactivating the flow", + "type": "string" + }, + "flowDeactivationEpoch": { + "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time", + "type": "number" + }, + "flowDeactivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowDeactivationTime": { + "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowStatus": { + "description": "connection status at reporting time as a working / inactive / failed indicator value", + "type": "string" + }, + "gtpConnectionStatus": { + "description": "Current connection state at reporting time", + "type": "string" + }, + "gtpTunnelStatus": { + "description": "Current tunnel state at reporting time", + "type": "string" + }, + "ipTosCountList": { + "description": "array of key: value pairs where the keys are drawn from the IP Type-of-Service identifiers which range from '0' to '255', and the values are the count of packets that had those ToS identifiers in the flow", + "type": "array", + "items": { + "type": "array", + "items": [{ + "type": "string" + }, + { + "type": "number" + }] + } + }, + "ipTosList": { + "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'", + "type": "array", + "items": { + "type": "string" + } + }, + "largePacketRtt": { + "description": "large packet round trip time", + "type": "number" + }, + "largePacketThreshold": { + "description": "large packet threshold being applied", + "type": "number" + }, + "maxPacketDelayVariation": { + "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "maxReceiveBitRate": { + "description": "maximum receive bit rate", + "type": "number" + }, + "maxTransmitBitRate": { + "description": "maximum transmit bit rate", + "type": "number" + }, + "mobileQciCosCountList": { + "description": "array of key: value pairs where the keys are drawn from LTE QCI or UMTS class of service strings, and the values are the count of packets that had those strings in the flow", + "type": "array", + "items": { + "type": "array", + "items": [{ + "type": "string" + }, + { + "type": "number" + }] + } + }, + "mobileQciCosList": { + "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "numActivationFailures": { + "description": "Number of failed activation requests, as observed by the reporting node", + "type": "number" + }, + "numBitErrors": { + "description": "number of errored bits", + "type": "number" + }, + "numBytesReceived": { + "description": "number of bytes received, including retransmissions", + "type": "number" + }, + "numBytesTransmitted": { + "description": "number of bytes 
transmitted, including retransmissions", + "type": "number" + }, + "numDroppedPackets": { + "description": "number of received packets dropped due to errors per virtual interface", + "type": "number" + }, + "numGtpEchoFailures": { + "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2", + "type": "number" + }, + "numGtpTunnelErrors": { + "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1", + "type": "number" + }, + "numHttpErrors": { + "description": "Http error count", + "type": "number" + }, + "numL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, including retransmissions", + "type": "number" + }, + "numL7BytesTransmitted": { + "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions", + "type": "number" + }, + "numLostPackets": { + "description": "number of lost packets", + "type": "number" + }, + "numOutOfOrderPackets": { + "description": "number of out-of-order packets", + "type": "number" + }, + "numPacketErrors": { + "description": "number of errored packets", + "type": "number" + }, + "numPacketsReceivedExclRetrans": { + "description": "number of packets received, excluding retransmission", + "type": "number" + }, + "numPacketsReceivedInclRetrans": { + "description": "number of packets received, including retransmission", + "type": "number" + }, + "numPacketsTransmittedInclRetrans": { + "description": "number of packets transmitted, including retransmissions", + "type": "number" + }, + "numRetries": { + "description": "number of packet retries", + "type": "number" + }, + "numTimeouts": { + "description": "number of packet timeouts", + "type": "number" + }, + "numTunneledL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, excluding retransmissions", + "type": "number" + }, + "roundTripTime": { + "description": "round trip time", + "type": "number" + }, + "tcpFlagCountList": { + "description": "array of key: value pairs where the keys are drawn from TCP Flags and the values are the count of packets that had that TCP Flag in the flow", + "type": "array", + "items": { + "type": "array", + "items": [{ + "type": "string" + }, + { + "type": "number" + }] + } + }, + "tcpFlagList": { + "description": "Array of unique TCP Flags observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "timeToFirstByte": { + "description": "Time in milliseconds between the connection activation and first byte received", + "type": "number" + } + }, + "required": ["avgBitErrorRate", + "avgPacketDelayVariation", + "avgPacketLatency", + "avgReceiveThroughput", + "avgTransmitThroughput", + "flowActivationEpoch", + "flowActivationMicrosec", + "flowDeactivationEpoch", + "flowDeactivationMicrosec", + "flowDeactivationTime", + "flowStatus", + "maxPacketDelayVariation", + "numActivationFailures", + "numBitErrors", + "numBytesReceived", + "numBytesTransmitted", + "numDroppedPackets", + "numL7BytesReceived", + "numL7BytesTransmitted", + "numLostPackets", + "numOutOfOrderPackets", + "numPacketErrors", + "numPacketsReceivedExclRetrans", + "numPacketsReceivedInclRetrans", + "numPacketsTransmittedInclRetrans", + "numRetries", + "numTimeouts", + "numTunneledL7BytesReceived", + "roundTripTime", + "timeToFirstByte"] + }, + "heartbeatFields": { + "description": "optional field block for fields specific to heartbeat events", + "type": "object", + 
"properties": { + "additionalFields": { + "description": "additional heartbeat fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "heartbeatFieldsVersion": { + "description": "version of the heartbeatFields block", + "type": "number" + }, + "heartbeatInterval": { + "description": "current heartbeat interval in seconds", + "type": "integer" + } + }, + "required": ["heartbeatFieldsVersion", + "heartbeatInterval"] + }, + "internalHeaderFields": { + "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources", + "type": "object" + }, + "jsonObject": { + "description": "json object schema, name and other meta-information along with one or more object instances", + "type": "object", + "properties": { + "objectInstances": { + "description": "one or more instances of the jsonObject", + "type": "array", + "items": { + "$ref": "#/definitions/jsonObjectInstance" + } + }, + "objectName": { + "description": "name of the JSON Object", + "type": "string" + }, + "objectSchema": { + "description": "json schema for the object", + "type": "string" + }, + "objectSchemaUrl": { + "description": "Url to the json schema for the object", + "type": "string" + }, + "nfSubscribedObjectName": { + "description": "name of the object associated with the nfSubscriptonId", + "type": "string" + }, + "nfSubscriptionId": { + "description": "identifies an openConfig telemetry subscription on a network function, which configures the network function to send complex object data associated with the jsonObject", + "type": "string" + } + }, + "required": ["objectInstances", + "objectName"] + }, + "jsonObjectInstance": { + "description": "meta-information about an instance of a jsonObject along with the actual object instance", + "type": "object", + "properties": { + "objectInstance": { + "description": "an instance conforming to the jsonObject schema", + "type": "object" + }, + "objectInstanceEpochMicrosec": { + "description": "the unix time aka epoch time associated with this objectInstance--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "objectKeys": { + "description": "an ordered set of keys that identifies this particular instance of jsonObject", + "type": "array", + "items": { + "$ref": "#/definitions/key" + } + } + }, + "required": ["objectInstance"] + }, + "key": { + "description": "tuple which provides the name of a key along with its value and relative order", + "type": "object", + "properties": { + "keyName": { + "description": "name of the key", + "type": "string" + }, + "keyOrder": { + "description": "relative sequence or order of the key with respect to other keys", + "type": "integer" + }, + "keyValue": { + "description": "value of the key", + "type": "string" + } + }, + "required": ["keyName"] + }, + "latencyBucketMeasure": { + "description": "number of counts falling within a defined latency bucket", + "type": "object", + "properties": { + "countsInTheBucket": { + "type": "number" + }, + "highEndOfLatencyBucket": { + "type": "number" + }, + "lowEndOfLatencyBucket": { + "type": "number" + } + }, + "required": ["countsInTheBucket"] + }, + "measurementsForVfScalingFields": { + "description": "measurementsForVfScaling fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional name-value-pair fields", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "additionalMeasurements": { + "description": "array of named 
name-value-pair arrays", + "type": "array", + "items": { + "$ref": "#/definitions/namedArrayOfFields" + } + }, + "additionalObjects": { + "description": "array of JSON objects described by name, schema and other meta-information", + "type": "array", + "items": { + "$ref": "#/definitions/jsonObject" + } + }, + "codecUsageArray": { + "description": "array of codecs in use", + "type": "array", + "items": { + "$ref": "#/definitions/codecsInUse" + } + }, + "concurrentSessions": { + "description": "peak concurrent sessions for the VM or VNF over the measurementInterval", + "type": "integer" + }, + "configuredEntities": { + "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the VNF", + "type": "integer" + }, + "cpuUsageArray": { + "description": "usage of an array of CPUs", + "type": "array", + "items": { + "$ref": "#/definitions/cpuUsage" + } + }, + "diskUsageArray": { + "description": "usage of an array of disks", + "type": "array", + "items": { + "$ref": "#/definitions/diskUsage" + } + }, + "featureUsageArray": { + "description": "array of features in use", + "type": "array", + "items": { + "$ref": "#/definitions/featuresInUse" + } + }, + "filesystemUsageArray": { + "description": "filesystem usage of the VM on which the VNFC reporting the event is running", + "type": "array", + "items": { + "$ref": "#/definitions/filesystemUsage" + } + }, + "latencyDistribution": { + "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-VNF configured ranges", + "type": "array", + "items": { + "$ref": "#/definitions/latencyBucketMeasure" + } + }, + "meanRequestLatency": { + "description": "mean seconds required to respond to each request for the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "measurementInterval": { + "description": "interval over which measurements are being reported in seconds", + "type": "number" + }, + "measurementsForVfScalingVersion": { + "description": "version of the measurementsForVfScaling block", + "type": "number" + }, + "memoryUsageArray": { + "description": "memory usage of an array of VMs", + "type": "array", + "items": { + "$ref": "#/definitions/memoryUsage" + } + }, + "numberOfMediaPortsInUse": { + "description": "number of media ports in use", + "type": "integer" + }, + "requestRate": { + "description": "peak rate of service requests per second to the VNF over the measurementInterval", + "type": "number" + }, + "vnfcScalingMetric": { + "description": "represents busy-ness of the VNF from 0 to 100 as reported by the VNFC", + "type": "integer" + }, + "vNicPerformanceArray": { + "description": "usage of an array of virtual network interface cards", + "type": "array", + "items": { + "$ref": "#/definitions/vNicPerformance" + } + } + }, + "required": ["measurementInterval", + "measurementsForVfScalingVersion"] + }, + "memoryUsage": { + "description": "memory usage of an identified virtual machine", + "type": "object", + "properties": { + "memoryBuffered": { + "description": "kibibytes of temporary storage for raw disk blocks", + "type": "number" + }, + "memoryCached": { + "description": "kibibytes of memory used for cache", + "type": "number" + }, + "memoryConfigured": { + "description": "kibibytes of memory configured in the virtual machine on which the VNFC reporting the event is running", + "type": "number" + }, + "memoryFree": { + "description": "kibibytes of physical RAM left 
unused by the system", + "type": "number" + }, + "memorySlabRecl": { + "description": "the part of the slab that can be reclaimed such as caches measured in kibibytes", + "type": "number" + }, + "memorySlabUnrecl": { + "description": "the part of the slab that cannot be reclaimed even when lacking memory measured in kibibytes", + "type": "number" + }, + "memoryUsed": { + "description": "total memory minus the sum of free, buffered, cached and slab memory measured in kibibytes", + "type": "number" + }, + "vmIdentifier": { + "description": "virtual machine identifier associated with the memory metrics", + "type": "string" + } + }, + "required": ["memoryFree", + "memoryUsed", + "vmIdentifier"] + }, + "mobileFlowFields": { + "description": "mobileFlow fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional mobileFlow fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "applicationType": { + "description": "Application type inferred", + "type": "string" + }, + "appProtocolType": { + "description": "application protocol", + "type": "string" + }, + "appProtocolVersion": { + "description": "application protocol version", + "type": "string" + }, + "cid": { + "description": "cell id", + "type": "string" + }, + "connectionType": { + "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc", + "type": "string" + }, + "ecgi": { + "description": "Evolved Cell Global Id", + "type": "string" + }, + "flowDirection": { + "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow", + "type": "string" + }, + "gtpPerFlowMetrics": { + "$ref": "#/definitions/gtpPerFlowMetrics" + }, + "gtpProtocolType": { + "description": "GTP protocol", + "type": "string" + }, + "gtpVersion": { + "description": "GTP protocol version", + "type": "string" + }, + "httpHeader": { + "description": "HTTP request header, if the flow connects to a node referenced by HTTP", + "type": "string" + }, + "imei": { + "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "imsi": { + "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "ipProtocolType": { + "description": "IP protocol type e.g., TCP, UDP, RTP...", + "type": "string" + }, + "ipVersion": { + "description": "IP protocol version e.g., IPv4, IPv6", + "type": "string" + }, + "lac": { + "description": "location area code", + "type": "string" + }, + "mcc": { + "description": "mobile country code", + "type": "string" + }, + "mnc": { + "description": "mobile network code", + "type": "string" + }, + "mobileFlowFieldsVersion": { + "description": "version of the mobileFlowFields block", + "type": "number" + }, + "msisdn": { + "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device", + "type": "string" + }, + "otherEndpointIpAddress": { + "description": "IP address for the other endpoint, as used for the flow being reported on", + "type": "string" + }, + "otherEndpointPort": { + "description": "IP Port for the reporting entity, as used for the flow being reported on", + "type": "integer" + }, + "otherFunctionalRole": { + "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...", + "type": "string" + }, + "rac": { + "description": "routing area code", + 
"type": "string" + }, + "radioAccessTechnology": { + "description": "Radio Access Technology e.g., 2G, 3G, LTE", + "type": "string" + }, + "reportingEndpointIpAddr": { + "description": "IP address for the reporting entity, as used for the flow being reported on", + "type": "string" + }, + "reportingEndpointPort": { + "description": "IP port for the reporting entity, as used for the flow being reported on", + "type": "integer" + }, + "sac": { + "description": "service area code", + "type": "string" + }, + "samplingAlgorithm": { + "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied", + "type": "integer" + }, + "tac": { + "description": "transport area code", + "type": "string" + }, + "tunnelId": { + "description": "tunnel identifier", + "type": "string" + }, + "vlanId": { + "description": "VLAN identifier used by this flow", + "type": "string" + } + }, + "required": ["flowDirection", + "gtpPerFlowMetrics", + "ipProtocolType", + "ipVersion", + "mobileFlowFieldsVersion", + "otherEndpointIpAddress", + "otherEndpointPort", + "reportingEndpointIpAddr", + "reportingEndpointPort"] + }, + "namedArrayOfFields": { + "description": "an array of name value pairs along with a name for the array", + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "arrayOfFields": { + "description": "array of name value pairs", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + } + }, + "required": ["name", + "arrayOfFields"] + }, + "otherFields": { + "description": "fields for events belonging to the 'other' domain of the commonEventHeader domain enumeration", + "type": "object", + "properties": { + "hashOfNameValuePairArrays": { + "description": "array of named name-value-pair arrays", + "type": "array", + "items": { + "$ref": "#/definitions/namedArrayOfFields" + } + }, + "jsonObjects": { + "description": "array of JSON objects described by name, schema and other meta-information", + "type": "array", + "items": { + "$ref": "#/definitions/jsonObject" + } + }, + "nameValuePairs": { + "description": "array of name-value pairs", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "otherFieldsVersion": { + "description": "version of the otherFields block", + "type": "number" + } + }, + "required": ["otherFieldsVersion"] + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "required": ["messageId", + "text"] + }, + "sipSignalingFields": { + "description": "sip signaling fields", + "type": "object", + "properties": { + "additionalInformation": { + "description": "additional sip signaling fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + 
"compressedSip": { + "description": "the full SIP request/response including headers and bodies", + "type": "string" + }, + "correlator": { + "description": "this is the same for all events on this call", + "type": "string" + }, + "localIpAddress": { + "description": "IP address on VNF", + "type": "string" + }, + "localPort": { + "description": "port on VNF", + "type": "string" + }, + "remoteIpAddress": { + "description": "IP address of peer endpoint", + "type": "string" + }, + "remotePort": { + "description": "port of peer endpoint", + "type": "string" + }, + "sipSignalingFieldsVersion": { + "description": "version of the sipSignalingFields block", + "type": "number" + }, + "summarySip": { + "description": "the SIP Method or Response (‘INVITE’, ‘200 OK’, ‘BYE’, etc)", + "type": "string" + }, + "vendorVnfNameFields": { + "$ref": "#/definitions/vendorVnfNameFields" + } + }, + "required": ["correlator", + "localIpAddress", + "localPort", + "remoteIpAddress", + "remotePort", + "sipSignalingFieldsVersion", + "vendorVnfNameFields"] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional stateChange fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": ["inService", + "maintenance", + "outOfService"] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": ["inService", + "maintenance", + "outOfService"] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "number" + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "required": ["newState", + "oldState", + "stateChangeFieldsVersion", + "stateInterface"] + }, + "suppressedNvPairs": { + "description": "List of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling", + "type": "object", + "properties": { + "nvPairFieldName": { + "description": "Name of the field within which are the nvpair names to suppress", + "type": "string" + }, + "suppressedNvPairNames": { + "description": "Array of nvpair names to suppress within the nvpairFieldName", + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": ["nvPairFieldName", + "suppressedNvPairNames"] + }, + "syslogFields": { + "description": "sysLog fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional syslog fields if needed provided as name=value delimited by a pipe ‘|’ symbol, for example: 'name1=value1|name2=value2|…'", + "type": "string" + }, + "eventSourceHost": { + "description": "hostname of the device", + "type": "string" + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "syslogFacility": { + "description": "numeric code from 0 to 23 for facility--see table in documentation", + "type": "integer" + }, + "syslogFieldsVersion": { + "description": "version of the syslogFields block", + "type": "number" + }, + "syslogMsg": { + "description": "syslog message", + "type": "string" + }, + "syslogPri": { + "description": "0-192 combined severity and facility", + "type": "integer" + }, + "syslogProc": { + "description": "identifies the application that 
originated the message", + "type": "string" + }, + "syslogProcId": { + "description": "a change in the value of this field indicates a discontinuity in syslog reporting", + "type": "number" + }, + "syslogSData": { + "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", + "type": "string" + }, + "syslogSdId": { + "description": "0-32 char in format name@number for example ourSDID@32473", + "type": "string" + }, + "syslogSev": { + "description": "numerical Code for severity derived from syslogPri as remainder of syslogPri / 8", + "type": "string", + "enum": ["Alert", + "Critical", + "Debug", + "Emergency", + "Error", + "Info", + "Notice", + "Warning"] + }, + "syslogTag": { + "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", + "type": "string" + }, + "syslogVer": { + "description": "IANA assigned version of the syslog protocol specification - typically 1", + "type": "number" + } + }, + "required": ["eventSourceType", + "syslogFieldsVersion", + "syslogMsg", + "syslogTag"] + }, + "thresholdCrossingAlertFields": { + "description": "fields specific to threshold crossing alert events", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional threshold crossing alert fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "additionalParameters": { + "description": "performance counters", + "type": "array", + "items": { + "$ref": "#/definitions/counter" + } + }, + "alertAction": { + "description": "Event action", + "type": "string", + "enum": ["CLEAR", + "CONT", + "SET"] + }, + "alertDescription": { + "description": "Unique short alert description such as IF-SHUB-ERRDROP", + "type": "string" + }, + "alertType": { + "description": "Event type", + "type": "string", + "enum": ["CARD-ANOMALY", + "ELEMENT-ANOMALY", + "INTERFACE-ANOMALY", + "SERVICE-ANOMALY"] + }, + "alertValue": { + "description": "Calculated API value (if applicable)", + "type": "string" + }, + "associatedAlertIdList": { + "description": "List of eventIds associated with the event being reported", + "type": "array", + "items": { + "type": "string" + } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": ["CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL"] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": "string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + "description": "version of the thresholdCrossingAlertFields block", + "type": "number" + } + }, + "required": ["additionalParameters", + "alertAction", + "alertDescription", + "alertType", 
+ "collectionTimestamp", + "eventSeverity", + "eventStartTimestamp", + "thresholdCrossingFieldsVersion"] + }, + "vendorVnfNameFields": { + "description": "provides vendor, vnf and vfModule identifying information", + "type": "object", + "properties": { + "vendorName": { + "description": "VNF vendor name", + "type": "string" + }, + "vfModuleName": { + "description": "ASDC vfModuleName for the vfModule generating the event", + "type": "string" + }, + "vnfName": { + "description": "ASDC modelName for the VNF generating the event", + "type": "string" + } + }, + "required": ["vendorName"] + }, + "vNicPerformance": { + "description": "describes the performance and errors of an identified virtual network interface card", + "type": "object", + "properties": { + "receivedBroadcastPacketsAccumulated": { + "description": "Cumulative count of broadcast packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedBroadcastPacketsDelta": { + "description": "Count of broadcast packets received within the measurement interval", + "type": "number" + }, + "receivedDiscardedPacketsAccumulated": { + "description": "Cumulative count of discarded packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedDiscardedPacketsDelta": { + "description": "Count of discarded packets received within the measurement interval", + "type": "number" + }, + "receivedErrorPacketsAccumulated": { + "description": "Cumulative count of error packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedErrorPacketsDelta": { + "description": "Count of error packets received within the measurement interval", + "type": "number" + }, + "receivedMulticastPacketsAccumulated": { + "description": "Cumulative count of multicast packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedMulticastPacketsDelta": { + "description": "Count of multicast packets received within the measurement interval", + "type": "number" + }, + "receivedOctetsAccumulated": { + "description": "Cumulative count of octets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedOctetsDelta": { + "description": "Count of octets received within the measurement interval", + "type": "number" + }, + "receivedTotalPacketsAccumulated": { + "description": "Cumulative count of all packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedTotalPacketsDelta": { + "description": "Count of all packets received within the measurement interval", + "type": "number" + }, + "receivedUnicastPacketsAccumulated": { + "description": "Cumulative count of unicast packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedUnicastPacketsDelta": { + "description": "Count of unicast packets received within the measurement interval", + "type": "number" + }, + "transmittedBroadcastPacketsAccumulated": { + "description": "Cumulative count of broadcast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedBroadcastPacketsDelta": { + "description": "Count of broadcast packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedDiscardedPacketsAccumulated": { + "description": "Cumulative count of discarded packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedDiscardedPacketsDelta": { 
+ "description": "Count of discarded packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedErrorPacketsAccumulated": { + "description": "Cumulative count of error packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedErrorPacketsDelta": { + "description": "Count of error packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedMulticastPacketsAccumulated": { + "description": "Cumulative count of multicast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedMulticastPacketsDelta": { + "description": "Count of multicast packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedOctetsAccumulated": { + "description": "Cumulative count of octets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedOctetsDelta": { + "description": "Count of octets transmitted within the measurement interval", + "type": "number" + }, + "transmittedTotalPacketsAccumulated": { + "description": "Cumulative count of all packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedTotalPacketsDelta": { + "description": "Count of all packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedUnicastPacketsAccumulated": { + "description": "Cumulative count of unicast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedUnicastPacketsDelta": { + "description": "Count of unicast packets transmitted within the measurement interval", + "type": "number" + }, + "valuesAreSuspect": { + "description": "Indicates whether vNicPerformance values are likely inaccurate due to counter overflow or other condtions", + "type": "string", + "enum": ["true", + "false"] + }, + "vNicIdentifier": { + "description": "vNic identification", + "type": "string" + } + }, + "required": ["valuesAreSuspect", + "vNicIdentifier"] + }, + "voiceQualityFields": { + "description": "provides statistics related to customer facing voice products", + "type": "object", + "properties": { + "additionalInformation": { + "description": "additional voice quality fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "calleeSideCodec": { + "description": "callee codec for the call", + "type": "string" + }, + "callerSideCodec": { + "description": "caller codec for the call", + "type": "string" + }, + "correlator": { + "description": "this is the same for all events on this call", + "type": "string" + }, + "endOfCallVqmSummaries": { + "$ref": "#/definitions/endOfCallVqmSummaries" + }, + "phoneNumber": { + "description": "phone number associated with the correlator", + "type": "string" + }, + "midCallRtcp": { + "description": "Base64 encoding of the binary RTCP data excluding Eth/IP/UDP headers", + "type": "string" + }, + "vendorVnfNameFields": { + "$ref": "#/definitions/vendorVnfNameFields" + }, + "voiceQualityFieldsVersion": { + "description": "version of the voiceQualityFields block", + "type": "number" + } + }, + "required": ["calleeSideCodec", + "callerSideCodec", + "correlator", + "midCallRtcp", + "vendorVnfNameFields", + "voiceQualityFieldsVersion"] + } + }, + "title": "Event Listener", + "type": "object", + "properties": { + "event": { + "$ref": "#/definitions/event" + } + } +}
\ No newline at end of file
diff --git a/dcaedt_be/src/main/webapp/WEB-INF/jetty-web.xml b/dcaedt_be/src/main/webapp/WEB-INF/jetty-web.xml
new file mode 100644
index 0000000..6d5a8f6
--- /dev/null
+++ b/dcaedt_be/src/main/webapp/WEB-INF/jetty-web.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE Configure PUBLIC
+ "-//Mort Bay Consulting//DTD Configure//EN"
+ "http://www.eclipse.org/jetty/configure_9_0.dtd">
+
+<Configure class="org.eclipse.jetty.webapp.WebAppContext">
+ <Set name="contextPath">/dcae</Set>
+ <Set name="parentLoaderPriority">true</Set>
+ <!-- <Call name="addServerClass">
+ <Arg>-org.eclipse.jetty.util</Arg>
+ </Call> -->
+</Configure>
diff --git a/dcaedt_be/src/main/webapp/WEB-INF/web.xml b/dcaedt_be/src/main/webapp/WEB-INF/web.xml
new file mode 100644
index 0000000..ab6a37f
--- /dev/null
+++ b/dcaedt_be/src/main/webapp/WEB-INF/web.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<web-app xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
+ version="3.0">
+</web-app>
diff --git a/dcaedt_be/src/test/java/org/onap/sdc/dcae/VesStructureLoaderMock.java b/dcaedt_be/src/test/java/org/onap/sdc/dcae/VesStructureLoaderMock.java
new file mode 100644
index 0000000..34db9fa
--- /dev/null
+++ b/dcaedt_be/src/test/java/org/onap/sdc/dcae/VesStructureLoaderMock.java
@@ -0,0 +1,116 @@
+package org.onap.sdc.dcae;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonIOException;
+import com.google.gson.JsonSyntaxException;
+import com.google.gson.reflect.TypeToken;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang.StringUtils;
+import org.onap.sdc.dcae.ves.EventListenerDefinition;
+import org.onap.sdc.dcae.ves.VesDataItemsDefinition;
+import org.onap.sdc.dcae.ves.VesDataTypeDefinition;
+import org.onap.sdc.dcae.ves.VesJsonDeserializer;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.FilenameFilter;
+import java.lang.reflect.Type;
+import java.util.*;
+import java.util.stream.Collectors;
+
+public class VesStructureLoaderMock {
+
+ private Map<String, EventListenerDefinition> eventListeners = new HashMap<>();
+ private Type type = new TypeToken<VesDataItemsDefinition>() {
+ }.getType();
+ private Gson gson = new GsonBuilder().registerTypeAdapter(type, new VesJsonDeserializer()).create();
+ private final String schemaNamePrefix = "CommonEventFormat_v";
+ private final String schemaNameSuffix = ".json";
+ private List<String> initErrors;
+
+ public VesStructureLoaderMock() {
+ this(true);
+ }
+
+ public VesStructureLoaderMock(boolean validateAndResolve) {
+ this(validateAndResolve, System.getProperty("user.dir") + "/src/test/resources/ves-schema");
+ }
+
+ public VesStructureLoaderMock(boolean validateAndResolve, String path) {
+ initErrors = init(validateAndResolve, path);
+ }
+
+ public List<String> init(boolean validateAndResolve, String pathToSchemaDir) {
+
+ List<String> parseErrors = new ArrayList<>();
+ File dir = new File(pathToSchemaDir);
+ File[] files = dir.listFiles(new FilenameFilter() {
+ @Override public boolean accept(File dir, String name) {
+ return name.startsWith(schemaNamePrefix) && name.endsWith(schemaNameSuffix);
+ }
+ });
+ if (ArrayUtils.isEmpty(files)) {
+ parseErrors.add("No VES schema files found");
+ } else {
+ for (File f : files) {
+ String error =
parseJsonFileAndSaveToMap(f, validateAndResolve); + if (StringUtils.isNotBlank(error)) { + parseErrors.add("Error: parsing VES schema file " + f.getName() + " failed due to " + error); + } + } + } + return parseErrors; + + } + + public Map<String, VesDataTypeDefinition> getEventListenerDefinitionByVersion(String version) { + return eventListeners.get(version).getProperties().get(EventListenerDefinition.EVENT_ROOT).getProperties(); + } + + public Set<String> getAvailableVersionsList() { + return eventListeners.keySet(); + } + + public Map<String, Set<String>> getAvailableVersionsAndEventTypes() { + return eventListeners.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> getEventListenerDefinitionByVersion(e.getKey()).keySet())); + } + + public Set<String> getEventTypeListByVersion(String version) { + return getEventListenerDefinitionByVersion(version).keySet(); + } + + public String getVersionFromFileName(String fileName) { + return fileName.replace(schemaNamePrefix, "").replace(schemaNameSuffix, ""); + } + + private String parseJsonFileAndSaveToMap(File file, boolean validateAndResolve) { + String validationError = null; + try { + EventListenerDefinition eventListener = gson.fromJson(new FileReader(file), EventListenerDefinition.class); + if (validateAndResolve) + validationError = getValidatorMessage(eventListener); + if (StringUtils.isEmpty(validationError)) + eventListeners.put(getVersionFromFileName(file.getName()), eventListener); + } catch (FileNotFoundException | JsonIOException | JsonSyntaxException e) { + validationError = e.getMessage(); + } + return validationError; + } + + public Map<String, EventListenerDefinition> getEventListeners() { + return eventListeners; + } + + public List<String> getInitErrors() { + return initErrors; + } + + private String getValidatorMessage(EventListenerDefinition eventListenerDefinition) { + String validationError = eventListenerDefinition.validate(); + if (StringUtils.isBlank(validationError)) + validationError = eventListenerDefinition.resolveRefTypes(); + return validationError; + } +} diff --git a/dcaedt_be/src/test/java/org/onap/sdc/dcae/composition/impl/ReferenceBusinessLogicTest.java b/dcaedt_be/src/test/java/org/onap/sdc/dcae/composition/impl/ReferenceBusinessLogicTest.java new file mode 100644 index 0000000..c353701 --- /dev/null +++ b/dcaedt_be/src/test/java/org/onap/sdc/dcae/composition/impl/ReferenceBusinessLogicTest.java @@ -0,0 +1,123 @@ +package org.onap.sdc.dcae.composition.impl; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; +import org.onap.sdc.dcae.client.ISdcClient; +import org.onap.sdc.dcae.composition.restmodels.MonitoringComponent; +import org.onap.sdc.dcae.composition.restmodels.sdc.*; +import org.onap.sdc.dcae.errormng.ErrorConfigurationLoader; +import org.onap.sdc.dcae.errormng.ResponseFormat; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.client.HttpClientErrorException; + +import java.util.*; + +import static org.mockito.Matchers.anyString; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.*; + +@RunWith(MockitoJUnitRunner.class) +public class ReferenceBusinessLogicTest { + private String userId = "me"; + private String requestId = "1"; + private String monitoringComponentName = "monitoringComponentName"; + private String 
serviceUuid = "serviceUuid"; + private String vfiName = "vfiName"; + + @Mock + private ISdcClient sdcClientMock; + @Mock + private ResourceDetailed templateMC; + + @InjectMocks + ReferenceBusinessLogic classUnderTest; + + @Before + public void setup(){ + classUnderTest.setSdcRestClient(sdcClientMock); + new ErrorConfigurationLoader(System.getProperty("user.dir")+"/src/main/webapp/WEB-INF"); + } + + @Test + public void successfulFetchMonitoringComponents() throws Exception { + when(sdcClientMock.getResource(anyString(),anyString())).thenReturn(templateMC); + ExternalReferencesMap refs = new ExternalReferencesMap(); + refs.put("vfi1", Arrays.asList("a","b","c","d")); + refs.put("vfi2", Arrays.asList("u","v","w","x","y","z")); + Map<String, List<MonitoringComponent>> result = classUnderTest.fetchMonitoringComponents(refs, requestId); + verify(sdcClientMock,times(10)).getResource(anyString(),anyString()); + Assert.assertEquals(1, result.size()); + Assert.assertEquals(10, result.get("monitoringComponents").size()); + } + + @Test + public void partialSuccessfulFetchMonitoringComponents() throws Exception { + when(sdcClientMock.getResource(anyString(),anyString())).thenReturn(templateMC); + when(sdcClientMock.getResource(eq("no_such_uuid"),anyString())).thenThrow(new HttpClientErrorException(HttpStatus.NOT_FOUND)); + ExternalReferencesMap refs = new ExternalReferencesMap(); + refs.put("vfi1", Collections.singletonList("abc")); + refs.put("vfi2", Collections.singletonList("xyz")); + refs.put("vfi3", Collections.singletonList("no_such_uuid")); + Map<String, List<MonitoringComponent>> result = classUnderTest.fetchMonitoringComponents(refs, requestId); + verify(sdcClientMock,times(3)).getResource(anyString(),anyString()); + Assert.assertEquals(2, result.size()); + Assert.assertEquals(2, result.get("monitoringComponents").size()); + Assert.assertEquals(1, result.get("unavailable").size()); + } + + @Test(expected=RuntimeException.class) + public void deleteVfcmtReference_deleteFailed() { + doThrow(RuntimeException.class).when(sdcClientMock).deleteExternalMonitoringReference(anyString(), anyString(), anyString(), anyString(), anyString(), anyString()); + classUnderTest.deleteVfcmtReference(userId, "", "", "", "", requestId); + } + @Test + public void deleteVfcmtReference_deleteSuccess() { + classUnderTest.deleteVfcmtReference(userId, "", "", "", "", requestId); + verify(sdcClientMock).deleteExternalMonitoringReference(anyString(), anyString(), anyString(), anyString(), anyString(), anyString()); + } + + private void mockGetService() throws Exception { + ServiceDetailed serviceDetailed = new ServiceDetailed(); + ResourceInstance resourceInstance = new ResourceInstance(); + Artifact artifact = new Artifact(); + artifact.setArtifactName(monitoringComponentName); + resourceInstance.setArtifacts(Collections.singletonList(artifact)); + resourceInstance.setResourceInstanceName(vfiName); + serviceDetailed.setResources(Collections.singletonList(resourceInstance)); + when(sdcClientMock.getService(serviceUuid, requestId)).thenReturn(serviceDetailed); + } + + @Test + public void deleteVfcmtReferenceBlueprint_deleteSuccess() throws Exception { + mockGetService(); + ResponseEntity responseEntity = classUnderTest.deleteVfcmtReferenceBlueprint(userId, "", monitoringComponentName, serviceUuid, vfiName, "", requestId); + verify(sdcClientMock).getService(serviceUuid, requestId); + verify(sdcClientMock).deleteInstanceResourceArtifact(anyString(), anyString(), anyString(), anyString(), anyString(), anyString()); + 
Assert.assertEquals(HttpStatus.OK, responseEntity.getStatusCode());
+ }
+
+ @Test
+ public void deleteVfcmtReferenceBlueprint_exceptionSdcGetService() throws Exception {
+ when(sdcClientMock.getService(serviceUuid, requestId)).thenThrow(new RuntimeException(""));
+
+ ResponseEntity<ResponseFormat> responseEntity = classUnderTest.deleteVfcmtReferenceBlueprint(userId, "", monitoringComponentName, serviceUuid, vfiName, "", requestId);
+
+ Assert.assertEquals("The request was partially successful. Removing the attached Blueprint from the service has failed. You must manually delete the artifact.", responseEntity.getBody().getRequestError().getServiceException().getFormattedErrorMessage());
+ }
+
+ @Test
+ public void deleteVfcmtReferenceBlueprint_exceptionSdcDeleteInstanceResourceArtifact() throws Exception {
+ mockGetService();
+ doThrow(new RuntimeException("")).when(sdcClientMock).deleteInstanceResourceArtifact(anyString(), anyString(), anyString(), anyString(), anyString(), anyString());
+
+ ResponseEntity<ResponseFormat> responseEntity = classUnderTest.deleteVfcmtReferenceBlueprint(userId, "", monitoringComponentName, serviceUuid, vfiName, "", requestId);
+
+ Assert.assertEquals("The request was partially successful. Removing the attached Blueprint from the service has failed. You must manually delete the artifact.", responseEntity.getBody().getRequestError().getServiceException().getFormattedErrorMessage());
+ }
+}
diff --git a/dcaedt_be/src/test/java/org/onap/sdc/dcae/composition/impl/VfcmtBusinessLogicTest.java b/dcaedt_be/src/test/java/org/onap/sdc/dcae/composition/impl/VfcmtBusinessLogicTest.java
new file mode 100644
index 0000000..12ed040
--- /dev/null
+++ b/dcaedt_be/src/test/java/org/onap/sdc/dcae/composition/impl/VfcmtBusinessLogicTest.java
@@ -0,0 +1,310 @@
+package org.onap.sdc.dcae.composition.impl;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+import org.onap.sdc.dcae.catalog.asdc.ASDCException;
+import org.onap.sdc.dcae.client.ISdcClient;
+import org.onap.sdc.dcae.client.SdcRestClient;
+import org.onap.sdc.dcae.composition.restmodels.CreateVFCMTRequest;
+import org.onap.sdc.dcae.composition.restmodels.ImportVFCMTRequest;
+import org.onap.sdc.dcae.composition.restmodels.MonitoringComponent;
+import org.onap.sdc.dcae.composition.restmodels.VfcmtData;
+import org.onap.sdc.dcae.composition.restmodels.sdc.Artifact;
+import org.onap.sdc.dcae.composition.restmodels.sdc.ExternalReferencesMap;
+import org.onap.sdc.dcae.composition.restmodels.sdc.Resource;
+import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed;
+import org.onap.sdc.dcae.composition.util.DcaeBeConstants;
+import org.onap.sdc.dcae.errormng.ErrorConfigurationLoader;
+import org.onap.sdc.dcae.errormng.PolicyException;
+import org.onap.sdc.dcae.errormng.RequestError;
+import org.onap.sdc.dcae.errormng.ResponseFormat;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.client.HttpClientErrorException;
+
+import java.util.*;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.*;
+import static org.onap.sdc.dcae.composition.util.DcaeBeConstants.LifecycleStateEnum.CERTIFIED;
+import static org.onap.sdc.dcae.composition.util.DcaeBeConstants.LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT;
+
+public class VfcmtBusinessLogicTest {
+
+
private ISdcClient sdcClientMock = Mockito.mock(SdcRestClient.class); + private ResourceDetailed templateMC = Mockito.mock(ResourceDetailed.class); + + private VfcmtBusinessLogic vfcmtBusinessLogic = new VfcmtBusinessLogic(); + private ImportVFCMTRequest request = new ImportVFCMTRequest(); + + private String userId = "me"; + private String requestId = "1"; + + @Before + public void setup(){ + MockitoAnnotations.initMocks(this); + new ErrorConfigurationLoader(System.getProperty("user.dir")+"/src/main/webapp/WEB-INF"); + vfcmtBusinessLogic.setSdcRestClient(sdcClientMock); + request.setTemplateUuid("577"); + request.setVfiName("vfi_XX"); + request.setDescription("description"); + request.setFlowType("SNMP"); + request.setName("newVfcmt"); + request.setServiceUuid("service99999"); + request.setContextType("services"); + } + + @Test + public void sdcIsDown_creatingVfcmt_gotResponseWithError500() throws Exception{ + RequestError requestError = new RequestError(); + requestError.setPolicyException(new PolicyException("POL5000", "Error: Internal Server Error. Please try again later.", null)); + when(sdcClientMock.createResource(userId,request,requestId)).thenThrow(new ASDCException(HttpStatus.INTERNAL_SERVER_ERROR, requestError)); + + ResponseEntity res = vfcmtBusinessLogic.createMcFromTemplate(userId,request,requestId); + verify(sdcClientMock).getResource("577",requestId); + verify(sdcClientMock,times(0)).getResourceArtifact(anyString(),anyString(),anyString()); + Assert.assertEquals(500, res.getStatusCodeValue()); + } + + @Test + public void uploadCloneCdumpFailed_creatingVfcmt_createVfcmtRolledBack() throws Exception { + RequestError requestError = new RequestError(); + requestError.setPolicyException(new PolicyException("POL5000", "Error: Internal Server Error. 
Please try again later.", null)); + when(sdcClientMock.createResourceArtifact(anyString(),anyString(),any(),anyString())).thenThrow(new ASDCException(HttpStatus.INTERNAL_SERVER_ERROR, requestError)); + when(sdcClientMock.createResource(userId,request,requestId)).thenReturn(templateMC); + when(sdcClientMock.getResourceArtifact(anyString(), anyString(), anyString())).thenReturn("{\"flowType\":\"don't override\""); + when(templateMC.getUuid()).thenReturn("3"); + when(sdcClientMock.getResource(anyString(),anyString())).thenReturn(templateMC); + emulateListOfArtifactsWithCompositionYml(); + + vfcmtBusinessLogic.createMcFromTemplate(userId, request, requestId); + + // making sure rollback is performed if exception is thrown + verify(sdcClientMock).changeResourceLifecycleState(anyString(),anyString(),anyString(),anyString(),anyString()); + } + + // happy happy joy joy + @Test + public void successfulCreationAndAttachmentOfVfcmt() throws Exception { + when(templateMC.getUuid()).thenReturn("3"); + when(sdcClientMock.getResource(anyString(),anyString())).thenReturn(templateMC); + ResourceDetailed mockedVfcmt = Mockito.mock(ResourceDetailed.class); + when(mockedVfcmt.getUuid()).thenReturn("5"); + when(sdcClientMock.createResource(anyString(),any(),anyString())).thenReturn(mockedVfcmt); + when(sdcClientMock.getResourceArtifact(anyString(),anyString(),anyString())).thenReturn("3243324"); + + emulateListOfArtifactsWithCompositionYml(); + + vfcmtBusinessLogic.createMcFromTemplate(userId, request,requestId); + + verify(sdcClientMock).createResource(userId, request,requestId); + verify(sdcClientMock).getResource(anyString(),anyString()); + verify(sdcClientMock).getResourceArtifact(anyString(),anyString(),anyString()); + verify(sdcClientMock, times(2)).createResourceArtifact(anyString(),anyString(),any(),anyString()); + verify(sdcClientMock).addExternalMonitoringReference(anyString(),any(),any(),anyString()); + verify(sdcClientMock).changeResourceLifecycleState(anyString(),anyString(),anyString(),anyString(),anyString()); + } + + @Test + public void successfulImportAndAttachmentOfVfcmtAlreadyConnectedWithoutEditDoCheckin() throws Exception { + when(sdcClientMock.getResource(anyString(),anyString())).thenReturn(templateMC); + when(sdcClientMock.getResourceArtifact(anyString(),anyString(),anyString())).thenReturn("{\"flowType\":\"don't override\"}"); + when(templateMC.getLifecycleState()).thenReturn("NOT_CERTIFIED_CHECKOUT"); + emulateListOfArtifactsWithCompositionYmlAndSvcRef(); + request.setCloneVFCMT(false); + request.setUpdateFlowType(false); + vfcmtBusinessLogic.importMC(userId, request, requestId); + + verify(sdcClientMock, times(0)).createResource(userId, request, requestId); + verify(sdcClientMock).getResource(anyString(),anyString()); + verify(sdcClientMock).getResourceArtifact(anyString(),anyString(),anyString()); + verify(sdcClientMock, times(0)).createResourceArtifact(anyString(),anyString(),any(),anyString()); + verify(sdcClientMock, times(0)).updateResourceArtifact(anyString(), anyString(), any(), anyString()); + verify(sdcClientMock).addExternalMonitoringReference(anyString(),any(),any(),anyString()); + verify(sdcClientMock).changeResourceLifecycleState(anyString(),anyString(),anyString(),anyString(),anyString()); + } + + + @Test + public void successfulImportAndAttachmentOfVfcmtAlreadyConnectedUpdateFlowTypeCheckoutCheckin() throws Exception { + when(sdcClientMock.getResource(anyString(),anyString())).thenReturn(templateMC); + when(templateMC.getUuid()).thenReturn("3"); + 
when(sdcClientMock.changeResourceLifecycleState(anyString(), anyString(), anyString(), anyString(), anyString())).thenReturn(templateMC); + when(sdcClientMock.updateResourceArtifact(anyString(), anyString(), any(), anyString())).thenReturn(new Artifact()); + when(sdcClientMock.getResourceArtifact(anyString(),anyString(),anyString())).thenReturn("{\"cid\":\"xsssdaerrwr\"}"); + when(templateMC.getLifecycleState()).thenReturn("NOT_CERTIFIED_CHECKIN").thenReturn("NOT_CERTIFIED_CHECKOUT"); + emulateListOfArtifactsWithCompositionYmlAndSvcRef(); + request.setCloneVFCMT(false); + request.setUpdateFlowType(true); + vfcmtBusinessLogic.importMC(userId, request, requestId); + + verify(sdcClientMock, times(0)).createResource(userId, request, requestId); + verify(sdcClientMock).getResource(anyString(),anyString()); + verify(sdcClientMock).getResourceArtifact(anyString(),anyString(),anyString()); + verify(sdcClientMock, times(0)).createResourceArtifact(anyString(),anyString(),any(),anyString()); + verify(sdcClientMock, times(1)).updateResourceArtifact(anyString(), anyString(), any(), anyString()); + verify(sdcClientMock).addExternalMonitoringReference(anyString(),any(),any(),anyString()); + verify(sdcClientMock, times(2)).changeResourceLifecycleState(anyString(),anyString(),anyString(),anyString(),anyString()); + } + + + @Test + public void successfulFetchVfcmtDataFull() throws Exception { + String templateUuid = "3"; + when(templateMC.getUuid()).thenReturn(templateUuid); + when(sdcClientMock.getResource(anyString(),anyString())).thenReturn(templateMC); + emulateListOfArtifactsWithCompositionYmlAndSvcRef(); + when(sdcClientMock.getResourceArtifact(templateUuid, "svcRefArtifactUuid", requestId)).thenReturn("thisIsTheServiceId/resources/thisIsTheVfiName"); + when(sdcClientMock.getResourceArtifact(templateUuid, "compositionArtifactUuid", requestId)).thenReturn("\"flowType\":\"Syslog\""); + ResponseEntity<VfcmtData> result = vfcmtBusinessLogic.getVfcmtReferenceData(templateUuid, requestId); + verify(sdcClientMock).getResource(anyString(),anyString()); + verify(sdcClientMock,times(2)).getResourceArtifact(anyString(),anyString(),anyString()); + Assert.assertEquals(200, result.getStatusCodeValue()); + Assert.assertEquals("Syslog", result.getBody().getFlowType()); + Assert.assertEquals("thisIsTheServiceId", result.getBody().getServiceUuid()); + Assert.assertEquals("thisIsTheVfiName", result.getBody().getVfiName()); + } + + @Test + public void successfulFetchVfcmtDataPartial() throws Exception { + String templateUuid = "3"; + when(templateMC.getUuid()).thenReturn(templateUuid); + when(sdcClientMock.getResource(anyString(),anyString())).thenReturn(templateMC); + emulateListOfArtifactsWithCompositionYml(); + when(sdcClientMock.getResourceArtifact(templateUuid, "compositionArtifactUuid", requestId)).thenReturn("\"flowType\":\"Syslog\""); + ResponseEntity<VfcmtData> result = vfcmtBusinessLogic.getVfcmtReferenceData(templateUuid, requestId); + verify(sdcClientMock).getResource(anyString(),anyString()); + verify(sdcClientMock,times(1)).getResourceArtifact(anyString(),anyString(),anyString()); + Assert.assertEquals(200, result.getStatusCodeValue()); + Assert.assertEquals("Syslog", result.getBody().getFlowType()); + Assert.assertEquals(null, result.getBody().getServiceUuid()); + Assert.assertEquals(null, result.getBody().getVfiName()); + } + + @Test + public void successfulFetchVfcmtDataEmpty() throws Exception { + + String templateUuid = "3"; + when(templateMC.getUuid()).thenReturn(templateUuid); + 
when(sdcClientMock.getResource(anyString(),anyString())).thenReturn(templateMC);
+ emulateListOfArtifactsWithCompositionYml();
+ when(sdcClientMock.getResourceArtifact(templateUuid, "compositionArtifactUuid", requestId)).thenReturn("");
+ ResponseEntity<VfcmtData> result = vfcmtBusinessLogic.getVfcmtReferenceData(templateUuid, requestId);
+ verify(sdcClientMock).getResource(anyString(),anyString());
+ verify(sdcClientMock,times(1)).getResourceArtifact(anyString(),anyString(),anyString());
+ Assert.assertEquals(200, result.getStatusCodeValue());
+ Assert.assertEquals(null, result.getBody().getFlowType());
+ Assert.assertEquals(null, result.getBody().getServiceUuid());
+ Assert.assertEquals(null, result.getBody().getVfiName());
+ }
+
+ @Test
+ public void fetchVfcmtDataNoCompositionFound() throws Exception {
+
+ String templateUuid = "3";
+ when(templateMC.getUuid()).thenReturn(templateUuid);
+ when(templateMC.getName()).thenReturn(templateUuid);
+ when(sdcClientMock.getResource(anyString(),anyString())).thenReturn(templateMC);
+ ResponseEntity<ResponseFormat> result = vfcmtBusinessLogic.getVfcmtReferenceData(templateUuid, requestId);
+ verify(sdcClientMock).getResource(anyString(),anyString());
+ verify(sdcClientMock,times(0)).getResourceArtifact(anyString(),anyString(),anyString());
+ Assert.assertEquals(404, result.getStatusCodeValue());
+ Assert.assertEquals("Error – Could not read component 3 details.", result.getBody().getRequestError().getServiceException().getFormattedErrorMessage());
+
+ }
+
+ @Test
+ public void getVfcmtsForMigration() throws Exception {
+ ExternalReferencesMap connectedVfcmts = new ExternalReferencesMap();
+ connectedVfcmts.put("11",Arrays.asList("Red", "Blue", "Yellow"));
+ connectedVfcmts.put("22",Arrays.asList("Ibiza", "Bora Bora", "Mykonos"));
+ connectedVfcmts.put("33",Arrays.asList("Large", "Medium", "Small"));
+ connectedVfcmts.put("44",Arrays.asList("Basket", "Foot", "Volley"));
+
+ when(sdcClientMock.getMonitoringReferences(anyString(),anyString(),anyString(),anyString())).thenReturn(connectedVfcmts);
+
+ Resource myRedResource = new Resource();
+ myRedResource.setUuid("Red");
+ myRedResource.setLastUpdaterUserId("me");
+ myRedResource.setLifecycleState(NOT_CERTIFIED_CHECKOUT.name());
+
+ Resource herRaphaelResource = new Resource();
+ herRaphaelResource.setUuid("Raphael");
+ herRaphaelResource.setLastUpdaterUserId("her");
+ herRaphaelResource.setLifecycleState(NOT_CERTIFIED_CHECKOUT.name());
+
+ Resource myMediumResource = new Resource();
+ myMediumResource.setUuid("Medium");
+ myMediumResource.setLastUpdaterUserId("me");
+
+ Resource herDonateloResource = new Resource();
+ herDonateloResource.setUuid("Donatelo");
+ herDonateloResource.setLastUpdaterUserId("her");
+ herDonateloResource.setVersion("1.0");
+
+ Resource hisMykonosResource = new Resource();
+ hisMykonosResource.setUuid("Mykonos");
+ hisMykonosResource.setLastUpdaterUserId("his");
+ hisMykonosResource.setLifecycleState(NOT_CERTIFIED_CHECKOUT.name());
+
+ Resource hisMichaelangeloResource = new Resource();
+ hisMichaelangeloResource.setUuid("Michaelangelo");
+ hisMichaelangeloResource.setLastUpdaterUserId("his");
+ hisMichaelangeloResource.setLifecycleState(CERTIFIED.name());
+ hisMichaelangeloResource.setVersion("1.1");
+
+ // Versions and connectivity to service shouldn't be part of this test as these are passed to SDC to be
+ // filtered by SDC requests (getMonitoringReference and getResource)
+
+ List<Resource> theVfcmts =
Arrays.asList(myRedResource,herRaphaelResource,myMediumResource,herDonateloResource,hisMykonosResource,hisMichaelangeloResource); + + when(sdcClientMock.getResources(anyString(),anyString(),anyString(),anyString())).thenReturn(theVfcmts); + + ResponseEntity<List<Resource>> response = vfcmtBusinessLogic.getVfcmtsForMigration(userId,"service","5544","1.0",requestId); + + Assert.assertEquals(2, response.getBody().size()); + Assert.assertEquals(200, response.getStatusCodeValue()); + } + + private void emulateListOfArtifactsWithCompositionYml() { + List<Artifact> listOfArtifactCompositionYml = new ArrayList<>(); + Artifact compositionArtifact = Mockito.mock(Artifact.class); + when(compositionArtifact.getArtifactName()).thenReturn(DcaeBeConstants.Composition.fileNames.COMPOSITION_YML); + when(compositionArtifact.getArtifactUUID()).thenReturn("compositionArtifactUuid"); + when(compositionArtifact.getPayloadData()).thenReturn("{\"flowType\":\"don't override\"}"); + listOfArtifactCompositionYml.add(compositionArtifact); + when(templateMC.getArtifacts()).thenReturn(listOfArtifactCompositionYml); + } + + private void emulateListOfArtifactsWithCompositionYmlAndSvcRef() { + List<Artifact> listOfArtifactCompositionYml = new ArrayList<>(); + Artifact compositionArtifact = Mockito.mock(Artifact.class); + Artifact svcRefArtifact = Mockito.mock(Artifact.class); + when(compositionArtifact.getArtifactName()).thenReturn(DcaeBeConstants.Composition.fileNames.COMPOSITION_YML); + when(compositionArtifact.getArtifactUUID()).thenReturn("compositionArtifactUuid"); + when(compositionArtifact.getPayloadData()).thenReturn("{\"flowType\":\"don't override\"}"); + when(svcRefArtifact.getArtifactName()).thenReturn(DcaeBeConstants.Composition.fileNames.SVC_REF); + when(svcRefArtifact.getArtifactUUID()).thenReturn("svcRefArtifactUuid"); + listOfArtifactCompositionYml.add(compositionArtifact); + listOfArtifactCompositionYml.add(svcRefArtifact); + when(templateMC.getArtifacts()).thenReturn(listOfArtifactCompositionYml); + } + + @Test + public void uiHasABug_creatingVfcmtWithBadRequestNoServiceUuid_gotResponseWithError400() throws Exception{ + RequestError requestError = new RequestError(); + requestError.setPolicyException(new PolicyException("POL5000", "Error: Internal Server Error. Please try again later.", null)); + when(sdcClientMock.createResource(userId,request,requestId)).thenThrow(new ASDCException(HttpStatus.INTERNAL_SERVER_ERROR, requestError)); + CreateVFCMTRequest req = new CreateVFCMTRequest(); + req.setServiceUuid(null); + ResponseEntity res = vfcmtBusinessLogic.createMcFromTemplate(userId,req,requestId); + verify(sdcClientMock,times(0)).getResource(anyString(),anyString()); + verify(sdcClientMock,times(0)).getResourceArtifact(anyString(),anyString(),anyString()); + Assert.assertEquals(400, res.getStatusCodeValue()); + } +}
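The rollback test above (uploadCloneCdumpFailed_creatingVfcmt_createVfcmtRolledBack) pins down a create-then-undo contract: when the artifact upload that follows resource creation throws, the freshly created resource's lifecycle change must be reverted. Below is a minimal standalone sketch of that shape, using hypothetical names rather than the real VfcmtBusinessLogic/ISdcClient signatures.

// Hypothetical sketch of the contract the rollback test verifies:
// a failed artifact upload must not leave the freshly created
// resource checked out.
interface ResourceClient {
    String createResource(String userId);
    void uploadArtifact(String userId, String resourceId);
    void changeLifecycleState(String userId, String resourceId, String state);
}

class CreateWithRollback {
    String createWithArtifact(ResourceClient client, String userId) {
        String resourceId = client.createResource(userId);
        try {
            client.uploadArtifact(userId, resourceId);
            return resourceId;
        } catch (RuntimeException e) {
            // counterpart of verify(sdcClientMock).changeResourceLifecycleState(...)
            client.changeLifecycleState(userId, resourceId, "UNDOCHECKOUT");
            return null;
        }
    }
}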
\ No newline at end of file diff --git a/dcaedt_be/src/test/java/org/onap/sdc/dcae/rule/editor/impl/RulesBusinessLogicTest.java b/dcaedt_be/src/test/java/org/onap/sdc/dcae/rule/editor/impl/RulesBusinessLogicTest.java new file mode 100644 index 0000000..d3ae600 --- /dev/null +++ b/dcaedt_be/src/test/java/org/onap/sdc/dcae/rule/editor/impl/RulesBusinessLogicTest.java @@ -0,0 +1,304 @@ +package org.onap.sdc.dcae.rule.editor.impl; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import org.junit.Before; +import org.junit.Test; +import org.mockito.InjectMocks; +import org.mockito.MockitoAnnotations; +import org.onap.sdc.dcae.composition.restmodels.ruleeditor.*; +import org.onap.sdc.dcae.errormng.ErrorConfigurationLoader; +import org.onap.sdc.dcae.errormng.ResponseFormatManager; +import org.onap.sdc.dcae.errormng.ServiceException; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static org.junit.Assert.*; + +public class RulesBusinessLogicTest { + private static Gson gson = new GsonBuilder() + .registerTypeAdapter(BaseAction.class, new ActionDeserializer()) + .registerTypeAdapter(BaseCondition.class, new ConditionDeserializer()).create(); + + @InjectMocks + private RulesBusinessLogic rulesBusinessLogic = new RulesBusinessLogic(); + private ResponseFormatManager responseFormatManager = null; + + @Before + public void setup(){ + MockitoAnnotations.initMocks(this); + ErrorConfigurationLoader errorConfigurationLoader = new ErrorConfigurationLoader(System.getProperty("user.dir")+"/src/main/webapp/WEB-INF"); + responseFormatManager = ResponseFormatManager.getInstance(); + } + + @Test + public void translateSingleRuleSingleCopyActionAddSnmpHeader() throws Exception { + String expectedTranslation = "{\"processing\":[{\"phase\":\"snmp_map\",\"processors\":[{\"array\":\"varbinds\",\"datacolumn\":\"varbind_value\",\"keycolumn\":\"varbind_oid\",\"class\":\"SnmpConvertor\"}," + + "{\"phase\":\"phase_1\",\"class\":\"RunPhase\"}]},{\"phase\":\"phase_1\",\"processors\":[{\"updates\":{\"event.commonEventHeader.version\":\"2.0\"},\"class\":\"Set\"}]}," + + "{\"phase\":\"phase_1\",\"processors\":[{\"phase\":\"map_publish\",\"class\":\"RunPhase\"}]}]}"; + + Rule rule = new Rule(); + rule.setActions(new ArrayList<>()); + rule.getActions().add(buildCopyAction("2.0","event.commonEventHeader.version")); + rule.setDescription("description"); + MappingRules mr = new MappingRules(rule); + List<ServiceException> errors = rulesBusinessLogic.validateRules(mr); + assertTrue(errors.isEmpty()); + assertEquals(expectedTranslation, rulesBusinessLogic.translateRules(mr, "snmp_map", "map_publish", "phase_1")); + } + + @Test + public void translateSingleRuleSingleDateFormatterActionSnmpFlow() throws Exception { + String expectedTranslation = "{\"processing\":[{\"phase\":\"snmp_map\",\"processors\":[{\"array\":\"varbinds\",\"datacolumn\":\"varbind_value\",\"keycolumn\":\"varbind_oid\",\"class\":\"SnmpConvertor\"}," + + "{\"phase\":\"phase_1\",\"class\":\"RunPhase\"}]},{\"phase\":\"phase_1\",\"processors\":[{\"fromFormat\":\"fromFormat\",\"fromTz\":\"fromTZ\",\"toField\":\"targetField\",\"toFormat\":\"toFormat\",\"toTz\":\"toTz\",\"value\":\"fromField\",\"class\":\"DateFormatter\"}]}," + + "{\"phase\":\"phase_1\",\"processors\":[{\"phase\":\"map_publish\",\"class\":\"RunPhase\"}]}]}"; + + Rule rule = new Rule(); + rule.setActions(new ArrayList<>()); + rule.getActions().add(buildDateFormatterAction()); + rule.setDescription("description"); + MappingRules mr = new 
MappingRules(rule); + List<ServiceException> errors = rulesBusinessLogic.validateRules(mr); + assertTrue(errors.isEmpty()); + assertEquals(expectedTranslation, rulesBusinessLogic.translateRules(mr, "snmp_map", "map_publish", "phase_1")); + } + + @Test + public void translateSingleRuleMultipleCopyActionsAddSnmpHeader() throws Exception { + String expectedTranslation = "{\"processing\":[{\"phase\":\"snmp_map\",\"processors\":[{\"array\":\"varbinds\",\"datacolumn\":\"varbind_value\",\"keycolumn\":\"varbind_oid\",\"class\":\"SnmpConvertor\"}," + + "{\"phase\":\"phase_1\",\"class\":\"RunPhase\"}]},{\"phase\":\"phase_1\"," + + "\"processors\":[{\"updates\":{\"event.commonEventHeader.version\":\"2.0\",\"event.commonEventHeader.eventId\":\"${event.commonEventHeader.sourceName}_${eventGroup}\"},\"class\":\"Set\"}," + + "{\"regex\":\"([^:]*):.*\",\"field\":\"targetField\",\"value\":\"extractFromHere\",\"class\":\"ExtractText\"}]},{\"phase\":\"phase_1\",\"processors\":[{\"phase\":\"map_publish\",\"class\":\"RunPhase\"}]}]}"; + + MappingRules mr = new MappingRules(buildRuleWithMultipleCopyActions()); + List<ServiceException> errors = rulesBusinessLogic.validateRules(mr); + assertTrue(errors.isEmpty()); + assertEquals(expectedTranslation, rulesBusinessLogic.translateRules(mr, "snmp_map", "map_publish", "phase_1")); + } + + @Test + public void translateMultipleRulesMultipleCopyActionsAddSnmpHeader() throws Exception { + String expectedTranslation = "{\"processing\":[{\"phase\":\"snmp_map\",\"processors\":[{\"array\":\"varbinds\",\"datacolumn\":\"varbind_value\",\"keycolumn\":\"varbind_oid\",\"class\":\"SnmpConvertor\"}," + + "{\"phase\":\"phase_1\",\"class\":\"RunPhase\"}]},{\"phase\":\"phase_1\"," + + "\"processors\":[{\"updates\":{\"event.commonEventHeader.version\":\"2.0\",\"event.commonEventHeader.eventId\":\"${event.commonEventHeader.sourceName}_${eventGroup}\"},\"class\":\"Set\"}," + + "{\"regex\":\"([^:]*):.*\",\"field\":\"targetField\",\"value\":\"extractFromHere\",\"class\":\"ExtractText\"}]},{\"phase\":\"phase_1\"," + + "\"processors\":[{\"updates\":{\"event.commonEventHeader.version\":\"2.0\",\"event.commonEventHeader.eventId\":\"${event.commonEventHeader.sourceName}_${eventGroup}\"},\"class\":\"Set\"}," + + "{\"regex\":\"([^:]*):.*\",\"field\":\"targetField\",\"value\":\"extractFromHere\",\"class\":\"ExtractText\"}]},{\"phase\":\"phase_1\",\"processors\":[{\"phase\":\"map_publish\",\"class\":\"RunPhase\"}]}]}"; + + MappingRules mr = new MappingRules(buildRuleWithMultipleCopyActions()); + mr.addOrReplaceRule(buildRuleWithMultipleCopyActions()); + assertEquals(expectedTranslation, rulesBusinessLogic.translateRules(mr, "snmp_map", "map_publish", "phase_1")); + } + + @Test + public void emptyStringTest() throws Exception { + String expectedTranslation = "{\"processing\":[{\"phase\":\"snmp_map\",\"processors\":[{\"array\":\"varbinds\",\"datacolumn\":\"varbind_value\",\"keycolumn\":\"varbind_oid\",\"class\":\"SnmpConvertor\"}," + + "{\"phase\":\"phase_1\",\"class\":\"RunPhase\"}]},{\"phase\":\"phase_1\",\"processors\":[{\"map\":{\"\":\"\"},\"field\":\"\",\"toField\":\"mapTargetField\",\"default\":\"\",\"class\":\"MapAlarmValues\"}]}," + + "{\"phase\":\"phase_1\",\"processors\":[{\"phase\":\"map_publish\",\"class\":\"RunPhase\"}]}]}"; + String ruleRequestBody = "{version:4.1,eventType:syslogFields,description:description,actions:[{actionType:map,from:{value:'\"\"'},target:mapTargetField,map:{values:[{key:'\"\"',value:'\"\"'}],haveDefault:true,default:'\"\"'}}]}"; + Rule myRule = 
gson.fromJson(ruleRequestBody, Rule.class); + MappingRules mr = new MappingRules(myRule); + List<ServiceException> errors = rulesBusinessLogic.validateRules(mr); + assertTrue(errors.isEmpty()); + assertEquals(expectedTranslation, rulesBusinessLogic.translateRules(mr, "snmp_map", "map_publish", "phase_1")); + } + + @Test + public void singleStringConditionTranslationTest() throws Exception { + String expectedTranslation = "{\"processing\":[{\"phase\":\"syslog_map\",\"processors\":[{\"phase\":\"phase_1\",\"class\":\"RunPhase\"}]},{\"phase\":\"phase_1\",\"filter\":{\"string\":\"left\",\"value\":\"right\",\"class\":\"Contains\"}," + + "\"processors\":[{\"updates\":{\"event.commonEventHeader.version\":\"2.0\",\"event.commonEventHeader.eventId\":\"${event.commonEventHeader.sourceName}_${eventGroup}\"},\"class\":\"Set\"}," + + "{\"regex\":\"([^:]*):.*\",\"field\":\"targetField\",\"value\":\"extractFromHere\",\"class\":\"ExtractText\"}]},{\"phase\":\"phase_1\",\"processors\":[{\"phase\":\"map_publish\",\"class\":\"RunPhase\"}]}]}"; + String input = "{operator:contains,left:left,right:[right]}"; + Rule rule = buildRuleWithMultipleCopyActions(); + rule.setCondition(gson.fromJson(input, BaseCondition.class)); + MappingRules mr = new MappingRules(rule); + assertEquals(expectedTranslation, rulesBusinessLogic.translateRules(mr, "syslog_map", "map_publish", "phase_1")); + } + + @Test + public void multiStringConditionTranslationTest() throws Exception { + String expectedTranslation = "{\"processing\":[{\"phase\":\"foi_map\",\"processors\":[{\"phase\":\"phase_1\",\"class\":\"RunPhase\"}]}," + + "{\"phase\":\"phase_1\",\"filter\":{\"filters\":[{\"string\":\"left\",\"value\":\"right1\",\"class\":\"Contains\"},{\"string\":\"left\",\"value\":\"right2\",\"class\":\"Contains\"}],\"class\":\"Or\"}," + + "\"processors\":[{\"updates\":{\"event.commonEventHeader.version\":\"2.0\",\"event.commonEventHeader.eventId\":\"${event.commonEventHeader.sourceName}_${eventGroup}\"},\"class\":\"Set\"}," + + "{\"regex\":\"([^:]*):.*\",\"field\":\"targetField\",\"value\":\"extractFromHere\",\"class\":\"ExtractText\"}]},{\"phase\":\"phase_1\",\"processors\":[{\"phase\":\"map_publish\",\"class\":\"RunPhase\"}]}]}"; + String input = "{operator:contains,left:left,right:[right1, right2]}"; + Rule rule = buildRuleWithMultipleCopyActions(); + rule.setCondition(gson.fromJson(input, BaseCondition.class)); + MappingRules mr = new MappingRules(rule); + assertEquals(expectedTranslation, rulesBusinessLogic.translateRules(mr, "foi_map", "map_publish", "phase_1")); + } + + @Test + public void singleFieldConditionTranslationTest() throws Exception { + String expectedTranslation = "{\"processing\":[{\"phase\":\"snmp_map\",\"processors\":[{\"array\":\"varbinds\",\"datacolumn\":\"varbind_value\",\"keycolumn\":\"varbind_oid\",\"class\":\"SnmpConvertor\"}," + + "{\"phase\":\"phase_1\",\"class\":\"RunPhase\"}]},{\"phase\":\"phase_1\",\"filter\":{\"field\":\"left\",\"value\":\"right\",\"class\":\"Equals\"}," + + "\"processors\":[{\"updates\":{\"event.commonEventHeader.version\":\"2.0\",\"event.commonEventHeader.eventId\":\"${event.commonEventHeader.sourceName}_${eventGroup}\"},\"class\":\"Set\"}," + + "{\"regex\":\"([^:]*):.*\",\"field\":\"targetField\",\"value\":\"extractFromHere\",\"class\":\"ExtractText\"}]},{\"phase\":\"phase_1\",\"processors\":[{\"phase\":\"map_publish\",\"class\":\"RunPhase\"}]}]}"; + String input = "{operator:equals,left:left,right:[right]}"; + Rule rule = buildRuleWithMultipleCopyActions(); + 
rule.setCondition(gson.fromJson(input, BaseCondition.class)); + MappingRules mr = new MappingRules(rule); + assertEquals(expectedTranslation, rulesBusinessLogic.translateRules(mr, "snmp_map", "map_publish", "phase_1")); + } + + @Test + public void multiFieldConditionTranslationTest() throws Exception { + String expectedTranslation = "{\"processing\":[{\"phase\":\"snmp_map\",\"processors\":[{\"array\":\"varbinds\",\"datacolumn\":\"varbind_value\",\"keycolumn\":\"varbind_oid\",\"class\":\"SnmpConvertor\"}," + + "{\"phase\":\"phase_1\",\"class\":\"RunPhase\"}]},{\"phase\":\"phase_1\",\"filter\":{\"field\":\"left\",\"values\":[\"right1\",\"right2\"],\"class\":\"NotOneOf\"}," + + "\"processors\":[{\"updates\":{\"event.commonEventHeader.version\":\"2.0\",\"event.commonEventHeader.eventId\":\"${event.commonEventHeader.sourceName}_${eventGroup}\"},\"class\":\"Set\"}," + + "{\"regex\":\"([^:]*):.*\",\"field\":\"targetField\",\"value\":\"extractFromHere\",\"class\":\"ExtractText\"}]},{\"phase\":\"phase_1\",\"processors\":[{\"phase\":\"map_publish\",\"class\":\"RunPhase\"}]}]}"; + String input = "{operator:notequal,left:left,right:[right1,right2]}"; + Rule rule = buildRuleWithMultipleCopyActions(); + rule.setCondition(gson.fromJson(input, BaseCondition.class)); + MappingRules mr = new MappingRules(rule); + assertEquals(expectedTranslation, rulesBusinessLogic.translateRules(mr, "snmp_map", "map_publish", "phase_1")); + } + + @Test + public void reorderRuleActionsDuringValidationSuccessTest() { + Rule rule1 = buildValidRuleWithDependentActions(); + Rule rule2 = buildValidRuleWithDependentActions(); + assertEquals(rule1, rule2); + List<ServiceException> errors = rulesBusinessLogic.validateRule(rule1); + assertTrue(errors.isEmpty()); + assertNotEquals(rule1, rule2); + //after validation actions are reordered: 1, 3, 4, 2, 5 + rule2.getActions().add(1, rule2.getActions().get(2)); // 1, 2, 3, 4, 5 -> 1, 3, 2, 3, 4, 5 + rule2.getActions().remove(3); // 1, 3, 2, 3, 4, 5 -> 1, 3, 2, 4, 5 + rule2.getActions().add(2, rule2.getActions().get(3)); // 1, 3, 2, 4, 5 -> 1, 3, 4, 2, 4, 5 + rule2.getActions().remove(4); // 1, 3, 4, 2, 4, 5 -> 1, 3, 4, 2, 5 + assertEquals(rule1, rule2); + } + + @Test + public void reorderRuleActionsDuringValidationFailureTest() { + String expectedError = "A circular dependency was detected between actions. 
The following fields should be resolved: event.commonEventHeader.eventId, event.commonEventHeader.sourceName, invalidSelfDependency, circularDependencyTarget_3"; + Rule rule1 = buildRuleWithCircularActionDependencies(); + List<ServiceException> errors = rulesBusinessLogic.validateRule(rule1); + assertEquals(expectedError, errors.get(0).getFormattedErrorMessage()); + } + + + @Test + public void reorderMappingRulesByDependencySuccessTest() { + MappingRules mr = new MappingRules(buildRuleWithMultipleCopyActions()); + Rule rule = new Rule(); + rule.setDescription("description"); + rule.setActions(new ArrayList<>()); + // create a dependency between rules + rule.getActions().add(buildCopyAction("${event.commonEventHeader.someField}","event.commonEventHeader.sourceName")); + mr.addOrReplaceRule(rule); + List<String> ruleUids = new ArrayList<>(mr.getRules().keySet()); + String translateBefore = rulesBusinessLogic.translateRules(mr,"snmp_map", "map_publish", "phase_1"); + List<ServiceException> errors = rulesBusinessLogic.validateRules(mr); + assertTrue(errors.isEmpty()); + List<String> ruleUidsMod = new ArrayList<>(mr.getRules().keySet()); + assertEquals(ruleUids.get(0), ruleUidsMod.get(1)); + assertEquals(ruleUids.get(1), ruleUidsMod.get(0)); + assertNotEquals(translateBefore, rulesBusinessLogic.translateRules(mr,"snmp_map", "map_publish", "phase_1")); + } + + @Test + public void reorderMappingRulesCircularDependencyFailureTest() { + + MappingRules mr = new MappingRules(buildRuleWithMultipleCopyActions()); + List<ServiceException> errors = rulesBusinessLogic.validateRules(mr); + assertTrue(errors.isEmpty()); + Rule rule = new Rule(); + rule.setDescription("description"); + rule.setActions(new ArrayList<>()); + // create a circular dependency between rules + rule.getActions().add(buildCopyAction("${event.commonEventHeader.version}","event.commonEventHeader.sourceName")); + String input = "{operator:equals,left:\"${event.commonEventHeader.version}\",right:[\"${event.commonEventHeader.eventId}\"]}"; + rule.setCondition(gson.fromJson(input, BaseCondition.class)); + assertTrue(rulesBusinessLogic.addOrEditRule(mr, rule)); + errors = rulesBusinessLogic.validateRules(mr); + assertFalse(errors.isEmpty()); + String expectedError = String.format("A circular dependency was detected between rules: %s, %s within fields: event.commonEventHeader.sourceName, event.commonEventHeader.version, event.commonEventHeader.eventId", mr.getRules().keySet().toArray()); + assertEquals(expectedError, errors.get(0).getFormattedErrorMessage()); + } + + + @Test + public void translateNestedComplexConditionSuccessTest() { + String expectedTranslation = "{\"processing\":[{\"phase\":\"foi_map\",\"processors\":[{\"phase\":\"phase_1\",\"class\":\"RunPhase\"}]}," + + "{\"phase\":\"phase_1\",\"filter\":{\"filters\":[{\"field\":\"${event.commonEventHeader.version}\",\"value\":\"${event.commonEventHeader.eventId}\",\"class\":\"Equals\"}," + + "{\"filters\":[{\"field\":\"left\",\"value\":\"right\",\"class\":\"NotEqual\"},{\"string\":\"${XXX}\",\"value\":\"right1\",\"class\":\"Contains\"}," + + "{\"string\":\"${XXX}\",\"value\":\"right2\",\"class\":\"Contains\"}],\"class\":\"Or\"}],\"class\":\"And\"}," + + "\"processors\":[{\"updates\":{\"event.commonEventHeader.version\":\"2.0\"},\"class\":\"Set\"}]},{\"phase\":\"phase_1\",\"processors\":[{\"phase\":\"map_publish\",\"class\":\"RunPhase\"}]}]}"; + + Rule rule = new Rule(); + rule.setActions(new ArrayList<>()); + 
rule.getActions().add(buildCopyAction("2.0","event.commonEventHeader.version")); + rule.setDescription("description"); + String condition = "{type:All,children:[{operator:equals,left:\"${event.commonEventHeader.version}\",right:[\"${event.commonEventHeader.eventId}\"]}," + + "{type:Any,children:[{operator:contains,left:\"${XXX}\",right:[right1,right2]},{operator:notEqual,left:left,right:[right]}]}]}"; + rule.setCondition(gson.fromJson(condition, BaseCondition.class)); + List<ServiceException> errors = rulesBusinessLogic.validateRule(rule); + assertTrue(errors.isEmpty()); + assertEquals(expectedTranslation, rulesBusinessLogic.translateRules(new MappingRules(rule),"foi_map", "map_publish", "phase_1")); + } + + private Rule buildRuleWithMultipleCopyActions() { + Rule rule = new Rule(); + rule.setDescription("description"); + List<BaseAction> actions = new ArrayList<>(); + actions.add(buildCopyAction("2.0","event.commonEventHeader.version")); + actions.add(buildConcatAction(Arrays.asList("${event.commonEventHeader.sourceName}","_","${eventGroup}"), "event.commonEventHeader.eventId")); + actions.add(buildRegexAction("extractFromHere", "targetField", "([^:]*):.*")); + rule.setActions(actions); + return rule; + } + + private Rule buildValidRuleWithDependentActions() { + Rule rule = buildRuleWithMultipleCopyActions(); + rule.getActions().add(buildConcatAction(Arrays.asList("${targetField}","_","${circularDependencyTarget_3}"), "event.commonEventHeader.sourceName")); + rule.getActions().add(buildConcatAction(Arrays.asList("${validSelfDependency}","_","${event.commonEventHeader.version}"), "validSelfDependency")); + return rule; + } + + private Rule buildRuleWithCircularActionDependencies() { + Rule rule = buildValidRuleWithDependentActions(); + rule.getActions().add(buildCopyAction("${invalidSelfDependency}", "invalidSelfDependency")); + rule.getActions().add(buildCopyAction("${event.commonEventHeader.eventId}", "circularDependencyTarget_3")); + return rule; + } + + private BaseAction buildCopyAction(String from, String to) { + BaseAction action = new BaseAction(); + action.setActionType("copy"); + action.setFrom(from); + action.setTarget(to); + return action; + } + + private BaseAction buildConcatAction(List<String> from, String to) { + BaseAction action = new BaseAction(); + action.setActionType("concat"); + action.setFrom(from); + action.setTarget(to); + return action; + } + + private BaseAction buildRegexAction(String from, String to, String regex) { + BaseAction action = new BaseAction(); + action.setActionType("copy"); + action.setFrom(from, regex); + action.setTarget(to); + return action; + } + + private DateFormatterAction buildDateFormatterAction() { + DateFormatterAction action = new DateFormatterAction(); + action.setActionType("date formatter"); + action.setFrom("fromField"); + action.setTarget("targetField"); + action.setFromFormat("fromFormat"); + action.setToFormat("toFormat"); + action.setFromTz("fromTZ"); + action.setToTz("toTz"); + return action; + } +}
\ No newline at end of file
diff --git a/dcaedt_be/src/test/java/org/onap/sdc/dcae/services/GetServicesTest.java b/dcaedt_be/src/test/java/org/onap/sdc/dcae/services/GetServicesTest.java
new file mode 100644
index 0000000..68f055f
--- /dev/null
+++ b/dcaedt_be/src/test/java/org/onap/sdc/dcae/services/GetServicesTest.java
@@ -0,0 +1,143 @@
+package org.onap.sdc.dcae.services;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.UUID;
+import java.util.stream.Collectors;
+
+import org.onap.sdc.dcae.composition.restmodels.DcaeMinimizedService;
+import org.onap.sdc.dcae.composition.controller.ServicesController;
+import org.onap.sdc.dcae.composition.util.DcaeBeConstants;
+import org.onap.sdc.dcae.composition.util.DcaeBeConstants.LifecycleStateEnum;
+import org.testng.annotations.Test;
+
+public class GetServicesTest {
+
+
+ @Test
+ public void parseAndFilterServicesByUser_nullServices_TBD() {
+// fail("TODO Auto-generated method stub");
+ }
+
+
+ @Test
+ public void parseAndFilterServicesByUser_emptyList_emptyList() {
+ // arrange
+ ServicesController target = new ServicesController();
+ String user_id = "test";
+ String lastUpdaterUserId = "test";
+ List<LinkedHashMap<String, String>> services = new ArrayList<LinkedHashMap<String, String>>();
+ // act
+ List<DcaeMinimizedService> result = target.parseAndFilterServicesByUser(lastUpdaterUserId, services, user_id);
+ // assert
+ assertThat(result).isEqualTo(new ArrayList<DcaeMinimizedService>());
+ }
+
+
+ @Test
+ public void parseAndFilterServicesByUser_singleServicesAsMap_singleServiceParsed() {
+ // arrange
+ String user_id = "test";
+ String lastUpdaterUserId = user_id;
+ String uuid = "a";
+ String invariantUUID = "1";
+ String lifecycleState = LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT.name();
+ String version = "0.1";
+ String serviceName = "TestService";
+
+ ServicesController target = new ServicesController();
+ LinkedHashMap<String, String> service = createServiceAsMap(lastUpdaterUserId, uuid, invariantUUID,
+ lifecycleState, version, serviceName);
+ List<LinkedHashMap<String, String>> services = new ArrayList<LinkedHashMap<String, String>>(
+ Arrays.asList(service));
+
+ DcaeMinimizedService expected = new DcaeMinimizedService(uuid, serviceName, lastUpdaterUserId, lifecycleState,
+ version, invariantUUID);
+ // act
+ List<DcaeMinimizedService> result = target.parseAndFilterServicesByUser(lastUpdaterUserId, services, user_id);
+ // assert
+ assertThat(result).usingRecursiveFieldByFieldElementComparator().contains(expected);
+ }
+
+
+ @Test
+ public void parseAndFilterServicesByUser_unsortedServices_sortedServices() {
+ // arrange
+ String user_id = "test";
+ String lastUpdaterUserId = user_id;
+ String uuid = "a";
+ String lifecycleState = LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT.name();
+ String version = "0.1";
+
+ List<LinkedHashMap<String, String>> unsortedServices = Arrays.asList("d", "a", "c", "b").stream()
+ .map(x -> createServiceAsMap(lastUpdaterUserId, uuid, UUID.randomUUID().toString(), lifecycleState, version, x))
+ .collect(Collectors.toList());
+
+ ServicesController target = new ServicesController();
+
+ // act
+ List<DcaeMinimizedService> result = target.parseAndFilterServicesByUser(lastUpdaterUserId, unsortedServices,
+ user_id);
+ // assert
+ assertThat(result).extracting("name").containsExactly("a","b","c","d");
+ }
+
+
+ @Test
+ public void
parseAndFilterServicesByUser_allOptionsForLastUpdaterAndIsCheckout_allOptionsButIsCheckoutAndNotLastUpdater() {
+ // ------------user == last_updater
+ // -----------------True----False--
+ // isCheckout----------------------
+ // --------True------V--------X----
+ // --------False-----V--------V----
+ // --------------------------------
+// fail("TODO Auto-generated method stub");
+ }
+
+
+ @Test
+ public void parseAndFilterServicesByUser_singleServiceWithMultiVersions_singleServiceWithLatestVersion() {
+ // arrange
+ String user_id = "test";
+ String lastUpdaterUserId = user_id;
+ String uuid = "a";
+ String invariantUUID = "1";
+ String lifecycleState = LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT.name();
+ String serviceName = "TestService";
+
+ List<LinkedHashMap<String, String>> singleServiceWithMultiVersions = Arrays.asList("1.0", "0.3", "11.0", "2.0", "1.8").stream()
+ .map(x -> createServiceAsMap(lastUpdaterUserId, uuid, invariantUUID, lifecycleState, x, serviceName))
+ .collect(Collectors.toList());
+
+ ServicesController target = new ServicesController();
+
+ // act
+ List<DcaeMinimizedService> result = target.parseAndFilterServicesByUser(lastUpdaterUserId, singleServiceWithMultiVersions, user_id);
+
+ // assert
+ assertThat(result).extracting("version").containsExactly("11.0");
+ }
+
+
+ private static LinkedHashMap<String, String> createServiceAsMap(String lastUpdaterUserId, String uuid,
+ String invariantUUID, String lifecycleState, String version, String serviceName) {
+
+ LinkedHashMap<String, String> service = new LinkedHashMap<String, String>() {
+ {
+ put("invariantUUID", invariantUUID);
+ put("uuid", uuid);
+ put("name", serviceName);
+ put("lastUpdaterUserId", lastUpdaterUserId);
+ put("lifecycleState", lifecycleState);
+ put("version", version);
+ }
+ };
+
+ return service;
+ }
+
+}
diff --git a/dcaedt_be/src/test/java/org/onap/sdc/dcae/ves/EventListenerDefinitionTest.java b/dcaedt_be/src/test/java/org/onap/sdc/dcae/ves/EventListenerDefinitionTest.java
new file mode 100644
index 0000000..3e9edf5
--- /dev/null
+++ b/dcaedt_be/src/test/java/org/onap/sdc/dcae/ves/EventListenerDefinitionTest.java
@@ -0,0 +1,61 @@
+package org.onap.sdc.dcae.ves;
+
+import org.apache.commons.lang.StringUtils;
+import org.junit.Test;
+import org.onap.sdc.dcae.VesStructureLoaderMock;
+
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class EventListenerDefinitionTest {
+ VesStructureLoaderMock loader = new VesStructureLoaderMock(false);
+
+ @Test
+ public void resolveRefTypesSimpleTest() throws Exception {
+ EventListenerDefinition eventListenerDefinition = loader.getEventListeners().get("4.1");
+ assertTrue(eventListenerDefinition.propertiesContainReference());
+ eventListenerDefinition.resolveRefTypes();
+ assertFalse(eventListenerDefinition.propertiesContainReference());
+ }
+
+ @Test
+ public void resolveRefTypesSimpleUnresolvableTest() throws Exception {
+ EventListenerDefinition eventListenerDefinition = loader.getEventListeners().get("Unresolvable");
+ assertTrue(eventListenerDefinition.propertiesContainReference());
+ String resolverError = eventListenerDefinition.resolveRefTypes();
+ assertTrue(eventListenerDefinition.propertiesContainReference());
+ assertEquals("the following definitions containing unresolvable references: [\"otherFields\",\"stateChangeFields\",\"syslogFields\",\"thresholdCrossingAlertFields\"]",resolverError);
+ }
+
+ @Test
+ public void validateSuccessTest() throws
Exception { + EventListenerDefinition eventListenerDefinition = loader.getEventListeners().get("4.1"); + assertTrue(StringUtils.isBlank(eventListenerDefinition.validate())); + } + + @Test + public void validateTypesFailureTest() throws Exception { + EventListenerDefinition eventListenerDefinition = loader.getEventListeners().get("InvalidType"); + String error = eventListenerDefinition.validate(); + assertEquals("invalid type declaration: invalid", error); + } + + @Test + public void validateRequiredFailureTest() throws Exception { + EventListenerDefinition eventListenerDefinition = loader.getEventListeners().get("InvalidRequiredEntry"); + String error = eventListenerDefinition.validate(); + assertEquals("invalid required entry: codecIdentifier(invalid)", error); + } + + @Test + public void validateEventPropertyFailureTest() throws Exception { + EventListenerDefinition eventListenerDefinition = loader.getEventListeners().get("NoEventProperty"); + String error = eventListenerDefinition.validate(); + assertEquals("schema not containing property: event", error); + } + + + +}
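The resolution behaviour exercised above, where resolveRefTypes flips propertiesContainReference from true to false, amounts to inlining local $ref pointers such as #/definitions/field until none remain. Below is an illustrative toy resolver over raw Gson trees, assuming well-formed, non-circular local references; it is not the VesDataTypeDefinition implementation, which additionally reports unresolvable definitions, as the second test expects.

import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import java.util.Map;

public class RefResolverSketch {

    // Returns a copy of 'node' in which every {"$ref":"#/definitions/<name>"}
    // object is replaced by the named definition, recursively. Circular
    // references would recurse forever; real code has to detect them.
    static JsonElement resolve(JsonElement node, JsonObject definitions) {
        if (node.isJsonObject()) {
            JsonObject obj = node.getAsJsonObject();
            if (obj.has("$ref")) {
                String name = obj.get("$ref").getAsString().replace("#/definitions/", "");
                return resolve(definitions.get(name), definitions);
            }
            JsonObject resolved = new JsonObject();
            for (Map.Entry<String, JsonElement> entry : obj.entrySet()) {
                resolved.add(entry.getKey(), resolve(entry.getValue(), definitions));
            }
            return resolved;
        }
        if (node.isJsonArray()) {
            JsonArray resolved = new JsonArray();
            for (JsonElement element : node.getAsJsonArray()) {
                resolved.add(resolve(element, definitions));
            }
            return resolved;
        }
        return node;
    }

    public static void main(String[] args) {
        JsonObject schema = new JsonParser().parse(
                "{\"definitions\":{\"field\":{\"type\":\"object\"}},"
                        + "\"properties\":{\"f\":{\"$ref\":\"#/definitions/field\"}}}").getAsJsonObject();
        System.out.println(resolve(schema.get("properties"), schema.getAsJsonObject("definitions")));
        // prints {"f":{"type":"object"}}
    }
}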
\ No newline at end of file diff --git a/dcaedt_be/src/test/java/org/onap/sdc/dcae/ves/VesStructureLoaderTest.java b/dcaedt_be/src/test/java/org/onap/sdc/dcae/ves/VesStructureLoaderTest.java new file mode 100644 index 0000000..3172eec --- /dev/null +++ b/dcaedt_be/src/test/java/org/onap/sdc/dcae/ves/VesStructureLoaderTest.java @@ -0,0 +1,129 @@ +package org.onap.sdc.dcae.ves; + +import org.junit.Test; +import org.onap.sdc.dcae.VesStructureLoaderMock; + +import java.util.*; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class VesStructureLoaderTest { + + // the file names of test schema files + private final String UNRESOLVABLE_REFERENCES = "CommonEventFormat_vUnresolvable.json"; + private final String VALID_VERSION_4_1 = "CommonEventFormat_v4.1.json"; + private final String VALID_VERSION_5_3 = "CommonEventFormat_v5.3.json"; + private final String INVALID_JSON = "CommonEventFormat_vInvalidJson.json"; + private final String UNSUPPORTED_FILENAME = "unsupportedFilename.json"; + private final String INVALID_SCHEMA_STRUCTURE = "CommonEventFormat_vInvalidSchemaStructure.json"; + private final String INVALID_TYPE = "CommonEventFormat_vInvalidType.json"; + private final String INVALID_REQUIRED_ENTRY = "CommonEventFormat_vInvalidRequiredEntry.json"; + private final String NO_EVENT_PROPERTY = "CommonEventFormat_vNoEventProperty.json"; + private final String NO_COMMON_EVENT_HEADER = "CommonEventFormat_v4.1WithoutCommonEventHeader.json"; + + // schema directory test paths + private final String EMPTY_SCHEMA_DIR = System.getProperty("user.dir") + "/src/test/resources/ves-schema/empty"; + private final String NON_EXISTENT_DIR = EMPTY_SCHEMA_DIR + "/null"; + + private final String ERROR_TEXT = "Error: parsing VES schema file "; + + // files are loaded from the default path; only valid files are kept, and an error is recorded in initErrors for each invalid file + @Test + public void defaultInit() { + VesStructureLoaderMock loader = new VesStructureLoaderMock(); + Set<String> expectedAvailableVersions = new HashSet<>(); + expectedAvailableVersions.add(loader.getVersionFromFileName(VALID_VERSION_4_1)); + expectedAvailableVersions.add(loader.getVersionFromFileName(VALID_VERSION_5_3)); + expectedAvailableVersions.add(loader.getVersionFromFileName(NO_COMMON_EVENT_HEADER)); + assertEquals(expectedAvailableVersions, loader.getAvailableVersionsList()); + List<String> expectedLoggedErrors = Arrays + .asList(getExpectedInvalidJsonError(), getExpectedInvalidRequiredEntryError(), getExpectedInvalidStructureError(), getExpectedInvalidTypeError(), getExpectedNoEventDefinitionError(), getExpectedUnresolvableError()); + assertTrue(loader.getInitErrors().containsAll(expectedLoggedErrors)); + assertEquals(expectedLoggedErrors.size(), loader.getInitErrors().size()); + } + + @Test + public void initWithEmptyDir() { + VesStructureLoaderMock loader = new VesStructureLoaderMock(true, EMPTY_SCHEMA_DIR); + assertTrue(loader.getAvailableVersionsList().isEmpty()); + assertEquals("No VES schema files found", loader.getInitErrors().get(0)); + } + + @Test + public void initWithNonExistentDir() { + VesStructureLoaderMock loader = new VesStructureLoaderMock(true, NON_EXISTENT_DIR); + assertTrue(loader.getAvailableVersionsList().isEmpty()); + assertEquals("No VES schema files found", loader.getInitErrors().get(0)); + } + + @Test + public void complexDataTypeLoaderOutputTest() { + VesStructureLoaderMock loader = new VesStructureLoaderMock(); + VesDataTypeDefinition loaded =
loader.getEventListenerDefinitionByVersion("5.3").get("stateChangeFields"); + assertEquals(buildStateChangeFieldsDefinition(), loaded); + } + + private String getExpectedInvalidJsonError() { + return ERROR_TEXT + INVALID_JSON + " failed due to java.lang.IllegalStateException: Expected BEGIN_OBJECT but was STRING at path $"; + } + + private String getExpectedUnresolvableError() { + return ERROR_TEXT + UNRESOLVABLE_REFERENCES + " failed due to the following definitions containing unresolvable references: [\"otherFields\",\"stateChangeFields\",\"syslogFields\",\"thresholdCrossingAlertFields\"]"; + } + + private String getExpectedInvalidStructureError() { + return ERROR_TEXT + INVALID_SCHEMA_STRUCTURE + " failed due to java.lang.IllegalStateException: Expected BEGIN_ARRAY but was BEGIN_OBJECT at line 8 column 20 path $.definitions..properties[0]"; + } + + private String getExpectedInvalidTypeError() { + return ERROR_TEXT + INVALID_TYPE + " failed due to invalid type declaration: invalid"; + } + + private String getExpectedInvalidRequiredEntryError() { + return ERROR_TEXT + INVALID_REQUIRED_ENTRY + " failed due to invalid required entry: codecIdentifier(invalid)"; + } + + private String getExpectedNoEventDefinitionError() { + return ERROR_TEXT + NO_EVENT_PROPERTY + " failed due to schema not containing property: event"; + } + + private VesDataTypeDefinition buildFieldDefinition() { + Map<String, VesDataTypeDefinition> propsMap = new HashMap<>(); + VesDataTypeDefinition prop = buildVesDataType(null, VesSimpleTypesEnum.STRING.getType(), new ArrayList<>(), null, null); + propsMap.put("name", prop); + propsMap.put("value", prop); + return buildVesDataType("name value pair", VesSimpleTypesEnum.OBJECT.getType(), Arrays.asList("name", "value"), propsMap, null); + } + + private VesDataTypeDefinition buildStateChangeFieldsDefinition() { + + VesDataItemsDefinition items = new VesDataItemsDefinition(); + items.add(buildFieldDefinition()); + VesDataTypeDefinition prop = buildVesDataType("additional stateChange fields if needed", VesSimpleTypesEnum.ARRAY.getType(), new ArrayList<>(), null, null); + prop.setItems(items); + Map<String, VesDataTypeDefinition> propsMap = new HashMap<>(); + propsMap.put("additionalFields", prop); + prop = buildVesDataType("new state of the entity", VesSimpleTypesEnum.STRING.getType(), new ArrayList<>(), null, Arrays.asList("inService", "maintenance", "outOfService")); + propsMap.put("newState", prop); + prop = buildVesDataType("previous state of the entity", VesSimpleTypesEnum.STRING.getType(), new ArrayList<>(), null, Arrays.asList("inService", "maintenance", "outOfService")); + propsMap.put("oldState", prop); + prop = buildVesDataType("version of the stateChangeFields block", VesSimpleTypesEnum.NUMBER.getType(), new ArrayList<>(), null, null); + propsMap.put("stateChangeFieldsVersion", prop); + prop = buildVesDataType("card or port name of the entity that changed state", VesSimpleTypesEnum.STRING.getType(), new ArrayList<>(), null, null); + propsMap.put("stateInterface", prop); + VesDataTypeDefinition def = buildVesDataType("stateChange fields", VesSimpleTypesEnum.OBJECT.getType(), Arrays.asList("newState", "oldState", "stateChangeFieldsVersion", "stateInterface"), propsMap, null); + return def; + } + + private VesDataTypeDefinition buildVesDataType(String description, String type, List<String> required, Map<String, VesDataTypeDefinition> properties, List<String> enums) { + VesDataTypeDefinition def = new VesDataTypeDefinition(); + def.setDescription(description); + 
def.setType(type); + def.setRequired(required); + def.setEnums(enums); + def.setProperties(properties); + return def; + } + +}
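The loader tests above lean on the CommonEventFormat_v<version>.json naming convention: getVersionFromFileName("CommonEventFormat_v4.1.json") is used to predict the keys reported by getAvailableVersionsList(). A sketch of that mapping under the stated convention (the mock's actual implementation may differ):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Illustrative sketch of the file-name convention; not the real loader code.
class SchemaFileNameSketch {
    private static final Pattern SCHEMA_FILE = Pattern.compile("CommonEventFormat_v(.+)\\.json");

    // "CommonEventFormat_v4.1.json" -> "4.1"; returns null for names that do
    // not follow the convention, e.g. "unsupportedFilename.json".
    static String versionFromFileName(String fileName) {
        Matcher matcher = SCHEMA_FILE.matcher(fileName);
        return matcher.matches() ? matcher.group(1) : null;
    }
}

Under this convention a file that does not match the pattern contributes no version, which is consistent with defaultInit() expecting only the CommonEventFormat_v* fixtures among the available versions.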
\ No newline at end of file diff --git a/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_v4.1.json b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_v4.1.json new file mode 100644 index 0000000..2f86c38 --- /dev/null +++ b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_v4.1.json @@ -0,0 +1,1165 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "definitions": { + "attCopyrightNotice": { + "description": "Copyright (c) <2016>, AT&T Intellectual Property. All other rights reserved", + "type": "object", + "properties": { + "useAndRedistribution": { + "description": "Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:", + "type": "string" + }, + "condition1": { + "description": "Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.", + "type": "string" + }, + "condition2": { + "description": "Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.", + "type": "string" + }, + "condition3": { + "description": "All advertising materials mentioning features or use of this software must display the following acknowledgement: This product includes software developed by the AT&T.", + "type": "string" + }, + "condition4": { + "description": "Neither the name of AT&T nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.", + "type": "string" + }, + "disclaimerLine1": { + "description": "THIS SOFTWARE IS PROVIDED BY AT&T INTELLECTUAL PROPERTY AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", + "type": "string" + }, + "disclaimerLine2": { + "description": "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL AT&T INTELLECTUAL PROPERTY BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", + "type": "string" + }, + "disclaimerLine3": { + "description": "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,", + "type": "string" + }, + "disclaimerLine4": { + "description": "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + "type": "string" + } + } + }, + "codecsInUse": { + "description": "number of times an identified codec was used over the measurementInterval", + "type": "object", + "properties": { + "codecIdentifier": { "type": "string" }, + "numberInUse": { "type": "number" } + }, + "required": [ "codecIdentifier", "numberInUse" ] + }, + "command": { + "description": "command from an event collector toward an event source", + "type": "object", + "properties": { + "commandType": { + "type": "string", + "enum": [ + "heartbeatIntervalChange", + "measurementIntervalChange", + "provideThrottlingState", + "throttlingSpecification" + ] + }, + "eventDomainThrottleSpecification": { "$ref": "#/definitions/eventDomainThrottleSpecification" }, + "measurementInterval": { "type": "number" } + }, + "required": [ "commandType" ] + }, + "commandList": { + "description": "array of commands from an event collector toward an event source", + "type": "array", + "items": { + "$ref": "#/definitions/commandListEntry" + }, + "minItems": 0 + }, + "commandListEntry": { + "description": "reference to a command object", + "type": "object", + "properties": { + "command": {"$ref": "#/definitions/command"} + }, + "required": [ "command" ] + }, + "commonEventHeader": { + "description": "fields common to all events", + "type": "object", + "properties": { + "domain": { + "description": "the eventing domain associated with the event", + "type": "string", + "enum": [ + "fault", + "heartbeat", + "measurementsForVfScaling", + "mobileFlow", + "other", + "stateChange", + "syslog", + "thresholdCrossingAlert" + ] + }, + "eventId": { + "description": "event key that is unique to the event source", + "type": "string" + }, + "eventType": { + "description": "unique event topic name", + "type": "string" + }, + "functionalRole": { + "description": "function of the event source e.g., eNodeB, MME, PCRF", + "type": "string" + }, + "internalHeaderFields": { "$ref": "#/definitions/internalHeaderFields" }, + "lastEpochMicrosec": { + "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "priority": { + "description": "processing priority", + "type": "string", + "enum": [ + "High", + "Medium", + "Normal", + "Low" + ] + }, + "reportingEntityId": { + "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process", + "type": "string" + }, + "reportingEntityName": { + "description": "name of the entity reporting the event, for example, an OAM VM", + "type": "string" + }, + "sequence": { + "description": "ordering of events communicated by an event source instance or 0 if not needed", + "type": "integer" + }, + "sourceId": { + "description": "UUID identifying the entity experiencing the event issue; must be populated by the 
ATT enrichment process", + "type": "string" + }, + "sourceName": { + "description": "name of the entity experiencing the event issue", + "type": "string" + }, + "startEpochMicrosec": { + "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "version": { + "description": "version of the event header", + "type": "number" + } + }, + "required": [ "domain", "eventId", "functionalRole", "lastEpochMicrosec", + "priority", "reportingEntityName", "sequence", + "sourceName", "startEpochMicrosec" ] + }, + "counter": { + "description": "performance counter", + "type": "object", + "properties": { + "criticality": { "type": "string", "enum": [ "CRIT", "MAJ" ] }, + "name": { "type": "string" }, + "thresholdCrossed": { "type": "string" }, + "value": { "type": "string"} + }, + "required": [ "criticality", "name", "thresholdCrossed", "value" ] + }, + "cpuUsage": { + "description": "percent usage of an identified CPU", + "type": "object", + "properties": { + "cpuIdentifier": { "type": "string" }, + "percentUsage": { "type": "number" } + }, + "required": [ "cpuIdentifier", "percentUsage" ] + }, + "errors": { + "description": "receive and transmit errors for the measurements domain", + "type": "object", + "properties": { + "receiveDiscards": { "type": "number" }, + "receiveErrors": { "type": "number" }, + "transmitDiscards": { "type": "number" }, + "transmitErrors": { "type": "number" } + }, + "required": [ "receiveDiscards", "receiveErrors", "transmitDiscards", "transmitErrors" ] + }, + "event": { + "description": "the root level of the common event format", + "type": "object", + "properties": { + "commonEventHeader": { "$ref": "#/definitions/commonEventHeader" }, + "faultFields": { "$ref": "#/definitions/faultFields" }, + "measurementsForVfScalingFields": { "$ref": "#/definitions/measurementsForVfScalingFields" }, + "mobileFlowFields": { "$ref": "#/definitions/mobileFlowFields" }, + "otherFields": { "$ref": "#/definitions/otherFields" }, + "stateChangeFields": { "$ref": "#/definitions/stateChangeFields" }, + "syslogFields": { "$ref": "#/definitions/syslogFields" }, + "thresholdCrossingAlertFields": { "$ref": "#/definitions/thresholdCrossingAlertFields" } + }, + "required": [ "commonEventHeader" ] + }, + "eventDomainThrottleSpecification": { + "description": "specification of what information to suppress within an event domain", + "type": "object", + "properties": { + "eventDomain": { + "description": "Event domain enum from the commonEventHeader domain field", + "type": "string" + }, + "suppressedFieldNames": { + "description": "List of optional field names in the event block that should not be sent to the Event Listener", + "type": "array", + "items": { + "type": "string" + } + }, + "suppressedNvPairsList": { + "description": "Optional list of specific NvPairsNames to suppress within a given Name-Value Field", + "type": "array", + "items": { + "$ref": "#/definitions/suppressedNvPairs" + } + } + }, + "required": [ "eventDomain" ] + }, + "eventDomainThrottleSpecificationList": { + "description": "array of eventDomainThrottleSpecifications", + "type": "array", + "items": { + "$ref": "#/definitions/eventDomainThrottleSpecification" + }, + "minItems": 0 + }, + "eventList": { + "description": "array of events", + "type": "array", + "items": { + "$ref": "#/definitions/event" + } + }, + "eventThrottlingState": { + "description": "reports the throttling in force at the event 
source", + "type": "object", + "properties": { + "eventThrottlingMode": { + "description": "Mode the event manager is in", + "type": "string", + "enum": [ + "normal", + "throttled" + ] + }, + "eventDomainThrottleSpecificationList": { "$ref": "#/definitions/eventDomainThrottleSpecificationList" } + }, + "required": [ "eventThrottlingMode" ] + }, + "faultFields": { + "description": "fields specific to fault events", + "type": "object", + "properties": { + "alarmAdditionalInformation": { + "description": "additional alarm information", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "alarmCondition": { + "description": "alarm condition reported by the device", + "type": "string" + }, + "alarmInterfaceA": { + "description": "card, port, channel or interface name of the device generating the alarm", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "faultFieldsVersion": { + "description": "version of the faultFields block", + "type": "number" + }, + "specificProblem": { + "description": "short description of the alarm or problem", + "type": "string" + }, + "vfStatus": { + "description": "virtual function status enumeration", + "type": "string", + "enum": [ + "Active", + "Idle", + "Preparing to terminate", + "Ready to terminate", + "Requesting termination" + ] + } + }, + "required": [ "alarmCondition", "eventSeverity", + "eventSourceType", "specificProblem", "vfStatus" ] + }, + "featuresInUse": { + "description": "number of times an identified feature was used over the measurementInterval", + "type": "object", + "properties": { + "featureIdentifier": { "type": "string" }, + "featureUtilization": { "type": "number" } + }, + "required": [ "featureIdentifier", "featureUtilization" ] + }, + "field": { + "description": "name value pair", + "type": "object", + "properties": { + "name": { "type": "string" }, + "value": { "type": "string" } + }, + "required": [ "name", "value" ] + }, + "filesystemUsage": { + "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second", + "type": "object", + "properties": { + "blockConfigured": { "type": "number" }, + "blockIops": { "type": "number" }, + "blockUsed": { "type": "number" }, + "ephemeralConfigured": { "type": "number" }, + "ephemeralIops": { "type": "number" }, + "ephemeralUsed": { "type": "number" }, + "filesystemName": { "type": "string" } + }, + "required": [ "blockConfigured", "blockIops", "blockUsed", "ephemeralConfigured", + "ephemeralIops", "ephemeralUsed", "filesystemName" ] + }, + "gtpPerFlowMetrics": { + "description": "Mobility GTP Protocol per flow metrics", + "type": "object", + "properties": { + "avgBitErrorRate": { + "description": "average bit error rate", + "type": "number" + }, + "avgPacketDelayVariation": { + "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "avgPacketLatency": { + "description": "average delivery latency", + "type": "number" + }, + "avgReceiveThroughput": { + "description": "average receive throughput", + "type": "number" + }, + 
"avgTransmitThroughput": { + "description": "average transmit throughput", + "type": "number" + }, + "durConnectionFailedStatus": { + "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval", + "type": "number" + }, + "durTunnelFailedStatus": { + "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval", + "type": "number" + }, + "flowActivatedBy": { + "description": "Endpoint activating the flow", + "type": "string" + }, + "flowActivationEpoch": { + "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available", + "type": "number" + }, + "flowActivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowActivationTime": { + "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowDeactivatedBy": { + "description": "Endpoint deactivating the flow", + "type": "string" + }, + "flowDeactivationEpoch": { + "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time", + "type": "number" + }, + "flowDeactivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowDeactivationTime": { + "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowStatus": { + "description": "connection status at reporting time as a working / inactive / failed indicator value", + "type": "string" + }, + "gtpConnectionStatus": { + "description": "Current connection state at reporting time", + "type": "string" + }, + "gtpTunnelStatus": { + "description": "Current tunnel state at reporting time", + "type": "string" + }, + "ipTosCountList": { + "description": "array of key: value pairs where the keys are drawn from the IP Type-of-Service identifiers which range from '0' to '255', and the values are the count of packets that had those ToS identifiers in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "ipTosList": { + "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'", + "type": "array", + "items": { + "type": "string" + } + }, + "largePacketRtt": { + "description": "large packet round trip time", + "type": "number" + }, + "largePacketThreshold": { + "description": "large packet threshold being applied", + "type": "number" + }, + "maxPacketDelayVariation": { + "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "maxReceiveBitRate": { + "description": "maximum receive bit rate", + "type": "number" + }, + "maxTransmitBitRate": { + "description": "maximum transmit bit rate", + "type": "number" + }, + "mobileQciCosCountList": { 
+ "description": "array of key: value pairs where the keys are drawn from LTE QCI or UMTS class of service strings, and the values are the count of packets that had those strings in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "mobileQciCosList": { + "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "numActivationFailures": { + "description": "Number of failed activation requests, as observed by the reporting node", + "type": "number" + }, + "numBitErrors": { + "description": "number of errored bits", + "type": "number" + }, + "numBytesReceived": { + "description": "number of bytes received, including retransmissions", + "type": "number" + }, + "numBytesTransmitted": { + "description": "number of bytes transmitted, including retransmissions", + "type": "number" + }, + "numDroppedPackets": { + "description": "number of received packets dropped due to errors per virtual interface", + "type": "number" + }, + "numGtpEchoFailures": { + "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2", + "type": "number" + }, + "numGtpTunnelErrors": { + "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1", + "type": "number" + }, + "numHttpErrors": { + "description": "Http error count", + "type": "number" + }, + "numL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, including retransmissions", + "type": "number" + }, + "numL7BytesTransmitted": { + "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions", + "type": "number" + }, + "numLostPackets": { + "description": "number of lost packets", + "type": "number" + }, + "numOutOfOrderPackets": { + "description": "number of out-of-order packets", + "type": "number" + }, + "numPacketErrors": { + "description": "number of errored packets", + "type": "number" + }, + "numPacketsReceivedExclRetrans": { + "description": "number of packets received, excluding retransmission", + "type": "number" + }, + "numPacketsReceivedInclRetrans": { + "description": "number of packets received, including retransmission", + "type": "number" + }, + "numPacketsTransmittedInclRetrans": { + "description": "number of packets transmitted, including retransmissions", + "type": "number" + }, + "numRetries": { + "description": "number of packet retries", + "type": "number" + }, + "numTimeouts": { + "description": "number of packet timeouts", + "type": "number" + }, + "numTunneledL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, excluding retransmissions", + "type": "number" + }, + "roundTripTime": { + "description": "round trip time", + "type": "number" + }, + "tcpFlagCountList": { + "description": "array of key: value pairs where the keys are drawn from TCP Flags and the values are the count of packets that had that TCP Flag in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "tcpFlagList": { + "description": "Array of unique TCP Flags observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "timeToFirstByte": { + "description": "Time in milliseconds between the connection activation and first byte received", + "type": 
"number" + } + }, + "required": [ "avgBitErrorRate", "avgPacketDelayVariation", "avgPacketLatency", + "avgReceiveThroughput", "avgTransmitThroughput", + "flowActivationEpoch", "flowActivationMicrosec", + "flowDeactivationEpoch", "flowDeactivationMicrosec", + "flowDeactivationTime", "flowStatus", + "maxPacketDelayVariation", "numActivationFailures", + "numBitErrors", "numBytesReceived", "numBytesTransmitted", + "numDroppedPackets", "numL7BytesReceived", + "numL7BytesTransmitted", "numLostPackets", + "numOutOfOrderPackets", "numPacketErrors", + "numPacketsReceivedExclRetrans", + "numPacketsReceivedInclRetrans", + "numPacketsTransmittedInclRetrans", + "numRetries", "numTimeouts", "numTunneledL7BytesReceived", + "roundTripTime", "timeToFirstByte" + ] + }, + "internalHeaderFields": { + "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources", + "type": "object" + }, + "latencyBucketMeasure": { + "description": "number of counts falling within a defined latency bucket", + "type": "object", + "properties": { + "countsInTheBucket": { "type": "number" }, + "highEndOfLatencyBucket": { "type": "number" }, + "lowEndOfLatencyBucket": { "type": "number" } + }, + "required": [ "countsInTheBucket" ] + }, + "measurementGroup": { + "description": "measurement group", + "type": "object", + "properties": { + "name": { "type": "string" }, + "measurements": { + "description": "array of name value pair measurements", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + } + }, + "required": [ "name", "measurements" ] + }, + "measurementsForVfScalingFields": { + "description": "measurementsForVfScaling fields", + "type": "object", + "properties": { + "additionalMeasurements": { + "description": "additional measurement fields", + "type": "array", + "items": { + "$ref": "#/definitions/measurementGroup" + } + }, + "aggregateCpuUsage": { + "description": "aggregate CPU usage of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "codecUsageArray": { + "description": "array of codecs in use", + "type": "array", + "items": { + "$ref": "#/definitions/codecsInUse" + } + }, + "concurrentSessions": { + "description": "peak concurrent sessions for the VM or VNF over the measurementInterval", + "type": "number" + }, + "configuredEntities": { + "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the VNF", + "type": "number" + }, + "cpuUsageArray": { + "description": "usage of an array of CPUs", + "type": "array", + "items": { + "$ref": "#/definitions/cpuUsage" + } + }, + "errors": { "$ref": "#/definitions/errors" }, + "featureUsageArray": { + "description": "array of features in use", + "type": "array", + "items": { + "$ref": "#/definitions/featuresInUse" + } + }, + "filesystemUsageArray": { + "description": "filesystem usage of the VM on which the VNFC reporting the event is running", + "type": "array", + "items": { + "$ref": "#/definitions/filesystemUsage" + } + }, + "latencyDistribution": { + "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-VNF configured ranges", + "type": "array", + "items": { + "$ref": "#/definitions/latencyBucketMeasure" + } + }, + "meanRequestLatency": { + "description": "mean seconds required to respond to each request for the VM on which the VNFC reporting the event is running", + "type": "number" + }, + 
"measurementInterval": { + "description": "interval over which measurements are being reported in seconds", + "type": "number" + }, + "measurementsForVfScalingVersion": { + "description": "version of the measurementsForVfScaling block", + "type": "number" + }, + "memoryConfigured": { + "description": "memory in MB configured in the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "memoryUsed": { + "description": "memory usage in MB of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "numberOfMediaPortsInUse": { + "description": "number of media ports in use", + "type": "number" + }, + "requestRate": { + "description": "peak rate of service requests per second to the VNF over the measurementInterval", + "type": "number" + }, + "vnfcScalingMetric": { + "description": "represents busy-ness of the VNF from 0 to 100 as reported by the VNFC", + "type": "number" + }, + "vNicUsageArray": { + "description": "usage of an array of virtual network interface cards", + "type": "array", + "items": { + "$ref": "#/definitions/vNicUsage" + } + } + }, + "required": [ "measurementInterval" ] + }, + "mobileFlowFields": { + "description": "mobileFlow fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional mobileFlow fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "applicationType": { + "description": "Application type inferred", + "type": "string" + }, + "appProtocolType": { + "description": "application protocol", + "type": "string" + }, + "appProtocolVersion": { + "description": "application protocol version", + "type": "string" + }, + "cid": { + "description": "cell id", + "type": "string" + }, + "connectionType": { + "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc", + "type": "string" + }, + "ecgi": { + "description": "Evolved Cell Global Id", + "type": "string" + }, + "flowDirection": { + "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow", + "type": "string" + }, + "gtpPerFlowMetrics": { "$ref": "#/definitions/gtpPerFlowMetrics" }, + "gtpProtocolType": { + "description": "GTP protocol", + "type": "string" + }, + "gtpVersion": { + "description": "GTP protocol version", + "type": "string" + }, + "httpHeader": { + "description": "HTTP request header, if the flow connects to a node referenced by HTTP", + "type": "string" + }, + "imei": { + "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "imsi": { + "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "ipProtocolType": { + "description": "IP protocol type e.g., TCP, UDP, RTP...", + "type": "string" + }, + "ipVersion": { + "description": "IP protocol version e.g., IPv4, IPv6", + "type": "string" + }, + "lac": { + "description": "location area code", + "type": "string" + }, + "mcc": { + "description": "mobile country code", + "type": "string" + }, + "mnc": { + "description": "mobile network code", + "type": "string" + }, + "mobileFlowFieldsVersion": { + "description": "version of the mobileFlowFields block", + "type": "number" + }, + "msisdn": { + "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device", + "type": "string" + }, + "otherEndpointIpAddress": { + "description": "IP address 
for the other endpoint, as used for the flow being reported on", + "type": "string" + }, + "otherEndpointPort": { + "description": "IP Port for the reporting entity, as used for the flow being reported on", + "type": "number" + }, + "otherFunctionalRole": { + "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...", + "type": "string" + }, + "rac": { + "description": "routing area code", + "type": "string" + }, + "radioAccessTechnology": { + "description": "Radio Access Technology e.g., 2G, 3G, LTE", + "type": "string" + }, + "reportingEndpointIpAddr": { + "description": "IP address for the reporting entity, as used for the flow being reported on", + "type": "string" + }, + "reportingEndpointPort": { + "description": "IP port for the reporting entity, as used for the flow being reported on", + "type": "number" + }, + "sac": { + "description": "service area code", + "type": "string" + }, + "samplingAlgorithm": { + "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied", + "type": "number" + }, + "tac": { + "description": "transport area code", + "type": "string" + }, + "tunnelId": { + "description": "tunnel identifier", + "type": "string" + }, + "vlanId": { + "description": "VLAN identifier used by this flow", + "type": "string" + } + }, + "required": [ "flowDirection", "gtpPerFlowMetrics", "ipProtocolType", + "ipVersion", "otherEndpointIpAddress", "otherEndpointPort", + "reportingEndpointIpAddr", "reportingEndpointPort" ] + }, + "otherFields": { + "description": "additional fields not reported elsewhere", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "required": [ "messageId", "text" ] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional stateChange fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "number" + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "required": [ "newState", "oldState", "stateInterface" ] + }, + "suppressedNvPairs": { + "description": "List 
of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling", + "type": "object", + "properties": { + "nvPairFieldName": { + "description": "Name of the field within which are the nvpair names to suppress", + "type": "string" + }, + "suppressedNvPairNames": { + "description": "Array of nvpair names to suppress within the nvpairFieldName", + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ "nvPairFieldName", "suppressedNvPairNames" ] + }, + "syslogFields": { + "description": "sysLog fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional syslog fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "eventSourceHost": { + "description": "hostname of the device", + "type": "string" + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "syslogFacility": { + "description": "numeric code from 0 to 23 for facility--see table in documentation", + "type": "number" + }, + "syslogFieldsVersion": { + "description": "version of the syslogFields block", + "type": "number" + }, + "syslogMsg": { + "description": "syslog message", + "type": "string" + }, + "syslogPri": { + "description": "0-192 combined severity and facility", + "type": "number" + }, + "syslogProc": { + "description": "identifies the application that originated the message", + "type": "string" + }, + "syslogProcId": { + "description": "a change in the value of this field indicates a discontinuity in syslog reporting", + "type": "number" + }, + "syslogSData": { + "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", + "type": "string" + }, + "syslogSdId": { + "description": "0-32 char in format name@number for example ourSDID@32473", + "type": "string" + }, + "syslogSev": { + "description": "numerical Code for severity derived from syslogPri as remainder of syslogPri / 8", + "type": "string" + }, + "syslogTag": { + "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", + "type": "string" + }, + "syslogVer": { + "description": "IANA assigned version of the syslog protocol specification - typically 1", + "type": "number" + } + }, + "required": [ "eventSourceType", "syslogMsg", "syslogTag" ] + }, + "thresholdCrossingAlertFields": { + "description": "fields specific to threshold crossing alert events", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional threshold crossing alert fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "additionalParameters": { + "description": "performance counters", + "type": "array", + "items": { + "$ref": "#/definitions/counter" + } + }, + "alertAction": { + "description": "Event action", + "type": "string", + "enum": [ + "CLEAR", + "CONT", + "SET" + ] + }, + "alertDescription": { + "description": "Unique short alert description such as IF-SHUB-ERRDROP", + "type": "string" + }, + "alertType": { + "description": "Event type", + "type": "string", + "enum": [ + "CARD-ANOMALY", + "ELEMENT-ANOMALY", + "INTERFACE-ANOMALY", + "SERVICE-ANOMALY" + ] + }, + "alertValue": { + "description": "Calculated API value (if applicable)", + "type": "string" + }, + "associatedAlertIdList": { + "description": "List of
eventIds associated with the event being reported", + "type": "array", + "items": { "type": "string" } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": "string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + "description": "version of the thresholdCrossingAlertFields block", + "type": "number" + } + }, + "required": [ + "additionalParameters", + "alertAction", + "alertDescription", + "alertType", + "collectionTimestamp", + "eventSeverity", + "eventStartTimestamp" + ] + }, + "vNicUsage": { + "description": "usage of identified virtual network interface card", + "type": "object", + "properties": { + "broadcastPacketsIn": { "type": "number" }, + "broadcastPacketsOut": { "type": "number" }, + "bytesIn": { "type": "number" }, + "bytesOut": { "type": "number" }, + "multicastPacketsIn": { "type": "number" }, + "multicastPacketsOut": { "type": "number" }, + "packetsIn": { "type": "number" }, + "packetsOut": { "type": "number" }, + "unicastPacketsIn": { "type": "number" }, + "unicastPacketsOut": { "type": "number" }, + "vNicIdentifier": { "type": "string" } + }, + "required": [ "bytesIn", "bytesOut", "packetsIn", "packetsOut", "vNicIdentifier"] + } + }, + "title": "Event Listener", + "type": "object", + "properties": { + "event": {"$ref": "#/definitions/event"} + } +}
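For orientation, the schema above requires nothing at the root, but a meaningful event needs a commonEventHeader carrying its nine required fields (domain, eventId, functionalRole, lastEpochMicrosec, priority, reportingEntityName, sequence, sourceName, startEpochMicrosec). A sketch of a minimal conforming instance built with Gson; all values are illustrative only:

import com.google.gson.JsonObject;

// Illustrative sketch: a minimal event accepted by the v4.1 schema above.
class MinimalVesEventSketch {
    static JsonObject minimalEvent() {
        JsonObject header = new JsonObject();
        header.addProperty("domain", "heartbeat");             // one of the domain enum values
        header.addProperty("eventId", "event-0001");
        header.addProperty("functionalRole", "eNodeB");
        header.addProperty("lastEpochMicrosec", 1463246400000000L);
        header.addProperty("priority", "Normal");              // High | Medium | Normal | Low
        header.addProperty("reportingEntityName", "oamVm001");
        header.addProperty("sequence", 0);                     // 0 when ordering is not needed
        header.addProperty("sourceName", "vnf001");
        header.addProperty("startEpochMicrosec", 1463246400000000L);

        JsonObject event = new JsonObject();
        event.add("commonEventHeader", header);

        JsonObject root = new JsonObject();
        root.add("event", event);
        return root;
    }
}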
\ No newline at end of file diff --git a/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_v4.1WithoutCommonEventHeader.json b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_v4.1WithoutCommonEventHeader.json new file mode 100644 index 0000000..f165818 --- /dev/null +++ b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_v4.1WithoutCommonEventHeader.json @@ -0,0 +1,1087 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "definitions": { + "attCopyrightNotice": { + "description": "Copyright (c) <2016>, AT&T Intellectual Property. All other rights reserved", + "type": "object", + "properties": { + "useAndRedistribution": { + "description": "Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:", + "type": "string" + }, + "condition1": { + "description": "Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.", + "type": "string" + }, + "condition2": { + "description": "Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.", + "type": "string" + }, + "condition3": { + "description": "All advertising materials mentioning features or use of this software must display the following acknowledgement: This product includes software developed by the AT&T.", + "type": "string" + }, + "condition4": { + "description": "Neither the name of AT&T nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.", + "type": "string" + }, + "disclaimerLine1": { + "description": "THIS SOFTWARE IS PROVIDED BY AT&T INTELLECTUAL PROPERTY AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", + "type": "string" + }, + "disclaimerLine2": { + "description": "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL AT&T INTELLECTUAL PROPERTY BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", + "type": "string" + }, + "disclaimerLine3": { + "description": "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,", + "type": "string" + }, + "disclaimerLine4": { + "description": "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + "type": "string" + } + } + }, + "codecsInUse": { + "description": "number of times an identified codec was used over the measurementInterval", + "type": "object", + "properties": { + "codecIdentifier": { "type": "string" }, + "numberInUse": { "type": "number" } + }, + "required": [ "codecIdentifier", "numberInUse" ] + }, + "command": { + "description": "command from an event collector toward an event source", + "type": "object", + "properties": { + "commandType": { + "type": "string", + "enum": [ + "heartbeatIntervalChange", + "measurementIntervalChange", + "provideThrottlingState", + "throttlingSpecification" + ] + }, + "eventDomainThrottleSpecification": { "$ref": "#/definitions/eventDomainThrottleSpecification" }, + "measurementInterval": { "type": "number" } + }, + "required": [ "commandType" ] + }, + "commandList": { + "description": "array of commands from an event collector toward an event source", + "type": "array", + "items": { + "$ref": "#/definitions/commandListEntry" + }, + "minItems": 0 + }, + "commandListEntry": { + "description": "reference to a command object", + "type": "object", + "properties": { + "command": {"$ref": "#/definitions/command"} + }, + "required": [ "command" ] + }, + + "counter": { + "description": "performance counter", + "type": "object", + "properties": { + "criticality": { "type": "string", "enum": [ "CRIT", "MAJ" ] }, + "name": { "type": "string" }, + "thresholdCrossed": { "type": "string" }, + "value": { "type": "string"} + }, + "required": [ "criticality", "name", "thresholdCrossed", "value" ] + }, + "cpuUsage": { + "description": "percent usage of an identified CPU", + "type": "object", + "properties": { + "cpuIdentifier": { "type": "string" }, + "percentUsage": { "type": "number" } + }, + "required": [ "cpuIdentifier", "percentUsage" ] + }, + "errors": { + "description": "receive and transmit errors for the measurements domain", + "type": "object", + "properties": { + "receiveDiscards": { "type": "number" }, + "receiveErrors": { "type": "number" }, + "transmitDiscards": { "type": "number" }, + "transmitErrors": { "type": "number" } + }, + "required": [ "receiveDiscards", "receiveErrors", "transmitDiscards", "transmitErrors" ] + }, + "event": { + "description": "the root level of the common event format", + "type": "object", + "properties": { + "faultFields": { "$ref": "#/definitions/faultFields" }, + "measurementsForVfScalingFields": { "$ref": "#/definitions/measurementsForVfScalingFields" }, + "mobileFlowFields": { "$ref": "#/definitions/mobileFlowFields" }, + "otherFields": { "$ref": "#/definitions/otherFields" }, + "stateChangeFields": { "$ref": "#/definitions/stateChangeFields" }, + "syslogFields": { "$ref": "#/definitions/syslogFields" }, + "thresholdCrossingAlertFields": { "$ref": "#/definitions/thresholdCrossingAlertFields" } + } + + }, + "eventDomainThrottleSpecification": { + "description": "specification of 
what information to suppress within an event domain", + "type": "object", + "properties": { + "eventDomain": { + "description": "Event domain enum from the commonEventHeader domain field", + "type": "string" + }, + "suppressedFieldNames": { + "description": "List of optional field names in the event block that should not be sent to the Event Listener", + "type": "array", + "items": { + "type": "string" + } + }, + "suppressedNvPairsList": { + "description": "Optional list of specific NvPairsNames to suppress within a given Name-Value Field", + "type": "array", + "items": { + "$ref": "#/definitions/suppressedNvPairs" + } + } + }, + "required": [ "eventDomain" ] + }, + "eventDomainThrottleSpecificationList": { + "description": "array of eventDomainThrottleSpecifications", + "type": "array", + "items": { + "$ref": "#/definitions/eventDomainThrottleSpecification" + }, + "minItems": 0 + }, + "eventList": { + "description": "array of events", + "type": "array", + "items": { + "$ref": "#/definitions/event" + } + }, + "eventThrottlingState": { + "description": "reports the throttling in force at the event source", + "type": "object", + "properties": { + "eventThrottlingMode": { + "description": "Mode the event manager is in", + "type": "string", + "enum": [ + "normal", + "throttled" + ] + }, + "eventDomainThrottleSpecificationList": { "$ref": "#/definitions/eventDomainThrottleSpecificationList" } + }, + "required": [ "eventThrottlingMode" ] + }, + "faultFields": { + "description": "fields specific to fault events", + "type": "object", + "properties": { + "alarmAdditionalInformation": { + "description": "additional alarm information", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "alarmCondition": { + "description": "alarm condition reported by the device", + "type": "string" + }, + "alarmInterfaceA": { + "description": "card, port, channel or interface name of the device generating the alarm", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "faultFieldsVersion": { + "description": "version of the faultFields block", + "type": "number" + }, + "specificProblem": { + "description": "short description of the alarm or problem", + "type": "string" + }, + "vfStatus": { + "description": "virtual function status enumeration", + "type": "string", + "enum": [ + "Active", + "Idle", + "Preparing to terminate", + "Ready to terminate", + "Requesting termination" + ] + } + }, + "required": [ "alarmCondition", "eventSeverity", + "eventSourceType", "specificProblem", "vfStatus" ] + }, + "featuresInUse": { + "description": "number of times an identified feature was used over the measurementInterval", + "type": "object", + "properties": { + "featureIdentifier": { "type": "string" }, + "featureUtilization": { "type": "number" } + }, + "required": [ "featureIdentifier", "featureUtilization" ] + }, + "field": { + "description": "name value pair", + "type": "object", + "properties": { + "name": { "type": "string" }, + "value": { "type": "string" } + }, + "required": [ "name", "value" ] + }, + "filesystemUsage": { + "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second", + "type": "object", + 
"properties": { + "blockConfigured": { "type": "number" }, + "blockIops": { "type": "number" }, + "blockUsed": { "type": "number" }, + "ephemeralConfigured": { "type": "number" }, + "ephemeralIops": { "type": "number" }, + "ephemeralUsed": { "type": "number" }, + "filesystemName": { "type": "string" } + }, + "required": [ "blockConfigured", "blockIops", "blockUsed", "ephemeralConfigured", + "ephemeralIops", "ephemeralUsed", "filesystemName" ] + }, + "gtpPerFlowMetrics": { + "description": "Mobility GTP Protocol per flow metrics", + "type": "object", + "properties": { + "avgBitErrorRate": { + "description": "average bit error rate", + "type": "number" + }, + "avgPacketDelayVariation": { + "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "avgPacketLatency": { + "description": "average delivery latency", + "type": "number" + }, + "avgReceiveThroughput": { + "description": "average receive throughput", + "type": "number" + }, + "avgTransmitThroughput": { + "description": "average transmit throughput", + "type": "number" + }, + "durConnectionFailedStatus": { + "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval", + "type": "number" + }, + "durTunnelFailedStatus": { + "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval", + "type": "number" + }, + "flowActivatedBy": { + "description": "Endpoint activating the flow", + "type": "string" + }, + "flowActivationEpoch": { + "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available", + "type": "number" + }, + "flowActivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowActivationTime": { + "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowDeactivatedBy": { + "description": "Endpoint deactivating the flow", + "type": "string" + }, + "flowDeactivationEpoch": { + "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time", + "type": "number" + }, + "flowDeactivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowDeactivationTime": { + "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowStatus": { + "description": "connection status at reporting time as a working / inactive / failed indicator value", + "type": "string" + }, + "gtpConnectionStatus": { + "description": "Current connection state at reporting time", + "type": "string" + }, + "gtpTunnelStatus": { + "description": "Current tunnel state at reporting time", + "type": "string" + }, + "ipTosCountList": { + "description": "array of key: value pairs where the keys are drawn from the IP Type-of-Service identifiers which 
range from '0' to '255', and the values are the count of packets that had those ToS identifiers in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "ipTosList": { + "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'", + "type": "array", + "items": { + "type": "string" + } + }, + "largePacketRtt": { + "description": "large packet round trip time", + "type": "number" + }, + "largePacketThreshold": { + "description": "large packet threshold being applied", + "type": "number" + }, + "maxPacketDelayVariation": { + "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "maxReceiveBitRate": { + "description": "maximum receive bit rate", + "type": "number" + }, + "maxTransmitBitRate": { + "description": "maximum transmit bit rate", + "type": "number" + }, + "mobileQciCosCountList": { + "description": "array of key: value pairs where the keys are drawn from LTE QCI or UMTS class of service strings, and the values are the count of packets that had those strings in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "mobileQciCosList": { + "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "numActivationFailures": { + "description": "Number of failed activation requests, as observed by the reporting node", + "type": "number" + }, + "numBitErrors": { + "description": "number of errored bits", + "type": "number" + }, + "numBytesReceived": { + "description": "number of bytes received, including retransmissions", + "type": "number" + }, + "numBytesTransmitted": { + "description": "number of bytes transmitted, including retransmissions", + "type": "number" + }, + "numDroppedPackets": { + "description": "number of received packets dropped due to errors per virtual interface", + "type": "number" + }, + "numGtpEchoFailures": { + "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2", + "type": "number" + }, + "numGtpTunnelErrors": { + "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 
11.1", + "type": "number" + }, + "numHttpErrors": { + "description": "Http error count", + "type": "number" + }, + "numL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, including retransmissions", + "type": "number" + }, + "numL7BytesTransmitted": { + "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions", + "type": "number" + }, + "numLostPackets": { + "description": "number of lost packets", + "type": "number" + }, + "numOutOfOrderPackets": { + "description": "number of out-of-order packets", + "type": "number" + }, + "numPacketErrors": { + "description": "number of errored packets", + "type": "number" + }, + "numPacketsReceivedExclRetrans": { + "description": "number of packets received, excluding retransmission", + "type": "number" + }, + "numPacketsReceivedInclRetrans": { + "description": "number of packets received, including retransmission", + "type": "number" + }, + "numPacketsTransmittedInclRetrans": { + "description": "number of packets transmitted, including retransmissions", + "type": "number" + }, + "numRetries": { + "description": "number of packet retries", + "type": "number" + }, + "numTimeouts": { + "description": "number of packet timeouts", + "type": "number" + }, + "numTunneledL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, excluding retransmissions", + "type": "number" + }, + "roundTripTime": { + "description": "round trip time", + "type": "number" + }, + "tcpFlagCountList": { + "description": "array of key: value pairs where the keys are drawn from TCP Flags and the values are the count of packets that had that TCP Flag in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "tcpFlagList": { + "description": "Array of unique TCP Flags observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "timeToFirstByte": { + "description": "Time in milliseconds between the connection activation and first byte received", + "type": "number" + } + }, + "required": [ "avgBitErrorRate", "avgPacketDelayVariation", "avgPacketLatency", + "avgReceiveThroughput", "avgTransmitThroughput", + "flowActivationEpoch", "flowActivationMicrosec", + "flowDeactivationEpoch", "flowDeactivationMicrosec", + "flowDeactivationTime", "flowStatus", + "maxPacketDelayVariation", "numActivationFailures", + "numBitErrors", "numBytesReceived", "numBytesTransmitted", + "numDroppedPackets", "numL7BytesReceived", + "numL7BytesTransmitted", "numLostPackets", + "numOutOfOrderPackets", "numPacketErrors", + "numPacketsReceivedExclRetrans", + "numPacketsReceivedInclRetrans", + "numPacketsTransmittedInclRetrans", + "numRetries", "numTimeouts", "numTunneledL7BytesReceived", + "roundTripTime", "timeToFirstByte" + ] + }, + "internalHeaderFields": { + "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources", + "type": "object" + }, + "latencyBucketMeasure": { + "description": "number of counts falling within a defined latency bucket", + "type": "object", + "properties": { + "countsInTheBucket": { "type": "number" }, + "highEndOfLatencyBucket": { "type": "number" }, + "lowEndOfLatencyBucket": { "type": "number" } + }, + "required": [ "countsInTheBucket" ] + }, + "measurementGroup": { + "description": "measurement group", + "type": "object", + "properties": { + "name": { "type": "string" }, + "measurements": { + "description": "array of name value pair 
measurements", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + } + }, + "required": [ "name", "measurements" ] + }, + "measurementsForVfScalingFields": { + "description": "measurementsForVfScaling fields", + "type": "object", + "properties": { + "additionalMeasurements": { + "description": "additional measurement fields", + "type": "array", + "items": { + "$ref": "#/definitions/measurementGroup" + } + }, + "aggregateCpuUsage": { + "description": "aggregate CPU usage of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "codecUsageArray": { + "description": "array of codecs in use", + "type": "array", + "items": { + "$ref": "#/definitions/codecsInUse" + } + }, + "concurrentSessions": { + "description": "peak concurrent sessions for the VM or VNF over the measurementInterval", + "type": "number" + }, + "configuredEntities": { + "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the VNF", + "type": "number" + }, + "cpuUsageArray": { + "description": "usage of an array of CPUs", + "type": "array", + "items": { + "$ref": "#/definitions/cpuUsage" + } + }, + "errors": { "$ref": "#/definitions/errors" }, + "featureUsageArray": { + "description": "array of features in use", + "type": "array", + "items": { + "$ref": "#/definitions/featuresInUse" + } + }, + "filesystemUsageArray": { + "description": "filesystem usage of the VM on which the VNFC reporting the event is running", + "type": "array", + "items": { + "$ref": "#/definitions/filesystemUsage" + } + }, + "latencyDistribution": { + "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-VNF configured ranges", + "type": "array", + "items": { + "$ref": "#/definitions/latencyBucketMeasure" + } + }, + "meanRequestLatency": { + "description": "mean seconds required to respond to each request for the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "measurementInterval": { + "description": "interval over which measurements are being reported in seconds", + "type": "number" + }, + "measurementsForVfScalingVersion": { + "description": "version of the measurementsForVfScaling block", + "type": "number" + }, + "memoryConfigured": { + "description": "memory in MB configured in the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "memoryUsed": { + "description": "memory usage in MB of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "numberOfMediaPortsInUse": { + "description": "number of media ports in use", + "type": "number" + }, + "requestRate": { + "description": "peak rate of service requests per second to the VNF over the measurementInterval", + "type": "number" + }, + "vnfcScalingMetric": { + "description": "represents busy-ness of the VNF from 0 to 100 as reported by the VNFC", + "type": "number" + }, + "vNicUsageArray": { + "description": "usage of an array of virtual network interface cards", + "type": "array", + "items": { + "$ref": "#/definitions/vNicUsage" + } + } + }, + "required": [ "measurementInterval" ] + }, + "mobileFlowFields": { + "description": "mobileFlow fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional mobileFlow fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "applicationType": { + "description": "Application type 
inferred", + "type": "string" + }, + "appProtocolType": { + "description": "application protocol", + "type": "string" + }, + "appProtocolVersion": { + "description": "application protocol version", + "type": "string" + }, + "cid": { + "description": "cell id", + "type": "string" + }, + "connectionType": { + "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc", + "type": "string" + }, + "ecgi": { + "description": "Evolved Cell Global Id", + "type": "string" + }, + "flowDirection": { + "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow", + "type": "string" + }, + "gtpPerFlowMetrics": { "$ref": "#/definitions/gtpPerFlowMetrics" }, + "gtpProtocolType": { + "description": "GTP protocol", + "type": "string" + }, + "gtpVersion": { + "description": "GTP protocol version", + "type": "string" + }, + "httpHeader": { + "description": "HTTP request header, if the flow connects to a node referenced by HTTP", + "type": "string" + }, + "imei": { + "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "imsi": { + "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "ipProtocolType": { + "description": "IP protocol type e.g., TCP, UDP, RTP...", + "type": "string" + }, + "ipVersion": { + "description": "IP protocol version e.g., IPv4, IPv6", + "type": "string" + }, + "lac": { + "description": "location area code", + "type": "string" + }, + "mcc": { + "description": "mobile country code", + "type": "string" + }, + "mnc": { + "description": "mobile network code", + "type": "string" + }, + "mobileFlowFieldsVersion": { + "description": "version of the mobileFlowFields block", + "type": "number" + }, + "msisdn": { + "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device", + "type": "string" + }, + "otherEndpointIpAddress": { + "description": "IP address for the other endpoint, as used for the flow being reported on", + "type": "string" + }, + "otherEndpointPort": { + "description": "IP Port for the reporting entity, as used for the flow being reported on", + "type": "number" + }, + "otherFunctionalRole": { + "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...", + "type": "string" + }, + "rac": { + "description": "routing area code", + "type": "string" + }, + "radioAccessTechnology": { + "description": "Radio Access Technology e.g., 2G, 3G, LTE", + "type": "string" + }, + "reportingEndpointIpAddr": { + "description": "IP address for the reporting entity, as used for the flow being reported on", + "type": "string" + }, + "reportingEndpointPort": { + "description": "IP port for the reporting entity, as used for the flow being reported on", + "type": "number" + }, + "sac": { + "description": "service area code", + "type": "string" + }, + "samplingAlgorithm": { + "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied", + "type": "number" + }, + "tac": { + "description": "transport area code", + "type": "string" + }, + "tunnelId": { + "description": "tunnel identifier", + "type": "string" + }, + "vlanId": { + "description": "VLAN identifier used by this flow", + "type": "string" + } + }, + 
"required": [ "flowDirection", "gtpPerFlowMetrics", "ipProtocolType", + "ipVersion", "otherEndpointIpAddress", "otherEndpointPort", + "reportingEndpointIpAddr", "reportingEndpointPort" ] + }, + "otherFields": { + "description": "additional fields not reported elsewhere", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "required": [ "messageId", "text" ] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional stateChange fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "number" + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "required": [ "newState", "oldState", "stateInterface" ] + }, + "suppressedNvPairs": { + "description": "List of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling", + "type": "object", + "properties": { + "nvPairFieldName": { + "description": "Name of the field within which are the nvpair names to suppress", + "type": "string" + }, + "suppressedNvPairNames": { + "description": "Array of nvpair names to suppress within the nvpairFieldName", + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ "nvPairFieldName", "suppressedNvPairNames" ] + }, + "syslogFields": { + "description": "sysLog fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional syslog fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "eventSourceHost": { + "description": "hostname of the device", + "type": "string" + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "syslogFacility": { + "description": "numeric code from 0 to 23 for facility--see table in documentation", + "type": "number" + }, + "syslogFieldsVersion": { + "description": "version of the syslogFields block", + "type": "number" + }, + "syslogMsg": { + "description": "syslog message", + "type": "string" + }, + "syslogPri": { + "description": "0-192 combined severity and facility", + "type": "number" + }, + 
"syslogProc": { + "description": "identifies the application that originated the message", + "type": "string" + }, + "syslogProcId": { + "description": "a change in the value of this field indicates a discontinuity in syslog reporting", + "type": "number" + }, + "syslogSData": { + "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", + "type": "string" + }, + "syslogSdId": { + "description": "0-32 char in format name@number for example ourSDID@32473", + "type": "string" + }, + "syslogSev": { + "description": "numerical Code for severity derived from syslogPri as remaider of syslogPri / 8", + "type": "string" + }, + "syslogTag": { + "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", + "type": "string" + }, + "syslogVer": { + "description": "IANA assigned version of the syslog protocol specification - typically 1", + "type": "number" + } + }, + "required": [ "eventSourceType", "syslogMsg", "syslogTag" ] + }, + "thresholdCrossingAlertFields": { + "description": "fields specific to threshold crossing alert events", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional threshold crossing alert fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "additionalParameters": { + "description": "performance counters", + "type": "array", + "items": { + "$ref": "#/definitions/counter" + } + }, + "alertAction": { + "description": "Event action", + "type": "string", + "enum": [ + "CLEAR", + "CONT", + "SET" + ] + }, + "alertDescription": { + "description": "Unique short alert description such as IF-SHUB-ERRDROP", + "type": "string" + }, + "alertType": { + "description": "Event type", + "type": "string", + "enum": [ + "CARD-ANOMALY", + "ELEMENT-ANOMALY", + "INTERFACE-ANOMALY", + "SERVICE-ANOMALY" + ] + }, + "alertValue": { + "description": "Calculated API value (if applicable)", + "type": "string" + }, + "associatedAlertIdList": { + "description": "List of eventIds associated with the event being reported", + "type": "array", + "items": { "type": "string" } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": "string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + "description": "version of the thresholdCrossingAlertFields block", + "type": "number" + } + }, + "required": [ + "additionalParameters", + "alertAction", + "alertDescription", + "alertType", + "collectionTimestamp", + "eventSeverity", + 
"eventStartTimestamp" + ] + }, + "vNicUsage": { + "description": "usage of identified virtual network interface card", + "type": "object", + "properties": { + "broadcastPacketsIn": { "type": "number" }, + "broadcastPacketsOut": { "type": "number" }, + "bytesIn": { "type": "number" }, + "bytesOut": { "type": "number" }, + "multicastPacketsIn": { "type": "number" }, + "multicastPacketsOut": { "type": "number" }, + "packetsIn": { "type": "number" }, + "packetsOut": { "type": "number" }, + "unicastPacketsIn": { "type": "number" }, + "unicastPacketsOut": { "type": "number" }, + "vNicIdentifier": { "type": "string" } + }, + "required": [ "bytesIn", "bytesOut", "packetsIn", "packetsOut", "vNicIdentifier"] + } + }, + "title": "Event Listener", + "type": "object", + "properties": { + "event": {"$ref": "#/definitions/event"} + } +}
\ No newline at end of file diff --git a/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_v5.3.json b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_v5.3.json new file mode 100644 index 0000000..fca55d9 --- /dev/null +++ b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_v5.3.json @@ -0,0 +1,2004 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "definitions": { + "attCopyrightNotice": { + "description": "Copyright (c) <2016>, AT&T Intellectual Property. All other rights reserved", + "type": "object", + "properties": { + "useAndRedistribution": { + "description": "Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:", + "type": "string" + }, + "condition1": { + "description": "Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.", + "type": "string" + }, + "condition2": { + "description": "Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.", + "type": "string" + }, + "condition3": { + "description": "All advertising materials mentioning features or use of this software must display the following acknowledgement: This product includes software developed by the AT&T.", + "type": "string" + }, + "condition4": { + "description": "Neither the name of AT&T nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.", + "type": "string" + }, + "disclaimerLine1": { + "description": "THIS SOFTWARE IS PROVIDED BY AT&T INTELLECTUAL PROPERTY AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", + "type": "string" + }, + "disclaimerLine2": { + "description": "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL AT&T INTELLECTUAL PROPERTY BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", + "type": "string" + }, + "disclaimerLine3": { + "description": "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,", + "type": "string" + }, + "disclaimerLine4": { + "description": "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + "type": "string" + } + } + }, + "codecsInUse": { + "description": "number of times an identified codec was used over the measurementInterval", + "type": "object", + "properties": { + "codecIdentifier": { + "type": "string" + }, + "numberInUse": { + "type": "integer" + } + }, + "required": ["codecIdentifier", + "numberInUse"] + }, + "command": { + "description": "command from an event collector toward an event source", + "type": "object", + "properties": { + "commandType": { + "type": "string", + "enum": ["heartbeatIntervalChange", + "measurementIntervalChange", + "provideThrottlingState", + "throttlingSpecification"] + }, + "eventDomainThrottleSpecification": { + "$ref": "#/definitions/eventDomainThrottleSpecification" + }, + "heartbeatInterval": { + "type": "integer" + }, + "measurementInterval": { + "type": "integer" + } + }, + "required": ["commandType"] + }, + "commandList": { + "description": "array of commands from an event collector toward an event source", + "type": "array", + "items": { + "$ref": "#/definitions/command" + }, + "minItems": 0 + }, + "commonEventHeader": { + "description": "fields common to all events", + "type": "object", + "properties": { + "domain": { + "description": "the eventing domain associated with the event", + "type": "string", + "enum": ["fault", + "heartbeat", + "measurementsForVfScaling", + "mobileFlow", + "other", + "sipSignaling", + "stateChange", + "syslog", + "thresholdCrossingAlert", + "voiceQuality"] + }, + "eventId": { + "description": "event key that is unique to the event source", + "type": "string" + }, + "eventName": { + "description": "unique event name", + "type": "string" + }, + "eventType": { + "description": "for example - applicationVnf, guestOS, hostOS, platform", + "type": "string" + }, + "internalHeaderFields": { + "$ref": "#/definitions/internalHeaderFields" + }, + "lastEpochMicrosec": { + "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "nfcNamingCode": { + "description": "3 character network function component type, aligned with vfc naming standards", + "type": "string" + }, + "nfNamingCode": { + "description": "4 character network function type, aligned with vnf naming standards", + "type": "string" + }, + "priority": { + "description": "processing priority", + "type": "string", + "enum": ["High", + "Medium", + "Normal", + "Low"] + }, + "reportingEntityId": { + "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process", + "type": "string" + }, + "reportingEntityName": { + "description": "name of the entity reporting the event, for example, an EMS name; may be the same as sourceName", + "type": "string" + }, + "sequence": { + "description": "ordering of events communicated by an 
event source instance or 0 if not needed", + "type": "integer" + }, + "sourceId": { + "description": "UUID identifying the entity experiencing the event issue; must be populated by the ATT enrichment process", + "type": "string" + }, + "sourceName": { + "description": "name of the entity experiencing the event issue", + "type": "string" + }, + "startEpochMicrosec": { + "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "version": { + "description": "version of the event header", + "type": "number" + } + }, + "required": ["domain", + "eventId", + "eventName", + "lastEpochMicrosec", + "priority", + "reportingEntityName", + "sequence", + "sourceName", + "startEpochMicrosec", + "version"] + }, + "counter": { + "description": "performance counter", + "type": "object", + "properties": { + "criticality": { + "type": "string", + "enum": ["CRIT", + "MAJ"] + }, + "name": { + "type": "string" + }, + "thresholdCrossed": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "required": ["criticality", + "name", + "thresholdCrossed", + "value"] + }, + "cpuUsage": { + "description": "usage of an identified CPU", + "type": "object", + "properties": { + "cpuIdentifier": { + "description": "cpu identifier", + "type": "string" + }, + "cpuIdle": { + "description": "percentage of CPU time spent in the idle task", + "type": "number" + }, + "cpuUsageInterrupt": { + "description": "percentage of time spent servicing interrupts", + "type": "number" + }, + "cpuUsageNice": { + "description": "percentage of time spent running user space processes that have been niced", + "type": "number" + }, + "cpuUsageSoftIrq": { + "description": "percentage of time spent handling soft irq interrupts", + "type": "number" + }, + "cpuUsageSteal": { + "description": "percentage of time spent in involuntary wait which is neither user, system or idle time and is effectively time that went missing", + "type": "number" + }, + "cpuUsageSystem": { + "description": "percentage of time spent on system tasks running the kernel", + "type": "number" + }, + "cpuUsageUser": { + "description": "percentage of time spent running un-niced user space processes", + "type": "number" + }, + "cpuWait": { + "description": "percentage of CPU time spent waiting for I/O operations to complete", + "type": "number" + }, + "percentUsage": { + "description": "aggregate cpu usage of the virtual machine on which the VNFC reporting the event is running", + "type": "number" + } + }, + "required": ["cpuIdentifier", + "percentUsage"] + }, + "diskUsage": { + "description": "usage of an identified disk", + "type": "object", + "properties": { + "diskIdentifier": { + "description": "disk identifier", + "type": "string" + }, + "diskIoTimeAvg": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the average over the measurement interval", + "type": "number" + }, + "diskIoTimeLast": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskIoTimeMax": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; 
provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskIoTimeMin": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the minimum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadAvg": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadLast": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadMax": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadMin": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the minimum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteAvg": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteLast": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteMax": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteMin": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the minimum value measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadAvg": { + "description": "number of octets per second read from a disk or partition; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadLast": { + "description": "number of octets per second read from a disk or partition; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadMax": { + "description": "number of octets per second read from a disk or partition; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadMin": { + "description": "number of octets per second read from a disk or partition; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteAvg": { + "description": "number of octets per second written to a disk or partition; provide the average 
measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteLast": { + "description": "number of octets per second written to a disk or partition; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteMax": { + "description": "number of octets per second written to a disk or partition; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteMin": { + "description": "number of octets per second written to a disk or partition; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadAvg": { + "description": "number of read operations per second issued to the disk; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadLast": { + "description": "number of read operations per second issued to the disk; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadMax": { + "description": "number of read operations per second issued to the disk; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadMin": { + "description": "number of read operations per second issued to the disk; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteAvg": { + "description": "number of write operations per second issued to the disk; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteLast": { + "description": "number of write operations per second issued to the disk; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteMax": { + "description": "number of write operations per second issued to the disk; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteMin": { + "description": "number of write operations per second issued to the disk; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsAvg": { + "description": "queue size of pending I/O operations per second; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsLast": { + "description": "queue size of pending I/O operations per second; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsMax": { + "description": "queue size of pending I/O operations per second; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsMin": { + "description": "queue size of pending I/O operations per second; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadAvg": { + "description": "milliseconds a read operation took to complete; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadLast": { + "description": "milliseconds a read operation took to complete; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadMax": { + "description": "milliseconds a read operation took to complete; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadMin": { + "description": "milliseconds a read operation took to complete; provide the minimum 
measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteAvg": { + "description": "milliseconds a write operation took to complete; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteLast": { + "description": "milliseconds a write operation took to complete; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteMax": { + "description": "milliseconds a write operation took to complete; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteMin": { + "description": "milliseconds a write operation took to complete; provide the minimum measurement within the measurement interval", + "type": "number" + } + }, + "required": ["diskIdentifier"] + }, + "endOfCallVqmSummaries": { + "description": "provides end of call voice quality metrics", + "type": "object", + "properties": { + "adjacencyName": { + "description": " adjacency name", + "type": "string" + }, + "endpointDescription": { + "description": "Either Caller or Callee", + "type": "string", + "enum": ["Caller", + "Callee"] + }, + "endpointJitter": { + "description": "", + "type": "number" + }, + "endpointRtpOctetsDiscarded": { + "description": "", + "type": "number" + }, + "endpointRtpOctetsReceived": { + "description": "", + "type": "number" + }, + "endpointRtpOctetsSent": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsDiscarded": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsReceived": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsSent": { + "description": "", + "type": "number" + }, + "localJitter": { + "description": "", + "type": "number" + }, + "localRtpOctetsDiscarded": { + "description": "", + "type": "number" + }, + "localRtpOctetsReceived": { + "description": "", + "type": "number" + }, + "localRtpOctetsSent": { + "description": "", + "type": "number" + }, + "localRtpPacketsDiscarded": { + "description": "", + "type": "number" + }, + "localRtpPacketsReceived": { + "description": "", + "type": "number" + }, + "localRtpPacketsSent": { + "description": "", + "type": "number" + }, + "mosCqe": { + "description": "1-5 1dp", + "type": "number" + }, + "packetsLost": { + "description": "", + "type": "number" + }, + "packetLossPercent": { + "description": "Calculated percentage packet loss based on Endpoint RTP packets lost (as reported in RTCP) and Local RTP packets sent. Direction is based on Endpoint description (Caller, Callee). 
Decimal (2 dp)", + "type": "number" + }, + "rFactor": { + "description": "0-100", + "type": "number" + }, + "roundTripDelay": { + "description": "millisecs", + "type": "number" + } + }, + "required": ["adjacencyName", + "endpointDescription"] + }, + "event": { + "description": "the root level of the common event format", + "type": "object", + "properties": { + "commonEventHeader": { + "$ref": "#/definitions/commonEventHeader" + }, + "faultFields": { + "$ref": "#/definitions/faultFields" + }, + "heartbeatFields": { + "$ref": "#/definitions/heartbeatFields" + }, + "measurementsForVfScalingFields": { + "$ref": "#/definitions/measurementsForVfScalingFields" + }, + "mobileFlowFields": { + "$ref": "#/definitions/mobileFlowFields" + }, + "otherFields": { + "$ref": "#/definitions/otherFields" + }, + "sipSignalingFields": { + "$ref": "#/definitions/sipSignalingFields" + }, + "stateChangeFields": { + "$ref": "#/definitions/stateChangeFields" + }, + "syslogFields": { + "$ref": "#/definitions/syslogFields" + }, + "thresholdCrossingAlertFields": { + "$ref": "#/definitions/thresholdCrossingAlertFields" + }, + "voiceQualityFields": { + "$ref": "#/definitions/voiceQualityFields" + } + }, + "required": ["commonEventHeader"] + }, + "eventDomainThrottleSpecification": { + "description": "specification of what information to suppress within an event domain", + "type": "object", + "properties": { + "eventDomain": { + "description": "Event domain enum from the commonEventHeader domain field", + "type": "string" + }, + "suppressedFieldNames": { + "description": "List of optional field names in the event block that should not be sent to the Event Listener", + "type": "array", + "items": { + "type": "string" + } + }, + "suppressedNvPairsList": { + "description": "Optional list of specific NvPairsNames to suppress within a given Name-Value Field", + "type": "array", + "items": { + "$ref": "#/definitions/suppressedNvPairs" + } + } + }, + "required": ["eventDomain"] + }, + "eventDomainThrottleSpecificationList": { + "description": "array of eventDomainThrottleSpecifications", + "type": "array", + "items": { + "$ref": "#/definitions/eventDomainThrottleSpecification" + }, + "minItems": 0 + }, + "eventList": { + "description": "array of events", + "type": "array", + "items": { + "$ref": "#/definitions/event" + } + }, + "eventThrottlingState": { + "description": "reports the throttling in force at the event source", + "type": "object", + "properties": { + "eventThrottlingMode": { + "description": "Mode the event manager is in", + "type": "string", + "enum": ["normal", + "throttled"] + }, + "eventDomainThrottleSpecificationList": { + "$ref": "#/definitions/eventDomainThrottleSpecificationList" + } + }, + "required": ["eventThrottlingMode"] + }, + "faultFields": { + "description": "fields specific to fault events", + "type": "object", + "properties": { + "alarmAdditionalInformation": { + "description": "additional alarm information", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "alarmCondition": { + "description": "alarm condition reported by the device", + "type": "string" + }, + "alarmInterfaceA": { + "description": "card, port, channel or interface name of the device generating the alarm", + "type": "string" + }, + "eventCategory": { + "description": "Event category, for example: license, link, routing, security, signaling", + "type": "string" + }, + "eventSeverity": { + "description": "event severity", + "type": "string", + "enum": ["CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + 
"NORMAL"] + }, + "eventSourceType": { + "description": "type of event source; examples: card, host, other, port, portThreshold, router, slotThreshold, switch, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "faultFieldsVersion": { + "description": "version of the faultFields block", + "type": "number" + }, + "specificProblem": { + "description": "short description of the alarm or problem", + "type": "string" + }, + "vfStatus": { + "description": "virtual function status enumeration", + "type": "string", + "enum": ["Active", + "Idle", + "Preparing to terminate", + "Ready to terminate", + "Requesting termination"] + } + }, + "required": ["alarmCondition", + "eventSeverity", + "eventSourceType", + "faultFieldsVersion", + "specificProblem", + "vfStatus"] + }, + "featuresInUse": { + "description": "number of times an identified feature was used over the measurementInterval", + "type": "object", + "properties": { + "featureIdentifier": { + "type": "string" + }, + "featureUtilization": { + "type": "integer" + } + }, + "required": ["featureIdentifier", + "featureUtilization"] + }, + "field": { + "description": "name value pair", + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "required": ["name", + "value"] + }, + "filesystemUsage": { + "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second", + "type": "object", + "properties": { + "blockConfigured": { + "type": "number" + }, + "blockIops": { + "type": "number" + }, + "blockUsed": { + "type": "number" + }, + "ephemeralConfigured": { + "type": "number" + }, + "ephemeralIops": { + "type": "number" + }, + "ephemeralUsed": { + "type": "number" + }, + "filesystemName": { + "type": "string" + } + }, + "required": ["blockConfigured", + "blockIops", + "blockUsed", + "ephemeralConfigured", + "ephemeralIops", + "ephemeralUsed", + "filesystemName"] + }, + "gtpPerFlowMetrics": { + "description": "Mobility GTP Protocol per flow metrics", + "type": "object", + "properties": { + "avgBitErrorRate": { + "description": "average bit error rate", + "type": "number" + }, + "avgPacketDelayVariation": { + "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "avgPacketLatency": { + "description": "average delivery latency", + "type": "number" + }, + "avgReceiveThroughput": { + "description": "average receive throughput", + "type": "number" + }, + "avgTransmitThroughput": { + "description": "average transmit throughput", + "type": "number" + }, + "durConnectionFailedStatus": { + "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval", + "type": "number" + }, + "durTunnelFailedStatus": { + "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval", + "type": "number" + }, + "flowActivatedBy": { + "description": "Endpoint activating the flow", + "type": "string" + }, + "flowActivationEpoch": { + "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available", + "type": "number" + }, + 
"flowActivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowActivationTime": { + "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowDeactivatedBy": { + "description": "Endpoint deactivating the flow", + "type": "string" + }, + "flowDeactivationEpoch": { + "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time", + "type": "number" + }, + "flowDeactivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowDeactivationTime": { + "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowStatus": { + "description": "connection status at reporting time as a working / inactive / failed indicator value", + "type": "string" + }, + "gtpConnectionStatus": { + "description": "Current connection state at reporting time", + "type": "string" + }, + "gtpTunnelStatus": { + "description": "Current tunnel state at reporting time", + "type": "string" + }, + "ipTosCountList": { + "description": "array of key: value pairs where the keys are drawn from the IP Type-of-Service identifiers which range from '0' to '255', and the values are the count of packets that had those ToS identifiers in the flow", + "type": "array", + "items": { + "type": "array", + "items": [{ + "type": "string" + }, + { + "type": "number" + }] + } + }, + "ipTosList": { + "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'", + "type": "array", + "items": { + "type": "string" + } + }, + "largePacketRtt": { + "description": "large packet round trip time", + "type": "number" + }, + "largePacketThreshold": { + "description": "large packet threshold being applied", + "type": "number" + }, + "maxPacketDelayVariation": { + "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "maxReceiveBitRate": { + "description": "maximum receive bit rate", + "type": "number" + }, + "maxTransmitBitRate": { + "description": "maximum transmit bit rate", + "type": "number" + }, + "mobileQciCosCountList": { + "description": "array of key: value pairs where the keys are drawn from LTE QCI or UMTS class of service strings, and the values are the count of packets that had those strings in the flow", + "type": "array", + "items": { + "type": "array", + "items": [{ + "type": "string" + }, + { + "type": "number" + }] + } + }, + "mobileQciCosList": { + "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "numActivationFailures": { + "description": "Number of failed activation requests, as observed by the reporting node", + "type": "number" + }, + "numBitErrors": { + "description": "number of errored bits", + "type": "number" + }, + "numBytesReceived": { + "description": "number of bytes received, including retransmissions", + "type": "number" + }, + "numBytesTransmitted": { + "description": "number of bytes 
transmitted, including retransmissions", + "type": "number" + }, + "numDroppedPackets": { + "description": "number of received packets dropped due to errors per virtual interface", + "type": "number" + }, + "numGtpEchoFailures": { + "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2", + "type": "number" + }, + "numGtpTunnelErrors": { + "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1", + "type": "number" + }, + "numHttpErrors": { + "description": "Http error count", + "type": "number" + }, + "numL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, including retransmissions", + "type": "number" + }, + "numL7BytesTransmitted": { + "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions", + "type": "number" + }, + "numLostPackets": { + "description": "number of lost packets", + "type": "number" + }, + "numOutOfOrderPackets": { + "description": "number of out-of-order packets", + "type": "number" + }, + "numPacketErrors": { + "description": "number of errored packets", + "type": "number" + }, + "numPacketsReceivedExclRetrans": { + "description": "number of packets received, excluding retransmission", + "type": "number" + }, + "numPacketsReceivedInclRetrans": { + "description": "number of packets received, including retransmission", + "type": "number" + }, + "numPacketsTransmittedInclRetrans": { + "description": "number of packets transmitted, including retransmissions", + "type": "number" + }, + "numRetries": { + "description": "number of packet retries", + "type": "number" + }, + "numTimeouts": { + "description": "number of packet timeouts", + "type": "number" + }, + "numTunneledL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, excluding retransmissions", + "type": "number" + }, + "roundTripTime": { + "description": "round trip time", + "type": "number" + }, + "tcpFlagCountList": { + "description": "array of key: value pairs where the keys are drawn from TCP Flags and the values are the count of packets that had that TCP Flag in the flow", + "type": "array", + "items": { + "type": "array", + "items": [{ + "type": "string" + }, + { + "type": "number" + }] + } + }, + "tcpFlagList": { + "description": "Array of unique TCP Flags observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "timeToFirstByte": { + "description": "Time in milliseconds between the connection activation and first byte received", + "type": "number" + } + }, + "required": ["avgBitErrorRate", + "avgPacketDelayVariation", + "avgPacketLatency", + "avgReceiveThroughput", + "avgTransmitThroughput", + "flowActivationEpoch", + "flowActivationMicrosec", + "flowDeactivationEpoch", + "flowDeactivationMicrosec", + "flowDeactivationTime", + "flowStatus", + "maxPacketDelayVariation", + "numActivationFailures", + "numBitErrors", + "numBytesReceived", + "numBytesTransmitted", + "numDroppedPackets", + "numL7BytesReceived", + "numL7BytesTransmitted", + "numLostPackets", + "numOutOfOrderPackets", + "numPacketErrors", + "numPacketsReceivedExclRetrans", + "numPacketsReceivedInclRetrans", + "numPacketsTransmittedInclRetrans", + "numRetries", + "numTimeouts", + "numTunneledL7BytesReceived", + "roundTripTime", + "timeToFirstByte"] + }, + "heartbeatFields": { + "description": "optional field block for fields specific to heartbeat events", + "type": "object", + 
"properties": { + "additionalFields": { + "description": "additional heartbeat fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "heartbeatFieldsVersion": { + "description": "version of the heartbeatFields block", + "type": "number" + }, + "heartbeatInterval": { + "description": "current heartbeat interval in seconds", + "type": "integer" + } + }, + "required": ["heartbeatFieldsVersion", + "heartbeatInterval"] + }, + "internalHeaderFields": { + "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources", + "type": "object" + }, + "jsonObject": { + "description": "json object schema, name and other meta-information along with one or more object instances", + "type": "object", + "properties": { + "objectInstances": { + "description": "one or more instances of the jsonObject", + "type": "array", + "items": { + "$ref": "#/definitions/jsonObjectInstance" + } + }, + "objectName": { + "description": "name of the JSON Object", + "type": "string" + }, + "objectSchema": { + "description": "json schema for the object", + "type": "string" + }, + "objectSchemaUrl": { + "description": "Url to the json schema for the object", + "type": "string" + }, + "nfSubscribedObjectName": { + "description": "name of the object associated with the nfSubscriptonId", + "type": "string" + }, + "nfSubscriptionId": { + "description": "identifies an openConfig telemetry subscription on a network function, which configures the network function to send complex object data associated with the jsonObject", + "type": "string" + } + }, + "required": ["objectInstances", + "objectName"] + }, + "jsonObjectInstance": { + "description": "meta-information about an instance of a jsonObject along with the actual object instance", + "type": "object", + "properties": { + "objectInstance": { + "description": "an instance conforming to the jsonObject schema", + "type": "object" + }, + "objectInstanceEpochMicrosec": { + "description": "the unix time aka epoch time associated with this objectInstance--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "objectKeys": { + "description": "an ordered set of keys that identifies this particular instance of jsonObject", + "type": "array", + "items": { + "$ref": "#/definitions/key" + } + } + }, + "required": ["objectInstance"] + }, + "key": { + "description": "tuple which provides the name of a key along with its value and relative order", + "type": "object", + "properties": { + "keyName": { + "description": "name of the key", + "type": "string" + }, + "keyOrder": { + "description": "relative sequence or order of the key with respect to other keys", + "type": "integer" + }, + "keyValue": { + "description": "value of the key", + "type": "string" + } + }, + "required": ["keyName"] + }, + "latencyBucketMeasure": { + "description": "number of counts falling within a defined latency bucket", + "type": "object", + "properties": { + "countsInTheBucket": { + "type": "number" + }, + "highEndOfLatencyBucket": { + "type": "number" + }, + "lowEndOfLatencyBucket": { + "type": "number" + } + }, + "required": ["countsInTheBucket"] + }, + "measurementsForVfScalingFields": { + "description": "measurementsForVfScaling fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional name-value-pair fields", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "additionalMeasurements": { + "description": "array of named 
name-value-pair arrays", + "type": "array", + "items": { + "$ref": "#/definitions/namedArrayOfFields" + } + }, + "additionalObjects": { + "description": "array of JSON objects described by name, schema and other meta-information", + "type": "array", + "items": { + "$ref": "#/definitions/jsonObject" + } + }, + "codecUsageArray": { + "description": "array of codecs in use", + "type": "array", + "items": { + "$ref": "#/definitions/codecsInUse" + } + }, + "concurrentSessions": { + "description": "peak concurrent sessions for the VM or VNF over the measurementInterval", + "type": "integer" + }, + "configuredEntities": { + "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the VNF", + "type": "integer" + }, + "cpuUsageArray": { + "description": "usage of an array of CPUs", + "type": "array", + "items": { + "$ref": "#/definitions/cpuUsage" + } + }, + "diskUsageArray": { + "description": "usage of an array of disks", + "type": "array", + "items": { + "$ref": "#/definitions/diskUsage" + } + }, + "featureUsageArray": { + "description": "array of features in use", + "type": "array", + "items": { + "$ref": "#/definitions/featuresInUse" + } + }, + "filesystemUsageArray": { + "description": "filesystem usage of the VM on which the VNFC reporting the event is running", + "type": "array", + "items": { + "$ref": "#/definitions/filesystemUsage" + } + }, + "latencyDistribution": { + "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-VNF configured ranges", + "type": "array", + "items": { + "$ref": "#/definitions/latencyBucketMeasure" + } + }, + "meanRequestLatency": { + "description": "mean seconds required to respond to each request for the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "measurementInterval": { + "description": "interval over which measurements are being reported in seconds", + "type": "number" + }, + "measurementsForVfScalingVersion": { + "description": "version of the measurementsForVfScaling block", + "type": "number" + }, + "memoryUsageArray": { + "description": "memory usage of an array of VMs", + "type": "array", + "items": { + "$ref": "#/definitions/memoryUsage" + } + }, + "numberOfMediaPortsInUse": { + "description": "number of media ports in use", + "type": "integer" + }, + "requestRate": { + "description": "peak rate of service requests per second to the VNF over the measurementInterval", + "type": "number" + }, + "vnfcScalingMetric": { + "description": "represents busy-ness of the VNF from 0 to 100 as reported by the VNFC", + "type": "integer" + }, + "vNicPerformanceArray": { + "description": "usage of an array of virtual network interface cards", + "type": "array", + "items": { + "$ref": "#/definitions/vNicPerformance" + } + } + }, + "required": ["measurementInterval", + "measurementsForVfScalingVersion"] + }, + "memoryUsage": { + "description": "memory usage of an identified virtual machine", + "type": "object", + "properties": { + "memoryBuffered": { + "description": "kibibytes of temporary storage for raw disk blocks", + "type": "number" + }, + "memoryCached": { + "description": "kibibytes of memory used for cache", + "type": "number" + }, + "memoryConfigured": { + "description": "kibibytes of memory configured in the virtual machine on which the VNFC reporting the event is running", + "type": "number" + }, + "memoryFree": { + "description": "kibibytes of physical RAM left 
unused by the system", + "type": "number" + }, + "memorySlabRecl": { + "description": "the part of the slab that can be reclaimed such as caches measured in kibibytes", + "type": "number" + }, + "memorySlabUnrecl": { + "description": "the part of the slab that cannot be reclaimed even when lacking memory measured in kibibytes", + "type": "number" + }, + "memoryUsed": { + "description": "total memory minus the sum of free, buffered, cached and slab memory measured in kibibytes", + "type": "number" + }, + "vmIdentifier": { + "description": "virtual machine identifier associated with the memory metrics", + "type": "string" + } + }, + "required": ["memoryFree", + "memoryUsed", + "vmIdentifier"] + }, + "mobileFlowFields": { + "description": "mobileFlow fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional mobileFlow fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "applicationType": { + "description": "Application type inferred", + "type": "string" + }, + "appProtocolType": { + "description": "application protocol", + "type": "string" + }, + "appProtocolVersion": { + "description": "application protocol version", + "type": "string" + }, + "cid": { + "description": "cell id", + "type": "string" + }, + "connectionType": { + "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc", + "type": "string" + }, + "ecgi": { + "description": "Evolved Cell Global Id", + "type": "string" + }, + "flowDirection": { + "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow", + "type": "string" + }, + "gtpPerFlowMetrics": { + "$ref": "#/definitions/gtpPerFlowMetrics" + }, + "gtpProtocolType": { + "description": "GTP protocol", + "type": "string" + }, + "gtpVersion": { + "description": "GTP protocol version", + "type": "string" + }, + "httpHeader": { + "description": "HTTP request header, if the flow connects to a node referenced by HTTP", + "type": "string" + }, + "imei": { + "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "imsi": { + "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "ipProtocolType": { + "description": "IP protocol type e.g., TCP, UDP, RTP...", + "type": "string" + }, + "ipVersion": { + "description": "IP protocol version e.g., IPv4, IPv6", + "type": "string" + }, + "lac": { + "description": "location area code", + "type": "string" + }, + "mcc": { + "description": "mobile country code", + "type": "string" + }, + "mnc": { + "description": "mobile network code", + "type": "string" + }, + "mobileFlowFieldsVersion": { + "description": "version of the mobileFlowFields block", + "type": "number" + }, + "msisdn": { + "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device", + "type": "string" + }, + "otherEndpointIpAddress": { + "description": "IP address for the other endpoint, as used for the flow being reported on", + "type": "string" + }, + "otherEndpointPort": { + "description": "IP Port for the reporting entity, as used for the flow being reported on", + "type": "integer" + }, + "otherFunctionalRole": { + "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...", + "type": "string" + }, + "rac": { + "description": "routing area code", + 
"type": "string" + }, + "radioAccessTechnology": { + "description": "Radio Access Technology e.g., 2G, 3G, LTE", + "type": "string" + }, + "reportingEndpointIpAddr": { + "description": "IP address for the reporting entity, as used for the flow being reported on", + "type": "string" + }, + "reportingEndpointPort": { + "description": "IP port for the reporting entity, as used for the flow being reported on", + "type": "integer" + }, + "sac": { + "description": "service area code", + "type": "string" + }, + "samplingAlgorithm": { + "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied", + "type": "integer" + }, + "tac": { + "description": "transport area code", + "type": "string" + }, + "tunnelId": { + "description": "tunnel identifier", + "type": "string" + }, + "vlanId": { + "description": "VLAN identifier used by this flow", + "type": "string" + } + }, + "required": ["flowDirection", + "gtpPerFlowMetrics", + "ipProtocolType", + "ipVersion", + "mobileFlowFieldsVersion", + "otherEndpointIpAddress", + "otherEndpointPort", + "reportingEndpointIpAddr", + "reportingEndpointPort"] + }, + "namedArrayOfFields": { + "description": "an array of name value pairs along with a name for the array", + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "arrayOfFields": { + "description": "array of name value pairs", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + } + }, + "required": ["name", + "arrayOfFields"] + }, + "otherFields": { + "description": "fields for events belonging to the 'other' domain of the commonEventHeader domain enumeration", + "type": "object", + "properties": { + "hashOfNameValuePairArrays": { + "description": "array of named name-value-pair arrays", + "type": "array", + "items": { + "$ref": "#/definitions/namedArrayOfFields" + } + }, + "jsonObjects": { + "description": "array of JSON objects described by name, schema and other meta-information", + "type": "array", + "items": { + "$ref": "#/definitions/jsonObject" + } + }, + "nameValuePairs": { + "description": "array of name-value pairs", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "otherFieldsVersion": { + "description": "version of the otherFields block", + "type": "number" + } + }, + "required": ["otherFieldsVersion"] + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "required": ["messageId", + "text"] + }, + "sipSignalingFields": { + "description": "sip signaling fields", + "type": "object", + "properties": { + "additionalInformation": { + "description": "additional sip signaling fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + 
"compressedSip": { + "description": "the full SIP request/response including headers and bodies", + "type": "string" + }, + "correlator": { + "description": "this is the same for all events on this call", + "type": "string" + }, + "localIpAddress": { + "description": "IP address on VNF", + "type": "string" + }, + "localPort": { + "description": "port on VNF", + "type": "string" + }, + "remoteIpAddress": { + "description": "IP address of peer endpoint", + "type": "string" + }, + "remotePort": { + "description": "port of peer endpoint", + "type": "string" + }, + "sipSignalingFieldsVersion": { + "description": "version of the sipSignalingFields block", + "type": "number" + }, + "summarySip": { + "description": "the SIP Method or Response (‘INVITE’, ‘200 OK’, ‘BYE’, etc)", + "type": "string" + }, + "vendorVnfNameFields": { + "$ref": "#/definitions/vendorVnfNameFields" + } + }, + "required": ["correlator", + "localIpAddress", + "localPort", + "remoteIpAddress", + "remotePort", + "sipSignalingFieldsVersion", + "vendorVnfNameFields"] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional stateChange fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": ["inService", + "maintenance", + "outOfService"] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": ["inService", + "maintenance", + "outOfService"] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "number" + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "required": ["newState", + "oldState", + "stateChangeFieldsVersion", + "stateInterface"] + }, + "suppressedNvPairs": { + "description": "List of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling", + "type": "object", + "properties": { + "nvPairFieldName": { + "description": "Name of the field within which are the nvpair names to suppress", + "type": "string" + }, + "suppressedNvPairNames": { + "description": "Array of nvpair names to suppress within the nvpairFieldName", + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": ["nvPairFieldName", + "suppressedNvPairNames"] + }, + "syslogFields": { + "description": "sysLog fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional syslog fields if needed provided as name=value delimited by a pipe ‘|’ symbol, for example: 'name1=value1|name2=value2|…'", + "type": "string" + }, + "eventSourceHost": { + "description": "hostname of the device", + "type": "string" + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "syslogFacility": { + "description": "numeric code from 0 to 23 for facility--see table in documentation", + "type": "integer" + }, + "syslogFieldsVersion": { + "description": "version of the syslogFields block", + "type": "number" + }, + "syslogMsg": { + "description": "syslog message", + "type": "string" + }, + "syslogPri": { + "description": "0-192 combined severity and facility", + "type": "integer" + }, + "syslogProc": { + "description": "identifies the application that 
originated the message", + "type": "string" + }, + "syslogProcId": { + "description": "a change in the value of this field indicates a discontinuity in syslog reporting", + "type": "number" + }, + "syslogSData": { + "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", + "type": "string" + }, + "syslogSdId": { + "description": "0-32 char in format name@number for example ourSDID@32473", + "type": "string" + }, + "syslogSev": { + "description": "numerical Code for severity derived from syslogPri as remaider of syslogPri / 8", + "type": "string", + "enum": ["Alert", + "Critical", + "Debug", + "Emergency", + "Error", + "Info", + "Notice", + "Warning"] + }, + "syslogTag": { + "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", + "type": "string" + }, + "syslogVer": { + "description": "IANA assigned version of the syslog protocol specification - typically 1", + "type": "number" + } + }, + "required": ["eventSourceType", + "syslogFieldsVersion", + "syslogMsg", + "syslogTag"] + }, + "thresholdCrossingAlertFields": { + "description": "fields specific to threshold crossing alert events", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional threshold crossing alert fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "additionalParameters": { + "description": "performance counters", + "type": "array", + "items": { + "$ref": "#/definitions/counter" + } + }, + "alertAction": { + "description": "Event action", + "type": "string", + "enum": ["CLEAR", + "CONT", + "SET"] + }, + "alertDescription": { + "description": "Unique short alert description such as IF-SHUB-ERRDROP", + "type": "string" + }, + "alertType": { + "description": "Event type", + "type": "string", + "enum": ["CARD-ANOMALY", + "ELEMENT-ANOMALY", + "INTERFACE-ANOMALY", + "SERVICE-ANOMALY"] + }, + "alertValue": { + "description": "Calculated API value (if applicable)", + "type": "string" + }, + "associatedAlertIdList": { + "description": "List of eventIds associated with the event being reported", + "type": "array", + "items": { + "type": "string" + } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": ["CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL"] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": "string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + "description": "version of the thresholdCrossingAlertFields block", + "type": "number" + } + }, + "required": ["additionalParameters", + "alertAction", + "alertDescription", + "alertType", 
+ "collectionTimestamp", + "eventSeverity", + "eventStartTimestamp", + "thresholdCrossingFieldsVersion"] + }, + "vendorVnfNameFields": { + "description": "provides vendor, vnf and vfModule identifying information", + "type": "object", + "properties": { + "vendorName": { + "description": "VNF vendor name", + "type": "string" + }, + "vfModuleName": { + "description": "ASDC vfModuleName for the vfModule generating the event", + "type": "string" + }, + "vnfName": { + "description": "ASDC modelName for the VNF generating the event", + "type": "string" + } + }, + "required": ["vendorName"] + }, + "vNicPerformance": { + "description": "describes the performance and errors of an identified virtual network interface card", + "type": "object", + "properties": { + "receivedBroadcastPacketsAccumulated": { + "description": "Cumulative count of broadcast packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedBroadcastPacketsDelta": { + "description": "Count of broadcast packets received within the measurement interval", + "type": "number" + }, + "receivedDiscardedPacketsAccumulated": { + "description": "Cumulative count of discarded packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedDiscardedPacketsDelta": { + "description": "Count of discarded packets received within the measurement interval", + "type": "number" + }, + "receivedErrorPacketsAccumulated": { + "description": "Cumulative count of error packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedErrorPacketsDelta": { + "description": "Count of error packets received within the measurement interval", + "type": "number" + }, + "receivedMulticastPacketsAccumulated": { + "description": "Cumulative count of multicast packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedMulticastPacketsDelta": { + "description": "Count of multicast packets received within the measurement interval", + "type": "number" + }, + "receivedOctetsAccumulated": { + "description": "Cumulative count of octets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedOctetsDelta": { + "description": "Count of octets received within the measurement interval", + "type": "number" + }, + "receivedTotalPacketsAccumulated": { + "description": "Cumulative count of all packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedTotalPacketsDelta": { + "description": "Count of all packets received within the measurement interval", + "type": "number" + }, + "receivedUnicastPacketsAccumulated": { + "description": "Cumulative count of unicast packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedUnicastPacketsDelta": { + "description": "Count of unicast packets received within the measurement interval", + "type": "number" + }, + "transmittedBroadcastPacketsAccumulated": { + "description": "Cumulative count of broadcast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedBroadcastPacketsDelta": { + "description": "Count of broadcast packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedDiscardedPacketsAccumulated": { + "description": "Cumulative count of discarded packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedDiscardedPacketsDelta": { 
+ "description": "Count of discarded packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedErrorPacketsAccumulated": { + "description": "Cumulative count of error packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedErrorPacketsDelta": { + "description": "Count of error packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedMulticastPacketsAccumulated": { + "description": "Cumulative count of multicast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedMulticastPacketsDelta": { + "description": "Count of multicast packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedOctetsAccumulated": { + "description": "Cumulative count of octets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedOctetsDelta": { + "description": "Count of octets transmitted within the measurement interval", + "type": "number" + }, + "transmittedTotalPacketsAccumulated": { + "description": "Cumulative count of all packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedTotalPacketsDelta": { + "description": "Count of all packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedUnicastPacketsAccumulated": { + "description": "Cumulative count of unicast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedUnicastPacketsDelta": { + "description": "Count of unicast packets transmitted within the measurement interval", + "type": "number" + }, + "valuesAreSuspect": { + "description": "Indicates whether vNicPerformance values are likely inaccurate due to counter overflow or other condtions", + "type": "string", + "enum": ["true", + "false"] + }, + "vNicIdentifier": { + "description": "vNic identification", + "type": "string" + } + }, + "required": ["valuesAreSuspect", + "vNicIdentifier"] + }, + "voiceQualityFields": { + "description": "provides statistics related to customer facing voice products", + "type": "object", + "properties": { + "additionalInformation": { + "description": "additional voice quality fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "calleeSideCodec": { + "description": "callee codec for the call", + "type": "string" + }, + "callerSideCodec": { + "description": "caller codec for the call", + "type": "string" + }, + "correlator": { + "description": "this is the same for all events on this call", + "type": "string" + }, + "endOfCallVqmSummaries": { + "$ref": "#/definitions/endOfCallVqmSummaries" + }, + "phoneNumber": { + "description": "phone number associated with the correlator", + "type": "string" + }, + "midCallRtcp": { + "description": "Base64 encoding of the binary RTCP data excluding Eth/IP/UDP headers", + "type": "string" + }, + "vendorVnfNameFields": { + "$ref": "#/definitions/vendorVnfNameFields" + }, + "voiceQualityFieldsVersion": { + "description": "version of the voiceQualityFields block", + "type": "number" + } + }, + "required": ["calleeSideCodec", + "callerSideCodec", + "correlator", + "midCallRtcp", + "vendorVnfNameFields", + "voiceQualityFieldsVersion"] + } + }, + "title": "Event Listener", + "type": "object", + "properties": { + "event": { + "$ref": "#/definitions/event" + } + } +}
\ No newline at end of file diff --git a/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vInvalidJson.json b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vInvalidJson.json new file mode 100644 index 0000000..4b5524c --- /dev/null +++ b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vInvalidJson.json @@ -0,0 +1,288 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "definitions": { + + "otherFields": { + "description": "additional fields not reported elsewhere", + "type": "array", + "items": + "$ref": "#/definitions/field" + } + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "required": [ "messageId", "text" ] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional stateChange fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "number" + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "required": [ "newState", "oldState", "stateInterface" ] + }, + "suppressedNvPairs": { + "description": "List of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling", + "type": "object", + "properties": { + "nvPairFieldName": { + "description": "Name of the field within which are the nvpair names to suppress", + "type": "string" + }, + "suppressedNvPairNames": { + "description": "Array of nvpair names to suppress within the nvpairFieldName", + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ "nvPairFieldName", "suppressedNvPairNames" ] + }, + "syslogFields": { + "description": "sysLog fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional syslog fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "eventSourceHost": { + "description": "hostname of the device", + "type": "string" + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "syslogFacility": { + "description": "numeric code from 0 to 23 for facility--see table in documentation", + "type": "number" + }, + "syslogFieldsVersion": { + "description": 
"version of the syslogFields block", + "type": "number" + }, + "syslogMsg": { + "description": "syslog message", + "type": "string" + }, + "syslogPri": { + "description": "0-192 combined severity and facility", + "type": "number" + }, + "syslogProc": { + "description": "identifies the application that originated the message", + "type": "string" + }, + "syslogProcId": { + "description": "a change in the value of this field indicates a discontinuity in syslog reporting", + "type": "number" + }, + "syslogSData": { + "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", + "type": "string" + }, + "syslogSdId": { + "description": "0-32 char in format name@number for example ourSDID@32473", + "type": "string" + }, + "syslogSev": { + "description": "numerical Code for severity derived from syslogPri as remaider of syslogPri / 8", + "type": "string" + }, + "syslogTag": { + "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", + "type": "string" + }, + "syslogVer": { + "description": "IANA assigned version of the syslog protocol specification - typically 1", + "type": "number" + } + }, + "required": [ "eventSourceType", "syslogMsg", "syslogTag" ] + }, + "thresholdCrossingAlertFields": { + "description": "fields specific to threshold crossing alert events", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional threshold crossing alert fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "additionalParameters": { + "description": "performance counters", + "type": "array", + "items": { + "$ref": "#/definitions/counter" + } + }, + "alertAction": { + "description": "Event action", + "type": "string", + "enum": [ + "CLEAR", + "CONT", + "SET" + ] + }, + "alertDescription": { + "description": "Unique short alert description such as IF-SHUB-ERRDROP", + "type": "string" + }, + "alertType": { + "description": "Event type", + "type": "string", + "enum": [ + "CARD-ANOMALY", + "ELEMENT-ANOMALY", + "INTERFACE-ANOMALY", + "SERVICE-ANOMALY" + ] + }, + "alertValue": { + "description": "Calculated API value (if applicable)", + "type": "string" + }, + "associatedAlertIdList": { + "description": "List of eventIds associated with the event being reported", + "type": "array", + "items": { "type": "string" } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": "string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + 
"description": "version of the thresholdCrossingAlertFields block", + "type": "number" + } + }, + "required": [ + "additionalParameters", + "alertAction", + "alertDescription", + "alertType", + "collectionTimestamp", + "eventSeverity", + "eventStartTimestamp" + ] + }, + "vNicUsage": { + "description": "usage of identified virtual network interface card", + "type": "object", + "properties": { + "broadcastPacketsIn": { "type": "number" }, + "broadcastPacketsOut": { "type": "number" }, + "bytesIn": { "type": "number" }, + "bytesOut": { "type": "number" }, + "multicastPacketsIn": { "type": "number" }, + "multicastPacketsOut": { "type": "number" }, + "packetsIn": { "type": "number" }, + "packetsOut": { "type": "number" }, + "unicastPacketsIn": { "type": "number" }, + "unicastPacketsOut": { "type": "number" }, + "vNicIdentifier": { "type": "string" } + }, + "required": [ "bytesIn", "bytesOut", "packetsIn", "packetsOut", "vNicIdentifier"] + } + }, + "title": "Event Listener", + "type": "object", + "properties": { + "event": {"$ref": "#/definitions/event"} + } +}
\ No newline at end of file diff --git a/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vInvalidRequiredEntry.json b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vInvalidRequiredEntry.json new file mode 100644 index 0000000..3e3d3a2 --- /dev/null +++ b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vInvalidRequiredEntry.json @@ -0,0 +1,1165 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "definitions": { + "attCopyrightNotice": { + "description": "Copyright (c) <2016>, AT&T Intellectual Property. All other rights reserved", + "type": "object", + "properties": { + "useAndRedistribution": { + "description": "Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:", + "type": "string" + }, + "condition1": { + "description": "Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.", + "type": "string" + }, + "condition2": { + "description": "Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.", + "type": "string" + }, + "condition3": { + "description": "All advertising materials mentioning features or use of this software must display the following acknowledgement: This product includes software developed by the AT&T.", + "type": "string" + }, + "condition4": { + "description": "Neither the name of AT&T nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.", + "type": "string" + }, + "disclaimerLine1": { + "description": "THIS SOFTWARE IS PROVIDED BY AT&T INTELLECTUAL PROPERTY AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", + "type": "string" + }, + "disclaimerLine2": { + "description": "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL AT&T INTELLECTUAL PROPERTY BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", + "type": "string" + }, + "disclaimerLine3": { + "description": "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,", + "type": "string" + }, + "disclaimerLine4": { + "description": "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + "type": "string" + } + } + }, + "codecsInUse": { + "description": "number of times an identified codec was used over the measurementInterval", + "type": "object", + "properties": { + "codecIdentifier": { "type": "string" }, + "numberInUse": { "type": "number" } + }, + "required": [ "codecIdentifier(invalid)", "numberInUse" ] + }, + "command": { + "description": "command from an event collector toward an event source", + "type": "object", + "properties": { + "commandType": { + "type": "string", + "enum": [ + "heartbeatIntervalChange", + "measurementIntervalChange", + "provideThrottlingState", + "throttlingSpecification" + ] + }, + "eventDomainThrottleSpecification": { "$ref": "#/definitions/eventDomainThrottleSpecification" }, + "measurementInterval": { "type": "number" } + }, + "required": [ "commandType" ] + }, + "commandList": { + "description": "array of commands from an event collector toward an event source", + "type": "array", + "items": { + "$ref": "#/definitions/commandListEntry" + }, + "minItems": 0 + }, + "commandListEntry": { + "description": "reference to a command object", + "type": "object", + "properties": { + "command": {"$ref": "#/definitions/command"} + }, + "required": [ "command" ] + }, + "commonEventHeader": { + "description": "fields common to all events", + "type": "object", + "properties": { + "domain": { + "description": "the eventing domain associated with the event", + "type": "string", + "enum": [ + "fault", + "heartbeat", + "measurementsForVfScaling", + "mobileFlow", + "other", + "stateChange", + "syslog", + "thresholdCrossingAlert" + ] + }, + "eventId": { + "description": "event key that is unique to the event source", + "type": "string" + }, + "eventType": { + "description": "unique event topic name", + "type": "string" + }, + "functionalRole": { + "description": "function of the event source e.g., eNodeB, MME, PCRF", + "type": "string" + }, + "internalHeaderFields": { "$ref": "#/definitions/internalHeaderFields" }, + "lastEpochMicrosec": { + "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "priority": { + "description": "processing priority", + "type": "string", + "enum": [ + "High", + "Medium", + "Normal", + "Low" + ] + }, + "reportingEntityId": { + "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process", + "type": "string" + }, + "reportingEntityName": { + "description": "name of the entity reporting the event, for example, an OAM VM", + "type": "string" + }, + "sequence": { + "description": "ordering of events communicated by an event source instance or 0 if not needed", + "type": "integer" + }, + "sourceId": { + "description": "UUID identifying the entity experiencing the event issue; must be populated 
by the ATT enrichment process", + "type": "string" + }, + "sourceName": { + "description": "name of the entity experiencing the event issue", + "type": "string" + }, + "startEpochMicrosec": { + "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "version": { + "description": "version of the event header", + "type": "number" + } + }, + "required": [ "domain", "eventId", "functionalRole", "lastEpochMicrosec", + "priority", "reportingEntityName", "sequence", + "sourceName", "startEpochMicrosec" ] + }, + "counter": { + "description": "performance counter", + "type": "object", + "properties": { + "criticality": { "type": "string", "enum": [ "CRIT", "MAJ" ] }, + "name": { "type": "string" }, + "thresholdCrossed": { "type": "string" }, + "value": { "type": "string"} + }, + "required": [ "criticality", "name", "thresholdCrossed", "value" ] + }, + "cpuUsage": { + "description": "percent usage of an identified CPU", + "type": "object", + "properties": { + "cpuIdentifier": { "type": "string" }, + "percentUsage": { "type": "number" } + }, + "required": [ "cpuIdentifier", "percentUsage" ] + }, + "errors": { + "description": "receive and transmit errors for the measurements domain", + "type": "object", + "properties": { + "receiveDiscards": { "type": "number" }, + "receiveErrors": { "type": "number" }, + "transmitDiscards": { "type": "number" }, + "transmitErrors": { "type": "number" } + }, + "required": [ "receiveDiscards", "receiveErrors", "transmitDiscards", "transmitErrors" ] + }, + "event": { + "description": "the root level of the common event format", + "type": "object", + "properties": { + "commonEventHeader": { "$ref": "#/definitions/commonEventHeader" }, + "faultFields": { "$ref": "#/definitions/faultFields" }, + "measurementsForVfScalingFields": { "$ref": "#/definitions/measurementsForVfScalingFields" }, + "mobileFlowFields": { "$ref": "#/definitions/mobileFlowFields" }, + "otherFields": { "$ref": "#/definitions/otherFields" }, + "stateChangeFields": { "$ref": "#/definitions/stateChangeFields" }, + "syslogFields": { "$ref": "#/definitions/syslogFields" }, + "thresholdCrossingAlertFields": { "$ref": "#/definitions/thresholdCrossingAlertFields" } + }, + "required": [ "commonEventHeader" ] + }, + "eventDomainThrottleSpecification": { + "description": "specification of what information to suppress within an event domain", + "type": "object", + "properties": { + "eventDomain": { + "description": "Event domain enum from the commonEventHeader domain field", + "type": "string" + }, + "suppressedFieldNames": { + "description": "List of optional field names in the event block that should not be sent to the Event Listener", + "type": "array", + "items": { + "type": "string" + } + }, + "suppressedNvPairsList": { + "description": "Optional list of specific NvPairsNames to suppress within a given Name-Value Field", + "type": "array", + "items": { + "$ref": "#/definitions/suppressedNvPairs" + } + } + }, + "required": [ "eventDomain" ] + }, + "eventDomainThrottleSpecificationList": { + "description": "array of eventDomainThrottleSpecifications", + "type": "array", + "items": { + "$ref": "#/definitions/eventDomainThrottleSpecification" + }, + "minItems": 0 + }, + "eventList": { + "description": "array of events", + "type": "array", + "items": { + "$ref": "#/definitions/event" + } + }, + "eventThrottlingState": { + "description": "reports the throttling in force at the 
event source", + "type": "object", + "properties": { + "eventThrottlingMode": { + "description": "Mode the event manager is in", + "type": "string", + "enum": [ + "normal", + "throttled" + ] + }, + "eventDomainThrottleSpecificationList": { "$ref": "#/definitions/eventDomainThrottleSpecificationList" } + }, + "required": [ "eventThrottlingMode" ] + }, + "faultFields": { + "description": "fields specific to fault events", + "type": "object", + "properties": { + "alarmAdditionalInformation": { + "description": "additional alarm information", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "alarmCondition": { + "description": "alarm condition reported by the device", + "type": "string" + }, + "alarmInterfaceA": { + "description": "card, port, channel or interface name of the device generating the alarm", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "faultFieldsVersion": { + "description": "version of the faultFields block", + "type": "number" + }, + "specificProblem": { + "description": "short description of the alarm or problem", + "type": "string" + }, + "vfStatus": { + "description": "virtual function status enumeration", + "type": "string", + "enum": [ + "Active", + "Idle", + "Preparing to terminate", + "Ready to terminate", + "Requesting termination" + ] + } + }, + "required": [ "alarmCondition", "eventSeverity", + "eventSourceType", "specificProblem", "vfStatus" ] + }, + "featuresInUse": { + "description": "number of times an identified feature was used over the measurementInterval", + "type": "object", + "properties": { + "featureIdentifier": { "type": "string" }, + "featureUtilization": { "type": "number" } + }, + "required": [ "featureIdentifier", "featureUtilization" ] + }, + "field": { + "description": "name value pair", + "type": "object", + "properties": { + "name": { "type": "string" }, + "value": { "type": "string" } + }, + "required": [ "name", "value" ] + }, + "filesystemUsage": { + "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second", + "type": "object", + "properties": { + "blockConfigured": { "type": "number" }, + "blockIops": { "type": "number" }, + "blockUsed": { "type": "number" }, + "ephemeralConfigured": { "type": "number" }, + "ephemeralIops": { "type": "number" }, + "ephemeralUsed": { "type": "number" }, + "filesystemName": { "type": "string" } + }, + "required": [ "blockConfigured", "blockIops", "blockUsed", "ephemeralConfigured", + "ephemeralIops", "ephemeralUsed", "filesystemName" ] + }, + "gtpPerFlowMetrics": { + "description": "Mobility GTP Protocol per flow metrics", + "type": "object", + "properties": { + "avgBitErrorRate": { + "description": "average bit error rate", + "type": "number" + }, + "avgPacketDelayVariation": { + "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "avgPacketLatency": { + "description": "average delivery latency", + "type": "number" + }, + "avgReceiveThroughput": { + "description": "average receive throughput", + "type": "number" + }, + 
"avgTransmitThroughput": { + "description": "average transmit throughput", + "type": "number" + }, + "durConnectionFailedStatus": { + "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval", + "type": "number" + }, + "durTunnelFailedStatus": { + "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval", + "type": "number" + }, + "flowActivatedBy": { + "description": "Endpoint activating the flow", + "type": "string" + }, + "flowActivationEpoch": { + "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available", + "type": "number" + }, + "flowActivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowActivationTime": { + "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowDeactivatedBy": { + "description": "Endpoint deactivating the flow", + "type": "string" + }, + "flowDeactivationEpoch": { + "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time", + "type": "number" + }, + "flowDeactivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowDeactivationTime": { + "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowStatus": { + "description": "connection status at reporting time as a working / inactive / failed indicator value", + "type": "string" + }, + "gtpConnectionStatus": { + "description": "Current connection state at reporting time", + "type": "string" + }, + "gtpTunnelStatus": { + "description": "Current tunnel state at reporting time", + "type": "string" + }, + "ipTosCountList": { + "description": "array of key: value pairs where the keys are drawn from the IP Type-of-Service identifiers which range from '0' to '255', and the values are the count of packets that had those ToS identifiers in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "ipTosList": { + "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'", + "type": "array", + "items": { + "type": "string" + } + }, + "largePacketRtt": { + "description": "large packet round trip time", + "type": "number" + }, + "largePacketThreshold": { + "description": "large packet threshold being applied", + "type": "number" + }, + "maxPacketDelayVariation": { + "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "maxReceiveBitRate": { + "description": "maximum receive bit rate", + "type": "number" + }, + "maxTransmitBitRate": { + "description": "maximum transmit bit rate", + "type": "number" + }, + "mobileQciCosCountList": { 
+ "description": "array of key: value pairs where the keys are drawn from LTE QCI or UMTS class of service strings, and the values are the count of packets that had those strings in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "mobileQciCosList": { + "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "numActivationFailures": { + "description": "Number of failed activation requests, as observed by the reporting node", + "type": "number" + }, + "numBitErrors": { + "description": "number of errored bits", + "type": "number" + }, + "numBytesReceived": { + "description": "number of bytes received, including retransmissions", + "type": "number" + }, + "numBytesTransmitted": { + "description": "number of bytes transmitted, including retransmissions", + "type": "number" + }, + "numDroppedPackets": { + "description": "number of received packets dropped due to errors per virtual interface", + "type": "number" + }, + "numGtpEchoFailures": { + "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2", + "type": "number" + }, + "numGtpTunnelErrors": { + "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1", + "type": "number" + }, + "numHttpErrors": { + "description": "Http error count", + "type": "number" + }, + "numL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, including retransmissions", + "type": "number" + }, + "numL7BytesTransmitted": { + "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions", + "type": "number" + }, + "numLostPackets": { + "description": "number of lost packets", + "type": "number" + }, + "numOutOfOrderPackets": { + "description": "number of out-of-order packets", + "type": "number" + }, + "numPacketErrors": { + "description": "number of errored packets", + "type": "number" + }, + "numPacketsReceivedExclRetrans": { + "description": "number of packets received, excluding retransmission", + "type": "number" + }, + "numPacketsReceivedInclRetrans": { + "description": "number of packets received, including retransmission", + "type": "number" + }, + "numPacketsTransmittedInclRetrans": { + "description": "number of packets transmitted, including retransmissions", + "type": "number" + }, + "numRetries": { + "description": "number of packet retries", + "type": "number" + }, + "numTimeouts": { + "description": "number of packet timeouts", + "type": "number" + }, + "numTunneledL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, excluding retransmissions", + "type": "number" + }, + "roundTripTime": { + "description": "round trip time", + "type": "number" + }, + "tcpFlagCountList": { + "description": "array of key: value pairs where the keys are drawn from TCP Flags and the values are the count of packets that had that TCP Flag in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "tcpFlagList": { + "description": "Array of unique TCP Flags observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "timeToFirstByte": { + "description": "Time in milliseconds between the connection activation and first byte received", + "type": 
"number" + } + }, + "required": [ "avgBitErrorRate", "avgPacketDelayVariation", "avgPacketLatency", + "avgReceiveThroughput", "avgTransmitThroughput", + "flowActivationEpoch", "flowActivationMicrosec", + "flowDeactivationEpoch", "flowDeactivationMicrosec", + "flowDeactivationTime", "flowStatus", + "maxPacketDelayVariation", "numActivationFailures", + "numBitErrors", "numBytesReceived", "numBytesTransmitted", + "numDroppedPackets", "numL7BytesReceived", + "numL7BytesTransmitted", "numLostPackets", + "numOutOfOrderPackets", "numPacketErrors", + "numPacketsReceivedExclRetrans", + "numPacketsReceivedInclRetrans", + "numPacketsTransmittedInclRetrans", + "numRetries", "numTimeouts", "numTunneledL7BytesReceived", + "roundTripTime", "timeToFirstByte" + ] + }, + "internalHeaderFields": { + "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources", + "type": "object" + }, + "latencyBucketMeasure": { + "description": "number of counts falling within a defined latency bucket", + "type": "object", + "properties": { + "countsInTheBucket": { "type": "number" }, + "highEndOfLatencyBucket": { "type": "number" }, + "lowEndOfLatencyBucket": { "type": "number" } + }, + "required": [ "countsInTheBucket" ] + }, + "measurementGroup": { + "description": "measurement group", + "type": "object", + "properties": { + "name": { "type": "string" }, + "measurements": { + "description": "array of name value pair measurements", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + } + }, + "required": [ "name", "measurements" ] + }, + "measurementsForVfScalingFields": { + "description": "measurementsForVfScaling fields", + "type": "object", + "properties": { + "additionalMeasurements": { + "description": "additional measurement fields", + "type": "array", + "items": { + "$ref": "#/definitions/measurementGroup" + } + }, + "aggregateCpuUsage": { + "description": "aggregate CPU usage of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "codecUsageArray": { + "description": "array of codecs in use", + "type": "array", + "items": { + "$ref": "#/definitions/codecsInUse" + } + }, + "concurrentSessions": { + "description": "peak concurrent sessions for the VM or VNF over the measurementInterval", + "type": "number" + }, + "configuredEntities": { + "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the VNF", + "type": "number" + }, + "cpuUsageArray": { + "description": "usage of an array of CPUs", + "type": "array", + "items": { + "$ref": "#/definitions/cpuUsage" + } + }, + "errors": { "$ref": "#/definitions/errors" }, + "featureUsageArray": { + "description": "array of features in use", + "type": "array", + "items": { + "$ref": "#/definitions/featuresInUse" + } + }, + "filesystemUsageArray": { + "description": "filesystem usage of the VM on which the VNFC reporting the event is running", + "type": "array", + "items": { + "$ref": "#/definitions/filesystemUsage" + } + }, + "latencyDistribution": { + "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-VNF configured ranges", + "type": "array", + "items": { + "$ref": "#/definitions/latencyBucketMeasure" + } + }, + "meanRequestLatency": { + "description": "mean seconds required to respond to each request for the VM on which the VNFC reporting the event is running", + "type": "number" + }, + 
"measurementInterval": { + "description": "interval over which measurements are being reported in seconds", + "type": "number" + }, + "measurementsForVfScalingVersion": { + "description": "version of the measurementsForVfScaling block", + "type": "number" + }, + "memoryConfigured": { + "description": "memory in MB configured in the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "memoryUsed": { + "description": "memory usage in MB of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "numberOfMediaPortsInUse": { + "description": "number of media ports in use", + "type": "number" + }, + "requestRate": { + "description": "peak rate of service requests per second to the VNF over the measurementInterval", + "type": "number" + }, + "vnfcScalingMetric": { + "description": "represents busy-ness of the VNF from 0 to 100 as reported by the VNFC", + "type": "number" + }, + "vNicUsageArray": { + "description": "usage of an array of virtual network interface cards", + "type": "array", + "items": { + "$ref": "#/definitions/vNicUsage" + } + } + }, + "required": [ "measurementInterval" ] + }, + "mobileFlowFields": { + "description": "mobileFlow fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional mobileFlow fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "applicationType": { + "description": "Application type inferred", + "type": "string" + }, + "appProtocolType": { + "description": "application protocol", + "type": "string" + }, + "appProtocolVersion": { + "description": "application protocol version", + "type": "string" + }, + "cid": { + "description": "cell id", + "type": "string" + }, + "connectionType": { + "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc", + "type": "string" + }, + "ecgi": { + "description": "Evolved Cell Global Id", + "type": "string" + }, + "flowDirection": { + "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow", + "type": "string" + }, + "gtpPerFlowMetrics": { "$ref": "#/definitions/gtpPerFlowMetrics" }, + "gtpProtocolType": { + "description": "GTP protocol", + "type": "string" + }, + "gtpVersion": { + "description": "GTP protocol version", + "type": "string" + }, + "httpHeader": { + "description": "HTTP request header, if the flow connects to a node referenced by HTTP", + "type": "string" + }, + "imei": { + "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "imsi": { + "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "ipProtocolType": { + "description": "IP protocol type e.g., TCP, UDP, RTP...", + "type": "string" + }, + "ipVersion": { + "description": "IP protocol version e.g., IPv4, IPv6", + "type": "string" + }, + "lac": { + "description": "location area code", + "type": "string" + }, + "mcc": { + "description": "mobile country code", + "type": "string" + }, + "mnc": { + "description": "mobile network code", + "type": "string" + }, + "mobileFlowFieldsVersion": { + "description": "version of the mobileFlowFields block", + "type": "number" + }, + "msisdn": { + "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device", + "type": "string" + }, + "otherEndpointIpAddress": { + "description": "IP address 
for the other endpoint, as used for the flow being reported on", + "type": "string" + }, + "otherEndpointPort": { + "description": "IP Port for the reporting entity, as used for the flow being reported on", + "type": "number" + }, + "otherFunctionalRole": { + "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...", + "type": "string" + }, + "rac": { + "description": "routing area code", + "type": "string" + }, + "radioAccessTechnology": { + "description": "Radio Access Technology e.g., 2G, 3G, LTE", + "type": "string" + }, + "reportingEndpointIpAddr": { + "description": "IP address for the reporting entity, as used for the flow being reported on", + "type": "string" + }, + "reportingEndpointPort": { + "description": "IP port for the reporting entity, as used for the flow being reported on", + "type": "number" + }, + "sac": { + "description": "service area code", + "type": "string" + }, + "samplingAlgorithm": { + "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied", + "type": "number" + }, + "tac": { + "description": "transport area code", + "type": "string" + }, + "tunnelId": { + "description": "tunnel identifier", + "type": "string" + }, + "vlanId": { + "description": "VLAN identifier used by this flow", + "type": "string" + } + }, + "required": [ "flowDirection", "gtpPerFlowMetrics", "ipProtocolType", + "ipVersion", "otherEndpointIpAddress", "otherEndpointPort", + "reportingEndpointIpAddr", "reportingEndpointPort" ] + }, + "otherFields": { + "description": "additional fields not reported elsewhere", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "required": [ "messageId", "text" ] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional stateChange fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "number" + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "required": [ "newState", "oldState", "stateInterface" ] + }, + "suppressedNvPairs": { + "description": "List 
of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling", + "type": "object", + "properties": { + "nvPairFieldName": { + "description": "Name of the field within which are the nvpair names to suppress", + "type": "string" + }, + "suppressedNvPairNames": { + "description": "Array of nvpair names to suppress within the nvpairFieldName", + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ "nvPairFieldName", "suppressedNvPairNames" ] + }, + "syslogFields": { + "description": "sysLog fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional syslog fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "eventSourceHost": { + "description": "hostname of the device", + "type": "string" + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "syslogFacility": { + "description": "numeric code from 0 to 23 for facility--see table in documentation", + "type": "number" + }, + "syslogFieldsVersion": { + "description": "version of the syslogFields block", + "type": "number" + }, + "syslogMsg": { + "description": "syslog message", + "type": "string" + }, + "syslogPri": { + "description": "0-192 combined severity and facility", + "type": "number" + }, + "syslogProc": { + "description": "identifies the application that originated the message", + "type": "string" + }, + "syslogProcId": { + "description": "a change in the value of this field indicates a discontinuity in syslog reporting", + "type": "number" + }, + "syslogSData": { + "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", + "type": "string" + }, + "syslogSdId": { + "description": "0-32 char in format name@number for example ourSDID@32473", + "type": "string" + }, + "syslogSev": { + "description": "numerical Code for severity derived from syslogPri as remainder of syslogPri / 8", + "type": "string" + }, + "syslogTag": { + "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", + "type": "string" + }, + "syslogVer": { + "description": "IANA assigned version of the syslog protocol specification - typically 1", + "type": "number" + } + }, + "required": [ "eventSourceType", "syslogMsg", "syslogTag" ] + }, + "thresholdCrossingAlertFields": { + "description": "fields specific to threshold crossing alert events", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional threshold crossing alert fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "additionalParameters": { + "description": "performance counters", + "type": "array", + "items": { + "$ref": "#/definitions/counter" + } + }, + "alertAction": { + "description": "Event action", + "type": "string", + "enum": [ + "CLEAR", + "CONT", + "SET" + ] + }, + "alertDescription": { + "description": "Unique short alert description such as IF-SHUB-ERRDROP", + "type": "string" + }, + "alertType": { + "description": "Event type", + "type": "string", + "enum": [ + "CARD-ANOMALY", + "ELEMENT-ANOMALY", + "INTERFACE-ANOMALY", + "SERVICE-ANOMALY" + ] + }, + "alertValue": { + "description": "Calculated API value (if applicable)", + "type": "string" + }, + "associatedAlertIdList": { + "description": "List of 
eventIds associated with the event being reported", + "type": "array", + "items": { "type": "string" } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": "string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + "description": "version of the thresholdCrossingAlertFields block", + "type": "number" + } + }, + "required": [ + "additionalParameters", + "alertAction", + "alertDescription", + "alertType", + "collectionTimestamp", + "eventSeverity", + "eventStartTimestamp" + ] + }, + "vNicUsage": { + "description": "usage of identified virtual network interface card", + "type": "object", + "properties": { + "broadcastPacketsIn": { "type": "number" }, + "broadcastPacketsOut": { "type": "number" }, + "bytesIn": { "type": "number" }, + "bytesOut": { "type": "number" }, + "multicastPacketsIn": { "type": "number" }, + "multicastPacketsOut": { "type": "number" }, + "packetsIn": { "type": "number" }, + "packetsOut": { "type": "number" }, + "unicastPacketsIn": { "type": "number" }, + "unicastPacketsOut": { "type": "number" }, + "vNicIdentifier": { "type": "string" } + }, + "required": [ "bytesIn", "bytesOut", "packetsIn", "packetsOut", "vNicIdentifier"] + } + }, + "title": "Event Listener", + "type": "object", + "properties": { + "event": {"$ref": "#/definitions/event"} + } +}
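For illustration only (nothing in this block is part of the change set): a minimal Java sketch of how an event could be checked against the draft-04 CommonEventFormat schema above. It assumes the org.everit.json.schema validator and org.json are available on the test classpath; the class name, resource path, and sample field values are placeholders, not taken from this commit.

import org.everit.json.schema.Schema;
import org.everit.json.schema.ValidationException;
import org.everit.json.schema.loader.SchemaLoader;
import org.json.JSONObject;
import org.json.JSONTokener;

public class VesEventValidationSketch {
    public static void main(String[] args) {
        // Load the draft-04 schema from the test classpath (placeholder path).
        Schema schema = SchemaLoader.load(new JSONObject(new JSONTokener(
                VesEventValidationSketch.class.getResourceAsStream(
                        "/ves-schema/CommonEventFormat.json"))));

        // A minimal event: commonEventHeader is the only required member of
        // "event", and these are the fields its own "required" list demands.
        String event = "{\"event\":{\"commonEventHeader\":{"
                + "\"domain\":\"fault\",\"eventId\":\"fault000001\","
                + "\"functionalRole\":\"eNodeB\",\"lastEpochMicrosec\":1413378172000000,"
                + "\"priority\":\"High\",\"reportingEntityName\":\"OamVm001\","
                + "\"sequence\":0,\"sourceName\":\"vnf001\","
                + "\"startEpochMicrosec\":1413378172000000}}}";

        try {
            schema.validate(new JSONObject(event)); // throws on any violation
            System.out.println("event conforms to the Common Event Format");
        } catch (ValidationException e) {
            e.getAllMessages().forEach(System.out::println);
        }
    }
}

Dropping a required header field such as sourceName, or using a priority outside the High/Medium/Normal/Low enum, would surface in getAllMessages() with a JSON pointer to the offending block.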
\ No newline at end of file diff --git a/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vInvalidSchemaStructure.json b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vInvalidSchemaStructure.json new file mode 100644 index 0000000..469bf1a --- /dev/null +++ b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vInvalidSchemaStructure.json @@ -0,0 +1,1165 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "definitions": { + "attCopyrightNotice": { + "description": "Copyright (c) <2016>, AT&T Intellectual Property. All other rights reserved", + "type": "object", + "properties": [{ + "useAndRedistribution": { + "description": "Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:", + "type": "string" + }, + "condition1": { + "description": "Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.", + "type": "string" + }, + "condition2": { + "description": "Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.", + "type": "string" + }, + "condition3": { + "description": "All advertising materials mentioning features or use of this software must display the following acknowledgement: This product includes software developed by the AT&T.", + "type": "string" + }, + "condition4": { + "description": "Neither the name of AT&T nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.", + "type": "string" + }, + "disclaimerLine1": { + "description": "THIS SOFTWARE IS PROVIDED BY AT&T INTELLECTUAL PROPERTY AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", + "type": "string" + }, + "disclaimerLine2": { + "description": "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL AT&T INTELLECTUAL PROPERTY BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", + "type": "string" + }, + "disclaimerLine3": { + "description": "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,", + "type": "string" + }, + "disclaimerLine4": { + "description": "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + "type": "string" + } + }] + }, + "codecsInUse": { + "description": "number of times an identified codec was used over the measurementInterval", + "type": "object", + "properties": { + "codecIdentifier": { "type": "string" }, + "numberInUse": { "type": "number" } + }, + "required": [ "codecIdentifier", "numberInUse" ] + }, + "command": { + "description": "command from an event collector toward an event source", + "type": "object", + "properties": { + "commandType": { + "type": "string", + "enum": [ + "heartbeatIntervalChange", + "measurementIntervalChange", + "provideThrottlingState", + "throttlingSpecification" + ] + }, + "eventDomainThrottleSpecification": { "$ref": "#/definitions/eventDomainThrottleSpecification" }, + "measurementInterval": { "type": "number" } + }, + "required": [ "commandType" ] + }, + "commandList": { + "description": "array of commands from an event collector toward an event source", + "type": "array", + "items": { + "$ref": "#/definitions/commandListEntry" + }, + "minItems": 0 + }, + "commandListEntry": { + "description": "reference to a command object", + "type": "object", + "properties": { + "command": {"$ref": "#/definitions/command"} + }, + "required": [ "command" ] + }, + "commonEventHeader": { + "description": "fields common to all events", + "type": "object", + "properties": { + "domain": { + "description": "the eventing domain associated with the event", + "type": "string", + "enum": [ + "fault", + "heartbeat", + "measurementsForVfScaling", + "mobileFlow", + "other", + "stateChange", + "syslog", + "thresholdCrossingAlert" + ] + }, + "eventId": { + "description": "event key that is unique to the event source", + "type": "string" + }, + "eventType": { + "description": "unique event topic name", + "type": "string" + }, + "functionalRole": { + "description": "function of the event source e.g., eNodeB, MME, PCRF", + "type": "string" + }, + "internalHeaderFields": { "$ref": "#/definitions/internalHeaderFields" }, + "lastEpochMicrosec": { + "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "priority": { + "description": "processing priority", + "type": "string", + "enum": [ + "High", + "Medium", + "Normal", + "Low" + ] + }, + "reportingEntityId": { + "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process", + "type": "string" + }, + "reportingEntityName": { + "description": "name of the entity reporting the event, for example, an OAM VM", + "type": "string" + }, + "sequence": { + "description": "ordering of events communicated by an event source instance or 0 if not needed", + "type": "integer" + }, + "sourceId": { + "description": "UUID identifying the entity experiencing the event issue; must be populated by the 
ATT enrichment process", + "type": "string" + }, + "sourceName": { + "description": "name of the entity experiencing the event issue", + "type": "string" + }, + "startEpochMicrosec": { + "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "version": { + "description": "version of the event header", + "type": "number" + } + }, + "required": [ "domain", "eventId", "functionalRole", "lastEpochMicrosec", + "priority", "reportingEntityName", "sequence", + "sourceName", "startEpochMicrosec" ] + }, + "counter": { + "description": "performance counter", + "type": "object", + "properties": { + "criticality": { "type": "string", "enum": [ "CRIT", "MAJ" ] }, + "name": { "type": "string" }, + "thresholdCrossed": { "type": "string" }, + "value": { "type": "string"} + }, + "required": [ "criticality", "name", "thresholdCrossed", "value" ] + }, + "cpuUsage": { + "description": "percent usage of an identified CPU", + "type": "object", + "properties": { + "cpuIdentifier": { "type": "string" }, + "percentUsage": { "type": "number" } + }, + "required": [ "cpuIdentifier", "percentUsage" ] + }, + "errors": { + "description": "receive and transmit errors for the measurements domain", + "type": "object", + "properties": { + "receiveDiscards": { "type": "number" }, + "receiveErrors": { "type": "number" }, + "transmitDiscards": { "type": "number" }, + "transmitErrors": { "type": "number" } + }, + "required": [ "receiveDiscards", "receiveErrors", "transmitDiscards", "transmitErrors" ] + }, + "event": { + "description": "the root level of the common event format", + "type": "object", + "properties": { + "commonEventHeader": { "$ref": "#/definitions/commonEventHeader" }, + "faultFields": { "$ref": "#/definitions/faultFields" }, + "measurementsForVfScalingFields": { "$ref": "#/definitions/measurementsForVfScalingFields" }, + "mobileFlowFields": { "$ref": "#/definitions/mobileFlowFields" }, + "otherFields": { "$ref": "#/definitions/otherFields" }, + "stateChangeFields": { "$ref": "#/definitions/stateChangeFields" }, + "syslogFields": { "$ref": "#/definitions/syslogFields" }, + "thresholdCrossingAlertFields": { "$ref": "#/definitions/thresholdCrossingAlertFields" } + }, + "required": [ "commonEventHeader" ] + }, + "eventDomainThrottleSpecification": { + "description": "specification of what information to suppress within an event domain", + "type": "object", + "properties": { + "eventDomain": { + "description": "Event domain enum from the commonEventHeader domain field", + "type": "string" + }, + "suppressedFieldNames": { + "description": "List of optional field names in the event block that should not be sent to the Event Listener", + "type": "array", + "items": { + "type": "string" + } + }, + "suppressedNvPairsList": { + "description": "Optional list of specific NvPairsNames to suppress within a given Name-Value Field", + "type": "array", + "items": { + "$ref": "#/definitions/suppressedNvPairs" + } + } + }, + "required": [ "eventDomain" ] + }, + "eventDomainThrottleSpecificationList": { + "description": "array of eventDomainThrottleSpecifications", + "type": "array", + "items": { + "$ref": "#/definitions/eventDomainThrottleSpecification" + }, + "minItems": 0 + }, + "eventList": { + "description": "array of events", + "type": "array", + "items": { + "$ref": "#/definitions/event" + } + }, + "eventThrottlingState": { + "description": "reports the throttling in force at the event 
source", + "type": "object", + "properties": { + "eventThrottlingMode": { + "description": "Mode the event manager is in", + "type": "string", + "enum": [ + "normal", + "throttled" + ] + }, + "eventDomainThrottleSpecificationList": { "$ref": "#/definitions/eventDomainThrottleSpecificationList" } + }, + "required": [ "eventThrottlingMode" ] + }, + "faultFields": { + "description": "fields specific to fault events", + "type": "object", + "properties": { + "alarmAdditionalInformation": { + "description": "additional alarm information", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "alarmCondition": { + "description": "alarm condition reported by the device", + "type": "string" + }, + "alarmInterfaceA": { + "description": "card, port, channel or interface name of the device generating the alarm", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "faultFieldsVersion": { + "description": "version of the faultFields block", + "type": "number" + }, + "specificProblem": { + "description": "short description of the alarm or problem", + "type": "string" + }, + "vfStatus": { + "description": "virtual function status enumeration", + "type": "string", + "enum": [ + "Active", + "Idle", + "Preparing to terminate", + "Ready to terminate", + "Requesting termination" + ] + } + }, + "required": [ "alarmCondition", "eventSeverity", + "eventSourceType", "specificProblem", "vfStatus" ] + }, + "featuresInUse": { + "description": "number of times an identified feature was used over the measurementInterval", + "type": "object", + "properties": { + "featureIdentifier": { "type": "string" }, + "featureUtilization": { "type": "number" } + }, + "required": [ "featureIdentifier", "featureUtilization" ] + }, + "field": { + "description": "name value pair", + "type": "object", + "properties": { + "name": { "type": "string" }, + "value": { "type": "string" } + }, + "required": [ "name", "value" ] + }, + "filesystemUsage": { + "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second", + "type": "object", + "properties": { + "blockConfigured": { "type": "number" }, + "blockIops": { "type": "number" }, + "blockUsed": { "type": "number" }, + "ephemeralConfigured": { "type": "number" }, + "ephemeralIops": { "type": "number" }, + "ephemeralUsed": { "type": "number" }, + "filesystemName": { "type": "string" } + }, + "required": [ "blockConfigured", "blockIops", "blockUsed", "ephemeralConfigured", + "ephemeralIops", "ephemeralUsed", "filesystemName" ] + }, + "gtpPerFlowMetrics": { + "description": "Mobility GTP Protocol per flow metrics", + "type": "object", + "properties": { + "avgBitErrorRate": { + "description": "average bit error rate", + "type": "number" + }, + "avgPacketDelayVariation": { + "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "avgPacketLatency": { + "description": "average delivery latency", + "type": "number" + }, + "avgReceiveThroughput": { + "description": "average receive throughput", + "type": "number" + }, + 
"avgTransmitThroughput": { + "description": "average transmit throughput", + "type": "number" + }, + "durConnectionFailedStatus": { + "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval", + "type": "number" + }, + "durTunnelFailedStatus": { + "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval", + "type": "number" + }, + "flowActivatedBy": { + "description": "Endpoint activating the flow", + "type": "string" + }, + "flowActivationEpoch": { + "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available", + "type": "number" + }, + "flowActivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowActivationTime": { + "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowDeactivatedBy": { + "description": "Endpoint deactivating the flow", + "type": "string" + }, + "flowDeactivationEpoch": { + "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time", + "type": "number" + }, + "flowDeactivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowDeactivationTime": { + "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowStatus": { + "description": "connection status at reporting time as a working / inactive / failed indicator value", + "type": "string" + }, + "gtpConnectionStatus": { + "description": "Current connection state at reporting time", + "type": "string" + }, + "gtpTunnelStatus": { + "description": "Current tunnel state at reporting time", + "type": "string" + }, + "ipTosCountList": { + "description": "array of key: value pairs where the keys are drawn from the IP Type-of-Service identifiers which range from '0' to '255', and the values are the count of packets that had those ToS identifiers in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "ipTosList": { + "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'", + "type": "array", + "items": { + "type": "string" + } + }, + "largePacketRtt": { + "description": "large packet round trip time", + "type": "number" + }, + "largePacketThreshold": { + "description": "large packet threshold being applied", + "type": "number" + }, + "maxPacketDelayVariation": { + "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "maxReceiveBitRate": { + "description": "maximum receive bit rate", + "type": "number" + }, + "maxTransmitBitRate": { + "description": "maximum transmit bit rate", + "type": "number" + }, + "mobileQciCosCountList": { 
+ "description": "array of key: value pairs where the keys are drawn from LTE QCI or UMTS class of service strings, and the values are the count of packets that had those strings in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "mobileQciCosList": { + "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "numActivationFailures": { + "description": "Number of failed activation requests, as observed by the reporting node", + "type": "number" + }, + "numBitErrors": { + "description": "number of errored bits", + "type": "number" + }, + "numBytesReceived": { + "description": "number of bytes received, including retransmissions", + "type": "number" + }, + "numBytesTransmitted": { + "description": "number of bytes transmitted, including retransmissions", + "type": "number" + }, + "numDroppedPackets": { + "description": "number of received packets dropped due to errors per virtual interface", + "type": "number" + }, + "numGtpEchoFailures": { + "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2", + "type": "number" + }, + "numGtpTunnelErrors": { + "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1", + "type": "number" + }, + "numHttpErrors": { + "description": "Http error count", + "type": "number" + }, + "numL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, including retransmissions", + "type": "number" + }, + "numL7BytesTransmitted": { + "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions", + "type": "number" + }, + "numLostPackets": { + "description": "number of lost packets", + "type": "number" + }, + "numOutOfOrderPackets": { + "description": "number of out-of-order packets", + "type": "number" + }, + "numPacketErrors": { + "description": "number of errored packets", + "type": "number" + }, + "numPacketsReceivedExclRetrans": { + "description": "number of packets received, excluding retransmission", + "type": "number" + }, + "numPacketsReceivedInclRetrans": { + "description": "number of packets received, including retransmission", + "type": "number" + }, + "numPacketsTransmittedInclRetrans": { + "description": "number of packets transmitted, including retransmissions", + "type": "number" + }, + "numRetries": { + "description": "number of packet retries", + "type": "number" + }, + "numTimeouts": { + "description": "number of packet timeouts", + "type": "number" + }, + "numTunneledL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, excluding retransmissions", + "type": "number" + }, + "roundTripTime": { + "description": "round trip time", + "type": "number" + }, + "tcpFlagCountList": { + "description": "array of key: value pairs where the keys are drawn from TCP Flags and the values are the count of packets that had that TCP Flag in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "tcpFlagList": { + "description": "Array of unique TCP Flags observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "timeToFirstByte": { + "description": "Time in milliseconds between the connection activation and first byte received", + "type": 
"number" + } + }, + "required": [ "avgBitErrorRate", "avgPacketDelayVariation", "avgPacketLatency", + "avgReceiveThroughput", "avgTransmitThroughput", + "flowActivationEpoch", "flowActivationMicrosec", + "flowDeactivationEpoch", "flowDeactivationMicrosec", + "flowDeactivationTime", "flowStatus", + "maxPacketDelayVariation", "numActivationFailures", + "numBitErrors", "numBytesReceived", "numBytesTransmitted", + "numDroppedPackets", "numL7BytesReceived", + "numL7BytesTransmitted", "numLostPackets", + "numOutOfOrderPackets", "numPacketErrors", + "numPacketsReceivedExclRetrans", + "numPacketsReceivedInclRetrans", + "numPacketsTransmittedInclRetrans", + "numRetries", "numTimeouts", "numTunneledL7BytesReceived", + "roundTripTime", "timeToFirstByte" + ] + }, + "internalHeaderFields": { + "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources", + "type": "object" + }, + "latencyBucketMeasure": { + "description": "number of counts falling within a defined latency bucket", + "type": "object", + "properties": { + "countsInTheBucket": { "type": "number" }, + "highEndOfLatencyBucket": { "type": "number" }, + "lowEndOfLatencyBucket": { "type": "number" } + }, + "required": [ "countsInTheBucket" ] + }, + "measurementGroup": { + "description": "measurement group", + "type": "object", + "properties": { + "name": { "type": "string" }, + "measurements": { + "description": "array of name value pair measurements", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + } + }, + "required": [ "name", "measurements" ] + }, + "measurementsForVfScalingFields": { + "description": "measurementsForVfScaling fields", + "type": "object", + "properties": { + "additionalMeasurements": { + "description": "additional measurement fields", + "type": "array", + "items": { + "$ref": "#/definitions/measurementGroup" + } + }, + "aggregateCpuUsage": { + "description": "aggregate CPU usage of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "codecUsageArray": { + "description": "array of codecs in use", + "type": "array", + "items": { + "$ref": "#/definitions/codecsInUse" + } + }, + "concurrentSessions": { + "description": "peak concurrent sessions for the VM or VNF over the measurementInterval", + "type": "number" + }, + "configuredEntities": { + "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the VNF", + "type": "number" + }, + "cpuUsageArray": { + "description": "usage of an array of CPUs", + "type": "array", + "items": { + "$ref": "#/definitions/cpuUsage" + } + }, + "errors": { "$ref": "#/definitions/errors" }, + "featureUsageArray": { + "description": "array of features in use", + "type": "array", + "items": { + "$ref": "#/definitions/featuresInUse" + } + }, + "filesystemUsageArray": { + "description": "filesystem usage of the VM on which the VNFC reporting the event is running", + "type": "array", + "items": { + "$ref": "#/definitions/filesystemUsage" + } + }, + "latencyDistribution": { + "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-VNF configured ranges", + "type": "array", + "items": { + "$ref": "#/definitions/latencyBucketMeasure" + } + }, + "meanRequestLatency": { + "description": "mean seconds required to respond to each request for the VM on which the VNFC reporting the event is running", + "type": "number" + }, + 
"measurementInterval": { + "description": "interval over which measurements are being reported in seconds", + "type": "number" + }, + "measurementsForVfScalingVersion": { + "description": "version of the measurementsForVfScaling block", + "type": "number" + }, + "memoryConfigured": { + "description": "memory in MB configured in the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "memoryUsed": { + "description": "memory usage in MB of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "numberOfMediaPortsInUse": { + "description": "number of media ports in use", + "type": "number" + }, + "requestRate": { + "description": "peak rate of service requests per second to the VNF over the measurementInterval", + "type": "number" + }, + "vnfcScalingMetric": { + "description": "represents busy-ness of the VNF from 0 to 100 as reported by the VNFC", + "type": "number" + }, + "vNicUsageArray": { + "description": "usage of an array of virtual network interface cards", + "type": "array", + "items": { + "$ref": "#/definitions/vNicUsage" + } + } + }, + "required": [ "measurementInterval" ] + }, + "mobileFlowFields": { + "description": "mobileFlow fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional mobileFlow fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "applicationType": { + "description": "Application type inferred", + "type": "string" + }, + "appProtocolType": { + "description": "application protocol", + "type": "string" + }, + "appProtocolVersion": { + "description": "application protocol version", + "type": "string" + }, + "cid": { + "description": "cell id", + "type": "string" + }, + "connectionType": { + "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc", + "type": "string" + }, + "ecgi": { + "description": "Evolved Cell Global Id", + "type": "string" + }, + "flowDirection": { + "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow", + "type": "string" + }, + "gtpPerFlowMetrics": { "$ref": "#/definitions/gtpPerFlowMetrics" }, + "gtpProtocolType": { + "description": "GTP protocol", + "type": "string" + }, + "gtpVersion": { + "description": "GTP protocol version", + "type": "string" + }, + "httpHeader": { + "description": "HTTP request header, if the flow connects to a node referenced by HTTP", + "type": "string" + }, + "imei": { + "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "imsi": { + "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "ipProtocolType": { + "description": "IP protocol type e.g., TCP, UDP, RTP...", + "type": "string" + }, + "ipVersion": { + "description": "IP protocol version e.g., IPv4, IPv6", + "type": "string" + }, + "lac": { + "description": "location area code", + "type": "string" + }, + "mcc": { + "description": "mobile country code", + "type": "string" + }, + "mnc": { + "description": "mobile network code", + "type": "string" + }, + "mobileFlowFieldsVersion": { + "description": "version of the mobileFlowFields block", + "type": "number" + }, + "msisdn": { + "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device", + "type": "string" + }, + "otherEndpointIpAddress": { + "description": "IP address 
for the other endpoint, as used for the flow being reported on", + "type": "string" + }, + "otherEndpointPort": { + "description": "IP port for the other endpoint, as used for the flow being reported on", + "type": "number" + }, + "otherFunctionalRole": { + "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...", + "type": "string" + }, + "rac": { + "description": "routing area code", + "type": "string" + }, + "radioAccessTechnology": { + "description": "Radio Access Technology e.g., 2G, 3G, LTE", + "type": "string" + }, + "reportingEndpointIpAddr": { + "description": "IP address for the reporting entity, as used for the flow being reported on", + "type": "string" + }, + "reportingEndpointPort": { + "description": "IP port for the reporting entity, as used for the flow being reported on", + "type": "number" + }, + "sac": { + "description": "service area code", + "type": "string" + }, + "samplingAlgorithm": { + "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied", + "type": "number" + }, + "tac": { + "description": "transport area code", + "type": "string" + }, + "tunnelId": { + "description": "tunnel identifier", + "type": "string" + }, + "vlanId": { + "description": "VLAN identifier used by this flow", + "type": "string" + } + }, + "required": [ "flowDirection", "gtpPerFlowMetrics", "ipProtocolType", + "ipVersion", "otherEndpointIpAddress", "otherEndpointPort", + "reportingEndpointIpAddr", "reportingEndpointPort" ] + }, + "otherFields": { + "description": "additional fields not reported elsewhere", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "required": [ "messageId", "text" ] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional stateChange fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "number" + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "required": [ "newState", "oldState", "stateInterface" ] + }, + "suppressedNvPairs": { + "description": "List 
of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling", + "type": "object", + "properties": { + "nvPairFieldName": { + "description": "Name of the field within which are the nvpair names to suppress", + "type": "string" + }, + "suppressedNvPairNames": { + "description": "Array of nvpair names to suppress within the nvpairFieldName", + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ "nvPairFieldName", "suppressedNvPairNames" ] + }, + "syslogFields": { + "description": "sysLog fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional syslog fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "eventSourceHost": { + "description": "hostname of the device", + "type": "string" + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "syslogFacility": { + "description": "numeric code from 0 to 23 for facility--see table in documentation", + "type": "number" + }, + "syslogFieldsVersion": { + "description": "version of the syslogFields block", + "type": "number" + }, + "syslogMsg": { + "description": "syslog message", + "type": "string" + }, + "syslogPri": { + "description": "0-192 combined severity and facility", + "type": "number" + }, + "syslogProc": { + "description": "identifies the application that originated the message", + "type": "string" + }, + "syslogProcId": { + "description": "a change in the value of this field indicates a discontinuity in syslog reporting", + "type": "number" + }, + "syslogSData": { + "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", + "type": "string" + }, + "syslogSdId": { + "description": "0-32 char in format name@number for example ourSDID@32473", + "type": "string" + }, + "syslogSev": { + "description": "numerical Code for severity derived from syslogPri as remainder of syslogPri / 8", + "type": "string" + }, + "syslogTag": { + "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", + "type": "string" + }, + "syslogVer": { + "description": "IANA assigned version of the syslog protocol specification - typically 1", + "type": "number" + } + }, + "required": [ "eventSourceType", "syslogMsg", "syslogTag" ] + }, + "thresholdCrossingAlertFields": { + "description": "fields specific to threshold crossing alert events", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional threshold crossing alert fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "additionalParameters": { + "description": "performance counters", + "type": "array", + "items": { + "$ref": "#/definitions/counter" + } + }, + "alertAction": { + "description": "Event action", + "type": "string", + "enum": [ + "CLEAR", + "CONT", + "SET" + ] + }, + "alertDescription": { + "description": "Unique short alert description such as IF-SHUB-ERRDROP", + "type": "string" + }, + "alertType": { + "description": "Event type", + "type": "string", + "enum": [ + "CARD-ANOMALY", + "ELEMENT-ANOMALY", + "INTERFACE-ANOMALY", + "SERVICE-ANOMALY" + ] + }, + "alertValue": { + "description": "Calculated API value (if applicable)", + "type": "string" + }, + "associatedAlertIdList": { + "description": "List of 
eventIds associated with the event being reported", + "type": "array", + "items": { "type": "string" } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": "string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + "description": "version of the thresholdCrossingAlertFields block", + "type": "number" + } + }, + "required": [ + "additionalParameters", + "alertAction", + "alertDescription", + "alertType", + "collectionTimestamp", + "eventSeverity", + "eventStartTimestamp" + ] + }, + "vNicUsage": { + "description": "usage of identified virtual network interface card", + "type": "object", + "properties": { + "broadcastPacketsIn": { "type": "number" }, + "broadcastPacketsOut": { "type": "number" }, + "bytesIn": { "type": "number" }, + "bytesOut": { "type": "number" }, + "multicastPacketsIn": { "type": "number" }, + "multicastPacketsOut": { "type": "number" }, + "packetsIn": { "type": "number" }, + "packetsOut": { "type": "number" }, + "unicastPacketsIn": { "type": "number" }, + "unicastPacketsOut": { "type": "number" }, + "vNicIdentifier": { "type": "string" } + }, + "required": [ "bytesIn", "bytesOut", "packetsIn", "packetsOut", "vNicIdentifier"] + } + }, + "title": "Event Listener", + "type": "object", + "properties": { + "event": {"$ref": "#/definitions/event"} + } +}
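Again for illustration only (not part of the change set): the fixture above is deliberately malformed; its attCopyrightNotice definition wraps the "properties" value in an array ("properties": [{ ... }]), while JSON Schema draft-04 requires "properties" to be an object. Below is a small self-contained sketch, using only org.json, of the structural check such a fixture is meant to trip; the class and method names are placeholders, though the resource path matches the test resource added above.

import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;

import org.json.JSONObject;
import org.json.JSONTokener;

public class SchemaStructureCheckSketch {

    // Collect the names of definitions whose "properties" value is present
    // but is not a JSON object, as draft-04 requires it to be.
    static List<String> structurallyInvalidDefinitions(JSONObject schema) {
        List<String> bad = new ArrayList<>();
        JSONObject defs = schema.optJSONObject("definitions");
        if (defs == null) {
            return bad;
        }
        for (String name : defs.keySet()) {
            JSONObject def = defs.optJSONObject(name);
            if (def != null && def.has("properties")
                    && !(def.get("properties") instanceof JSONObject)) {
                bad.add(name);
            }
        }
        return bad;
    }

    public static void main(String[] args) throws Exception {
        try (InputStream in = SchemaStructureCheckSketch.class.getResourceAsStream(
                "/ves-schema/CommonEventFormat_vInvalidSchemaStructure.json")) {
            JSONObject schema = new JSONObject(new JSONTokener(in));
            // For the fixture above this prints: [attCopyrightNotice]
            System.out.println(structurallyInvalidDefinitions(schema));
        }
    }
}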
\ No newline at end of file diff --git a/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vInvalidType.json b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vInvalidType.json new file mode 100644 index 0000000..c925f63 --- /dev/null +++ b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vInvalidType.json @@ -0,0 +1,1165 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "definitions": { + "attCopyrightNotice": { + "description": "Copyright (c) <2016>, AT&T Intellectual Property. All other rights reserved", + "type": "invalid", + "properties": { + "useAndRedistribution": { + "description": "Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:", + "type": "string" + }, + "condition1": { + "description": "Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.", + "type": "string" + }, + "condition2": { + "description": "Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.", + "type": "string" + }, + "condition3": { + "description": "All advertising materials mentioning features or use of this software must display the following acknowledgement: This product includes software developed by the AT&T.", + "type": "string" + }, + "condition4": { + "description": "Neither the name of AT&T nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.", + "type": "string" + }, + "disclaimerLine1": { + "description": "THIS SOFTWARE IS PROVIDED BY AT&T INTELLECTUAL PROPERTY AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", + "type": "string" + }, + "disclaimerLine2": { + "description": "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL AT&T INTELLECTUAL PROPERTY BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", + "type": "string" + }, + "disclaimerLine3": { + "description": "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,", + "type": "string" + }, + "disclaimerLine4": { + "description": "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + "type": "string" + } + } + }, + "codecsInUse": { + "description": "number of times an identified codec was used over the measurementInterval", + "type": "object", + "properties": { + "codecIdentifier": { "type": "string" }, + "numberInUse": { "type": "number" } + }, + "required": [ "codecIdentifier", "numberInUse" ] + }, + "command": { + "description": "command from an event collector toward an event source", + "type": "object", + "properties": { + "commandType": { + "type": "string", + "enum": [ + "heartbeatIntervalChange", + "measurementIntervalChange", + "provideThrottlingState", + "throttlingSpecification" + ] + }, + "eventDomainThrottleSpecification": { "$ref": "#/definitions/eventDomainThrottleSpecification" }, + "measurementInterval": { "type": "number" } + }, + "required": [ "commandType" ] + }, + "commandList": { + "description": "array of commands from an event collector toward an event source", + "type": "array", + "items": { + "$ref": "#/definitions/commandListEntry" + }, + "minItems": 0 + }, + "commandListEntry": { + "description": "reference to a command object", + "type": "object", + "properties": { + "command": {"$ref": "#/definitions/command"} + }, + "required": [ "command" ] + }, + "commonEventHeader": { + "description": "fields common to all events", + "type": "object", + "properties": { + "domain": { + "description": "the eventing domain associated with the event", + "type": "string", + "enum": [ + "fault", + "heartbeat", + "measurementsForVfScaling", + "mobileFlow", + "other", + "stateChange", + "syslog", + "thresholdCrossingAlert" + ] + }, + "eventId": { + "description": "event key that is unique to the event source", + "type": "string" + }, + "eventType": { + "description": "unique event topic name", + "type": "string" + }, + "functionalRole": { + "description": "function of the event source e.g., eNodeB, MME, PCRF", + "type": "string" + }, + "internalHeaderFields": { "$ref": "#/definitions/internalHeaderFields" }, + "lastEpochMicrosec": { + "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "priority": { + "description": "processing priority", + "type": "string", + "enum": [ + "High", + "Medium", + "Normal", + "Low" + ] + }, + "reportingEntityId": { + "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process", + "type": "string" + }, + "reportingEntityName": { + "description": "name of the entity reporting the event, for example, an OAM VM", + "type": "string" + }, + "sequence": { + "description": "ordering of events communicated by an event source instance or 0 if not needed", + "type": "integer" + }, + "sourceId": { + "description": "UUID identifying the entity experiencing the event issue; must be populated by the 
ATT enrichment process", + "type": "string" + }, + "sourceName": { + "description": "name of the entity experiencing the event issue", + "type": "string" + }, + "startEpochMicrosec": { + "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "version": { + "description": "version of the event header", + "type": "number" + } + }, + "required": [ "domain", "eventId", "functionalRole", "lastEpochMicrosec", + "priority", "reportingEntityName", "sequence", + "sourceName", "startEpochMicrosec" ] + }, + "counter": { + "description": "performance counter", + "type": "object", + "properties": { + "criticality": { "type": "string", "enum": [ "CRIT", "MAJ" ] }, + "name": { "type": "string" }, + "thresholdCrossed": { "type": "string" }, + "value": { "type": "string"} + }, + "required": [ "criticality", "name", "thresholdCrossed", "value" ] + }, + "cpuUsage": { + "description": "percent usage of an identified CPU", + "type": "object", + "properties": { + "cpuIdentifier": { "type": "string" }, + "percentUsage": { "type": "number" } + }, + "required": [ "cpuIdentifier", "percentUsage" ] + }, + "errors": { + "description": "receive and transmit errors for the measurements domain", + "type": "object", + "properties": { + "receiveDiscards": { "type": "number" }, + "receiveErrors": { "type": "number" }, + "transmitDiscards": { "type": "number" }, + "transmitErrors": { "type": "number" } + }, + "required": [ "receiveDiscards", "receiveErrors", "transmitDiscards", "transmitErrors" ] + }, + "event": { + "description": "the root level of the common event format", + "type": "object", + "properties": { + "commonEventHeader": { "$ref": "#/definitions/commonEventHeader" }, + "faultFields": { "$ref": "#/definitions/faultFields" }, + "measurementsForVfScalingFields": { "$ref": "#/definitions/measurementsForVfScalingFields" }, + "mobileFlowFields": { "$ref": "#/definitions/mobileFlowFields" }, + "otherFields": { "$ref": "#/definitions/otherFields" }, + "stateChangeFields": { "$ref": "#/definitions/stateChangeFields" }, + "syslogFields": { "$ref": "#/definitions/syslogFields" }, + "thresholdCrossingAlertFields": { "$ref": "#/definitions/thresholdCrossingAlertFields" } + }, + "required": [ "commonEventHeader" ] + }, + "eventDomainThrottleSpecification": { + "description": "specification of what information to suppress within an event domain", + "type": "object", + "properties": { + "eventDomain": { + "description": "Event domain enum from the commonEventHeader domain field", + "type": "string" + }, + "suppressedFieldNames": { + "description": "List of optional field names in the event block that should not be sent to the Event Listener", + "type": "array", + "items": { + "type": "string" + } + }, + "suppressedNvPairsList": { + "description": "Optional list of specific NvPairsNames to suppress within a given Name-Value Field", + "type": "array", + "items": { + "$ref": "#/definitions/suppressedNvPairs" + } + } + }, + "required": [ "eventDomain" ] + }, + "eventDomainThrottleSpecificationList": { + "description": "array of eventDomainThrottleSpecifications", + "type": "array", + "items": { + "$ref": "#/definitions/eventDomainThrottleSpecification" + }, + "minItems": 0 + }, + "eventList": { + "description": "array of events", + "type": "array", + "items": { + "$ref": "#/definitions/event" + } + }, + "eventThrottlingState": { + "description": "reports the throttling in force at the event 
source", + "type": "object", + "properties": { + "eventThrottlingMode": { + "description": "Mode the event manager is in", + "type": "string", + "enum": [ + "normal", + "throttled" + ] + }, + "eventDomainThrottleSpecificationList": { "$ref": "#/definitions/eventDomainThrottleSpecificationList" } + }, + "required": [ "eventThrottlingMode" ] + }, + "faultFields": { + "description": "fields specific to fault events", + "type": "object", + "properties": { + "alarmAdditionalInformation": { + "description": "additional alarm information", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "alarmCondition": { + "description": "alarm condition reported by the device", + "type": "string" + }, + "alarmInterfaceA": { + "description": "card, port, channel or interface name of the device generating the alarm", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "faultFieldsVersion": { + "description": "version of the faultFields block", + "type": "number" + }, + "specificProblem": { + "description": "short description of the alarm or problem", + "type": "string" + }, + "vfStatus": { + "description": "virtual function status enumeration", + "type": "string", + "enum": [ + "Active", + "Idle", + "Preparing to terminate", + "Ready to terminate", + "Requesting termination" + ] + } + }, + "required": [ "alarmCondition", "eventSeverity", + "eventSourceType", "specificProblem", "vfStatus" ] + }, + "featuresInUse": { + "description": "number of times an identified feature was used over the measurementInterval", + "type": "object", + "properties": { + "featureIdentifier": { "type": "string" }, + "featureUtilization": { "type": "number" } + }, + "required": [ "featureIdentifier", "featureUtilization" ] + }, + "field": { + "description": "name value pair", + "type": "object", + "properties": { + "name": { "type": "string" }, + "value": { "type": "string" } + }, + "required": [ "name", "value" ] + }, + "filesystemUsage": { + "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second", + "type": "object", + "properties": { + "blockConfigured": { "type": "number" }, + "blockIops": { "type": "number" }, + "blockUsed": { "type": "number" }, + "ephemeralConfigured": { "type": "number" }, + "ephemeralIops": { "type": "number" }, + "ephemeralUsed": { "type": "number" }, + "filesystemName": { "type": "string" } + }, + "required": [ "blockConfigured", "blockIops", "blockUsed", "ephemeralConfigured", + "ephemeralIops", "ephemeralUsed", "filesystemName" ] + }, + "gtpPerFlowMetrics": { + "description": "Mobility GTP Protocol per flow metrics", + "type": "object", + "properties": { + "avgBitErrorRate": { + "description": "average bit error rate", + "type": "number" + }, + "avgPacketDelayVariation": { + "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "avgPacketLatency": { + "description": "average delivery latency", + "type": "number" + }, + "avgReceiveThroughput": { + "description": "average receive throughput", + "type": "number" + }, + 
"avgTransmitThroughput": { + "description": "average transmit throughput", + "type": "number" + }, + "durConnectionFailedStatus": { + "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval", + "type": "number" + }, + "durTunnelFailedStatus": { + "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval", + "type": "number" + }, + "flowActivatedBy": { + "description": "Endpoint activating the flow", + "type": "string" + }, + "flowActivationEpoch": { + "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available", + "type": "number" + }, + "flowActivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowActivationTime": { + "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowDeactivatedBy": { + "description": "Endpoint deactivating the flow", + "type": "string" + }, + "flowDeactivationEpoch": { + "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time", + "type": "number" + }, + "flowDeactivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowDeactivationTime": { + "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowStatus": { + "description": "connection status at reporting time as a working / inactive / failed indicator value", + "type": "string" + }, + "gtpConnectionStatus": { + "description": "Current connection state at reporting time", + "type": "string" + }, + "gtpTunnelStatus": { + "description": "Current tunnel state at reporting time", + "type": "string" + }, + "ipTosCountList": { + "description": "array of key: value pairs where the keys are drawn from the IP Type-of-Service identifiers which range from '0' to '255', and the values are the count of packets that had those ToS identifiers in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "ipTosList": { + "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'", + "type": "array", + "items": { + "type": "string" + } + }, + "largePacketRtt": { + "description": "large packet round trip time", + "type": "number" + }, + "largePacketThreshold": { + "description": "large packet threshold being applied", + "type": "number" + }, + "maxPacketDelayVariation": { + "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "maxReceiveBitRate": { + "description": "maximum receive bit rate", + "type": "number" + }, + "maxTransmitBitRate": { + "description": "maximum transmit bit rate", + "type": "number" + }, + "mobileQciCosCountList": { 
+ "description": "array of key: value pairs where the keys are drawn from LTE QCI or UMTS class of service strings, and the values are the count of packets that had those strings in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "mobileQciCosList": { + "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "numActivationFailures": { + "description": "Number of failed activation requests, as observed by the reporting node", + "type": "number" + }, + "numBitErrors": { + "description": "number of errored bits", + "type": "number" + }, + "numBytesReceived": { + "description": "number of bytes received, including retransmissions", + "type": "number" + }, + "numBytesTransmitted": { + "description": "number of bytes transmitted, including retransmissions", + "type": "number" + }, + "numDroppedPackets": { + "description": "number of received packets dropped due to errors per virtual interface", + "type": "number" + }, + "numGtpEchoFailures": { + "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2", + "type": "number" + }, + "numGtpTunnelErrors": { + "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1", + "type": "number" + }, + "numHttpErrors": { + "description": "Http error count", + "type": "number" + }, + "numL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, including retransmissions", + "type": "number" + }, + "numL7BytesTransmitted": { + "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions", + "type": "number" + }, + "numLostPackets": { + "description": "number of lost packets", + "type": "number" + }, + "numOutOfOrderPackets": { + "description": "number of out-of-order packets", + "type": "number" + }, + "numPacketErrors": { + "description": "number of errored packets", + "type": "number" + }, + "numPacketsReceivedExclRetrans": { + "description": "number of packets received, excluding retransmission", + "type": "number" + }, + "numPacketsReceivedInclRetrans": { + "description": "number of packets received, including retransmission", + "type": "number" + }, + "numPacketsTransmittedInclRetrans": { + "description": "number of packets transmitted, including retransmissions", + "type": "number" + }, + "numRetries": { + "description": "number of packet retries", + "type": "number" + }, + "numTimeouts": { + "description": "number of packet timeouts", + "type": "number" + }, + "numTunneledL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, excluding retransmissions", + "type": "number" + }, + "roundTripTime": { + "description": "round trip time", + "type": "number" + }, + "tcpFlagCountList": { + "description": "array of key: value pairs where the keys are drawn from TCP Flags and the values are the count of packets that had that TCP Flag in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "tcpFlagList": { + "description": "Array of unique TCP Flags observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "timeToFirstByte": { + "description": "Time in milliseconds between the connection activation and first byte received", + "type": 
"number" + } + }, + "required": [ "avgBitErrorRate", "avgPacketDelayVariation", "avgPacketLatency", + "avgReceiveThroughput", "avgTransmitThroughput", + "flowActivationEpoch", "flowActivationMicrosec", + "flowDeactivationEpoch", "flowDeactivationMicrosec", + "flowDeactivationTime", "flowStatus", + "maxPacketDelayVariation", "numActivationFailures", + "numBitErrors", "numBytesReceived", "numBytesTransmitted", + "numDroppedPackets", "numL7BytesReceived", + "numL7BytesTransmitted", "numLostPackets", + "numOutOfOrderPackets", "numPacketErrors", + "numPacketsReceivedExclRetrans", + "numPacketsReceivedInclRetrans", + "numPacketsTransmittedInclRetrans", + "numRetries", "numTimeouts", "numTunneledL7BytesReceived", + "roundTripTime", "timeToFirstByte" + ] + }, + "internalHeaderFields": { + "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources", + "type": "object" + }, + "latencyBucketMeasure": { + "description": "number of counts falling within a defined latency bucket", + "type": "object", + "properties": { + "countsInTheBucket": { "type": "number" }, + "highEndOfLatencyBucket": { "type": "number" }, + "lowEndOfLatencyBucket": { "type": "number" } + }, + "required": [ "countsInTheBucket" ] + }, + "measurementGroup": { + "description": "measurement group", + "type": "object", + "properties": { + "name": { "type": "string" }, + "measurements": { + "description": "array of name value pair measurements", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + } + }, + "required": [ "name", "measurements" ] + }, + "measurementsForVfScalingFields": { + "description": "measurementsForVfScaling fields", + "type": "object", + "properties": { + "additionalMeasurements": { + "description": "additional measurement fields", + "type": "array", + "items": { + "$ref": "#/definitions/measurementGroup" + } + }, + "aggregateCpuUsage": { + "description": "aggregate CPU usage of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "codecUsageArray": { + "description": "array of codecs in use", + "type": "array", + "items": { + "$ref": "#/definitions/codecsInUse" + } + }, + "concurrentSessions": { + "description": "peak concurrent sessions for the VM or VNF over the measurementInterval", + "type": "number" + }, + "configuredEntities": { + "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the VNF", + "type": "number" + }, + "cpuUsageArray": { + "description": "usage of an array of CPUs", + "type": "array", + "items": { + "$ref": "#/definitions/cpuUsage" + } + }, + "errors": { "$ref": "#/definitions/errors" }, + "featureUsageArray": { + "description": "array of features in use", + "type": "array", + "items": { + "$ref": "#/definitions/featuresInUse" + } + }, + "filesystemUsageArray": { + "description": "filesystem usage of the VM on which the VNFC reporting the event is running", + "type": "array", + "items": { + "$ref": "#/definitions/filesystemUsage" + } + }, + "latencyDistribution": { + "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-VNF configured ranges", + "type": "array", + "items": { + "$ref": "#/definitions/latencyBucketMeasure" + } + }, + "meanRequestLatency": { + "description": "mean seconds required to respond to each request for the VM on which the VNFC reporting the event is running", + "type": "number" + }, + 
"measurementInterval": { + "description": "interval over which measurements are being reported in seconds", + "type": "number" + }, + "measurementsForVfScalingVersion": { + "description": "version of the measurementsForVfScaling block", + "type": "number" + }, + "memoryConfigured": { + "description": "memory in MB configured in the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "memoryUsed": { + "description": "memory usage in MB of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "numberOfMediaPortsInUse": { + "description": "number of media ports in use", + "type": "number" + }, + "requestRate": { + "description": "peak rate of service requests per second to the VNF over the measurementInterval", + "type": "number" + }, + "vnfcScalingMetric": { + "description": "represents busy-ness of the VNF from 0 to 100 as reported by the VNFC", + "type": "number" + }, + "vNicUsageArray": { + "description": "usage of an array of virtual network interface cards", + "type": "array", + "items": { + "$ref": "#/definitions/vNicUsage" + } + } + }, + "required": [ "measurementInterval" ] + }, + "mobileFlowFields": { + "description": "mobileFlow fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional mobileFlow fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "applicationType": { + "description": "Application type inferred", + "type": "string" + }, + "appProtocolType": { + "description": "application protocol", + "type": "string" + }, + "appProtocolVersion": { + "description": "application protocol version", + "type": "string" + }, + "cid": { + "description": "cell id", + "type": "string" + }, + "connectionType": { + "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc", + "type": "string" + }, + "ecgi": { + "description": "Evolved Cell Global Id", + "type": "string" + }, + "flowDirection": { + "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow", + "type": "string" + }, + "gtpPerFlowMetrics": { "$ref": "#/definitions/gtpPerFlowMetrics" }, + "gtpProtocolType": { + "description": "GTP protocol", + "type": "string" + }, + "gtpVersion": { + "description": "GTP protocol version", + "type": "string" + }, + "httpHeader": { + "description": "HTTP request header, if the flow connects to a node referenced by HTTP", + "type": "string" + }, + "imei": { + "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "imsi": { + "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "ipProtocolType": { + "description": "IP protocol type e.g., TCP, UDP, RTP...", + "type": "string" + }, + "ipVersion": { + "description": "IP protocol version e.g., IPv4, IPv6", + "type": "string" + }, + "lac": { + "description": "location area code", + "type": "string" + }, + "mcc": { + "description": "mobile country code", + "type": "string" + }, + "mnc": { + "description": "mobile network code", + "type": "string" + }, + "mobileFlowFieldsVersion": { + "description": "version of the mobileFlowFields block", + "type": "number" + }, + "msisdn": { + "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device", + "type": "string" + }, + "otherEndpointIpAddress": { + "description": "IP address 
for the other endpoint, as used for the flow being reported on", + "type": "string" + }, + "otherEndpointPort": { + "description": "IP Port for the other endpoint, as used for the flow being reported on", + "type": "number" + }, + "otherFunctionalRole": { + "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...", + "type": "string" + }, + "rac": { + "description": "routing area code", + "type": "string" + }, + "radioAccessTechnology": { + "description": "Radio Access Technology e.g., 2G, 3G, LTE", + "type": "string" + }, + "reportingEndpointIpAddr": { + "description": "IP address for the reporting entity, as used for the flow being reported on", + "type": "string" + }, + "reportingEndpointPort": { + "description": "IP port for the reporting entity, as used for the flow being reported on", + "type": "number" + }, + "sac": { + "description": "service area code", + "type": "string" + }, + "samplingAlgorithm": { + "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied", + "type": "number" + }, + "tac": { + "description": "transport area code", + "type": "string" + }, + "tunnelId": { + "description": "tunnel identifier", + "type": "string" + }, + "vlanId": { + "description": "VLAN identifier used by this flow", + "type": "string" + } + }, + "required": [ "flowDirection", "gtpPerFlowMetrics", "ipProtocolType", + "ipVersion", "otherEndpointIpAddress", "otherEndpointPort", + "reportingEndpointIpAddr", "reportingEndpointPort" ] + }, + "otherFields": { + "description": "additional fields not reported elsewhere", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "required": [ "messageId", "text" ] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional stateChange fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "number" + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "required": [ "newState", "oldState", "stateInterface" ] + }, + "suppressedNvPairs": { + "description": "List
of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling", + "type": "object", + "properties": { + "nvPairFieldName": { + "description": "Name of the field within which are the nvpair names to suppress", + "type": "string" + }, + "suppressedNvPairNames": { + "description": "Array of nvpair names to suppress within the nvpairFieldName", + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ "nvPairFieldName", "suppressedNvPairNames" ] + }, + "syslogFields": { + "description": "sysLog fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional syslog fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "eventSourceHost": { + "description": "hostname of the device", + "type": "string" + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "syslogFacility": { + "description": "numeric code from 0 to 23 for facility--see table in documentation", + "type": "number" + }, + "syslogFieldsVersion": { + "description": "version of the syslogFields block", + "type": "number" + }, + "syslogMsg": { + "description": "syslog message", + "type": "string" + }, + "syslogPri": { + "description": "0-192 combined severity and facility", + "type": "number" + }, + "syslogProc": { + "description": "identifies the application that originated the message", + "type": "string" + }, + "syslogProcId": { + "description": "a change in the value of this field indicates a discontinuity in syslog reporting", + "type": "number" + }, + "syslogSData": { + "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", + "type": "string" + }, + "syslogSdId": { + "description": "0-32 char in format name@number for example ourSDID@32473", + "type": "string" + }, + "syslogSev": { + "description": "numerical Code for severity derived from syslogPri as remainder of syslogPri / 8", + "type": "string" + }, + "syslogTag": { + "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", + "type": "string" + }, + "syslogVer": { + "description": "IANA assigned version of the syslog protocol specification - typically 1", + "type": "number" + } + }, + "required": [ "eventSourceType", "syslogMsg", "syslogTag" ] + }, + "thresholdCrossingAlertFields": { + "description": "fields specific to threshold crossing alert events", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional threshold crossing alert fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "additionalParameters": { + "description": "performance counters", + "type": "array", + "items": { + "$ref": "#/definitions/counter" + } + }, + "alertAction": { + "description": "Event action", + "type": "string", + "enum": [ + "CLEAR", + "CONT", + "SET" + ] + }, + "alertDescription": { + "description": "Unique short alert description such as IF-SHUB-ERRDROP", + "type": "string" + }, + "alertType": { + "description": "Event type", + "type": "string", + "enum": [ + "CARD-ANOMALY", + "ELEMENT-ANOMALY", + "INTERFACE-ANOMALY", + "SERVICE-ANOMALY" + ] + }, + "alertValue": { + "description": "Calculated API value (if applicable)", + "type": "string" + }, + "associatedAlertIdList": { + "description": "List of
eventIds associated with the event being reported", + "type": "array", + "items": { "type": "string" } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": "string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + "description": "version of the thresholdCrossingAlertFields block", + "type": "number" + } + }, + "required": [ + "additionalParameters", + "alertAction", + "alertDescription", + "alertType", + "collectionTimestamp", + "eventSeverity", + "eventStartTimestamp" + ] + }, + "vNicUsage": { + "description": "usage of identified virtual network interface card", + "type": "object", + "properties": { + "broadcastPacketsIn": { "type": "number" }, + "broadcastPacketsOut": { "type": "number" }, + "bytesIn": { "type": "number" }, + "bytesOut": { "type": "number" }, + "multicastPacketsIn": { "type": "number" }, + "multicastPacketsOut": { "type": "number" }, + "packetsIn": { "type": "number" }, + "packetsOut": { "type": "number" }, + "unicastPacketsIn": { "type": "number" }, + "unicastPacketsOut": { "type": "number" }, + "vNicIdentifier": { "type": "string" } + }, + "required": [ "bytesIn", "bytesOut", "packetsIn", "packetsOut", "vNicIdentifier"] + } + }, + "title": "Event Listener", + "type": "object", + "properties": { + "event": {"$ref": "#/definitions/event"} + } +}
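The schema above closes the baseline VES "Event Listener" Common Event Format fixture: the top-level object exposes a single "event" property, and each event must carry at least the required commonEventHeader fields. Below is a minimal sketch of how a test could load and exercise such a fixture, assuming the org.everit json-schema validator and org.json are on the test classpath; the class name and resource path are illustrative, not taken from this commit.

import java.io.InputStream;
import org.everit.json.schema.Schema;
import org.everit.json.schema.loader.SchemaLoader;
import org.json.JSONObject;
import org.json.JSONTokener;

public class VesSchemaSmokeTest { // hypothetical helper, not part of this commit
    public static void main(String[] args) throws Exception {
        // Illustrative resource path for a baseline fixture like the one above.
        try (InputStream in = VesSchemaSmokeTest.class
                .getResourceAsStream("/ves-schema/CommonEventFormat_valid.json")) {
            Schema schema = SchemaLoader.load(new JSONObject(new JSONTokener(in)));
            // Smallest conforming payload: only the required commonEventHeader fields.
            JSONObject header = new JSONObject()
                    .put("domain", "fault")
                    .put("eventId", "evt-0001")
                    .put("functionalRole", "eNodeB")
                    .put("lastEpochMicrosec", 1523540000000000.0)
                    .put("priority", "Normal")
                    .put("reportingEntityName", "oam-vm-01")
                    .put("sequence", 0)
                    .put("sourceName", "enb-17")
                    .put("startEpochMicrosec", 1523540000000000.0);
            // Throws org.everit.json.schema.ValidationException on a non-conforming payload.
            schema.validate(new JSONObject()
                    .put("event", new JSONObject().put("commonEventHeader", header)));
        }
    }
}

Note that "domain": "fault" alone does not require faultFields: the event definition lists only commonEventHeader as required, so the domain-specific blocks stay optional at the schema level.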
\ No newline at end of file diff --git a/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vNoEventProperty.json b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vNoEventProperty.json new file mode 100644 index 0000000..bd4ba59 --- /dev/null +++ b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vNoEventProperty.json @@ -0,0 +1,1165 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "definitions": { + "attCopyrightNotice": { + "description": "Copyright (c) <2016>, AT&T Intellectual Property. All other rights reserved", + "type": "object", + "properties": { + "useAndRedistribution": { + "description": "Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:", + "type": "string" + }, + "condition1": { + "description": "Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.", + "type": "string" + }, + "condition2": { + "description": "Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.", + "type": "string" + }, + "condition3": { + "description": "All advertising materials mentioning features or use of this software must display the following acknowledgement: This product includes software developed by the AT&T.", + "type": "string" + }, + "condition4": { + "description": "Neither the name of AT&T nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.", + "type": "string" + }, + "disclaimerLine1": { + "description": "THIS SOFTWARE IS PROVIDED BY AT&T INTELLECTUAL PROPERTY AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", + "type": "string" + }, + "disclaimerLine2": { + "description": "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL AT&T INTELLECTUAL PROPERTY BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", + "type": "string" + }, + "disclaimerLine3": { + "description": "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,", + "type": "string" + }, + "disclaimerLine4": { + "description": "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + "type": "string" + } + } + }, + "codecsInUse": { + "description": "number of times an identified codec was used over the measurementInterval", + "type": "object", + "properties": { + "codecIdentifier": { "type": "string" }, + "numberInUse": { "type": "number" } + }, + "required": [ "codecIdentifier", "numberInUse" ] + }, + "command": { + "description": "command from an event collector toward an event source", + "type": "object", + "properties": { + "commandType": { + "type": "string", + "enum": [ + "heartbeatIntervalChange", + "measurementIntervalChange", + "provideThrottlingState", + "throttlingSpecification" + ] + }, + "eventDomainThrottleSpecification": { "$ref": "#/definitions/eventDomainThrottleSpecification" }, + "measurementInterval": { "type": "number" } + }, + "required": [ "commandType" ] + }, + "commandList": { + "description": "array of commands from an event collector toward an event source", + "type": "array", + "items": { + "$ref": "#/definitions/commandListEntry" + }, + "minItems": 0 + }, + "commandListEntry": { + "description": "reference to a command object", + "type": "object", + "properties": { + "command": {"$ref": "#/definitions/command"} + }, + "required": [ "command" ] + }, + "commonEventHeader": { + "description": "fields common to all events", + "type": "object", + "properties": { + "domain": { + "description": "the eventing domain associated with the event", + "type": "string", + "enum": [ + "fault", + "heartbeat", + "measurementsForVfScaling", + "mobileFlow", + "other", + "stateChange", + "syslog", + "thresholdCrossingAlert" + ] + }, + "eventId": { + "description": "event key that is unique to the event source", + "type": "string" + }, + "eventType": { + "description": "unique event topic name", + "type": "string" + }, + "functionalRole": { + "description": "function of the event source e.g., eNodeB, MME, PCRF", + "type": "string" + }, + "internalHeaderFields": { "$ref": "#/definitions/internalHeaderFields" }, + "lastEpochMicrosec": { + "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "priority": { + "description": "processing priority", + "type": "string", + "enum": [ + "High", + "Medium", + "Normal", + "Low" + ] + }, + "reportingEntityId": { + "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process", + "type": "string" + }, + "reportingEntityName": { + "description": "name of the entity reporting the event, for example, an OAM VM", + "type": "string" + }, + "sequence": { + "description": "ordering of events communicated by an event source instance or 0 if not needed", + "type": "integer" + }, + "sourceId": { + "description": "UUID identifying the entity experiencing the event issue; must be populated by the 
ATT enrichment process", + "type": "string" + }, + "sourceName": { + "description": "name of the entity experiencing the event issue", + "type": "string" + }, + "startEpochMicrosec": { + "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "version": { + "description": "version of the event header", + "type": "number" + } + }, + "required": [ "domain", "eventId", "functionalRole", "lastEpochMicrosec", + "priority", "reportingEntityName", "sequence", + "sourceName", "startEpochMicrosec" ] + }, + "counter": { + "description": "performance counter", + "type": "object", + "properties": { + "criticality": { "type": "string", "enum": [ "CRIT", "MAJ" ] }, + "name": { "type": "string" }, + "thresholdCrossed": { "type": "string" }, + "value": { "type": "string"} + }, + "required": [ "criticality", "name", "thresholdCrossed", "value" ] + }, + "cpuUsage": { + "description": "percent usage of an identified CPU", + "type": "object", + "properties": { + "cpuIdentifier": { "type": "string" }, + "percentUsage": { "type": "number" } + }, + "required": [ "cpuIdentifier", "percentUsage" ] + }, + "errors": { + "description": "receive and transmit errors for the measurements domain", + "type": "object", + "properties": { + "receiveDiscards": { "type": "number" }, + "receiveErrors": { "type": "number" }, + "transmitDiscards": { "type": "number" }, + "transmitErrors": { "type": "number" } + }, + "required": [ "receiveDiscards", "receiveErrors", "transmitDiscards", "transmitErrors" ] + }, + "event": { + "description": "the root level of the common event format", + "type": "object", + "properties": { + "commonEventHeader": { "$ref": "#/definitions/commonEventHeader" }, + "faultFields": { "$ref": "#/definitions/faultFields" }, + "measurementsForVfScalingFields": { "$ref": "#/definitions/measurementsForVfScalingFields" }, + "mobileFlowFields": { "$ref": "#/definitions/mobileFlowFields" }, + "otherFields": { "$ref": "#/definitions/otherFields" }, + "stateChangeFields": { "$ref": "#/definitions/stateChangeFields" }, + "syslogFields": { "$ref": "#/definitions/syslogFields" }, + "thresholdCrossingAlertFields": { "$ref": "#/definitions/thresholdCrossingAlertFields" } + }, + "required": [ "commonEventHeader" ] + }, + "eventDomainThrottleSpecification": { + "description": "specification of what information to suppress within an event domain", + "type": "object", + "properties": { + "eventDomain": { + "description": "Event domain enum from the commonEventHeader domain field", + "type": "string" + }, + "suppressedFieldNames": { + "description": "List of optional field names in the event block that should not be sent to the Event Listener", + "type": "array", + "items": { + "type": "string" + } + }, + "suppressedNvPairsList": { + "description": "Optional list of specific NvPairsNames to suppress within a given Name-Value Field", + "type": "array", + "items": { + "$ref": "#/definitions/suppressedNvPairs" + } + } + }, + "required": [ "eventDomain" ] + }, + "eventDomainThrottleSpecificationList": { + "description": "array of eventDomainThrottleSpecifications", + "type": "array", + "items": { + "$ref": "#/definitions/eventDomainThrottleSpecification" + }, + "minItems": 0 + }, + "eventList": { + "description": "array of events", + "type": "array", + "items": { + "$ref": "#/definitions/event" + } + }, + "eventThrottlingState": { + "description": "reports the throttling in force at the event 
source", + "type": "object", + "properties": { + "eventThrottlingMode": { + "description": "Mode the event manager is in", + "type": "string", + "enum": [ + "normal", + "throttled" + ] + }, + "eventDomainThrottleSpecificationList": { "$ref": "#/definitions/eventDomainThrottleSpecificationList" } + }, + "required": [ "eventThrottlingMode" ] + }, + "faultFields": { + "description": "fields specific to fault events", + "type": "object", + "properties": { + "alarmAdditionalInformation": { + "description": "additional alarm information", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "alarmCondition": { + "description": "alarm condition reported by the device", + "type": "string" + }, + "alarmInterfaceA": { + "description": "card, port, channel or interface name of the device generating the alarm", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "faultFieldsVersion": { + "description": "version of the faultFields block", + "type": "number" + }, + "specificProblem": { + "description": "short description of the alarm or problem", + "type": "string" + }, + "vfStatus": { + "description": "virtual function status enumeration", + "type": "string", + "enum": [ + "Active", + "Idle", + "Preparing to terminate", + "Ready to terminate", + "Requesting termination" + ] + } + }, + "required": [ "alarmCondition", "eventSeverity", + "eventSourceType", "specificProblem", "vfStatus" ] + }, + "featuresInUse": { + "description": "number of times an identified feature was used over the measurementInterval", + "type": "object", + "properties": { + "featureIdentifier": { "type": "string" }, + "featureUtilization": { "type": "number" } + }, + "required": [ "featureIdentifier", "featureUtilization" ] + }, + "field": { + "description": "name value pair", + "type": "object", + "properties": { + "name": { "type": "string" }, + "value": { "type": "string" } + }, + "required": [ "name", "value" ] + }, + "filesystemUsage": { + "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second", + "type": "object", + "properties": { + "blockConfigured": { "type": "number" }, + "blockIops": { "type": "number" }, + "blockUsed": { "type": "number" }, + "ephemeralConfigured": { "type": "number" }, + "ephemeralIops": { "type": "number" }, + "ephemeralUsed": { "type": "number" }, + "filesystemName": { "type": "string" } + }, + "required": [ "blockConfigured", "blockIops", "blockUsed", "ephemeralConfigured", + "ephemeralIops", "ephemeralUsed", "filesystemName" ] + }, + "gtpPerFlowMetrics": { + "description": "Mobility GTP Protocol per flow metrics", + "type": "object", + "properties": { + "avgBitErrorRate": { + "description": "average bit error rate", + "type": "number" + }, + "avgPacketDelayVariation": { + "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "avgPacketLatency": { + "description": "average delivery latency", + "type": "number" + }, + "avgReceiveThroughput": { + "description": "average receive throughput", + "type": "number" + }, + 
"avgTransmitThroughput": { + "description": "average transmit throughput", + "type": "number" + }, + "durConnectionFailedStatus": { + "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval", + "type": "number" + }, + "durTunnelFailedStatus": { + "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval", + "type": "number" + }, + "flowActivatedBy": { + "description": "Endpoint activating the flow", + "type": "string" + }, + "flowActivationEpoch": { + "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available", + "type": "number" + }, + "flowActivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowActivationTime": { + "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowDeactivatedBy": { + "description": "Endpoint deactivating the flow", + "type": "string" + }, + "flowDeactivationEpoch": { + "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time", + "type": "number" + }, + "flowDeactivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowDeactivationTime": { + "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowStatus": { + "description": "connection status at reporting time as a working / inactive / failed indicator value", + "type": "string" + }, + "gtpConnectionStatus": { + "description": "Current connection state at reporting time", + "type": "string" + }, + "gtpTunnelStatus": { + "description": "Current tunnel state at reporting time", + "type": "string" + }, + "ipTosCountList": { + "description": "array of key: value pairs where the keys are drawn from the IP Type-of-Service identifiers which range from '0' to '255', and the values are the count of packets that had those ToS identifiers in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "ipTosList": { + "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'", + "type": "array", + "items": { + "type": "string" + } + }, + "largePacketRtt": { + "description": "large packet round trip time", + "type": "number" + }, + "largePacketThreshold": { + "description": "large packet threshold being applied", + "type": "number" + }, + "maxPacketDelayVariation": { + "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "maxReceiveBitRate": { + "description": "maximum receive bit rate", + "type": "number" + }, + "maxTransmitBitRate": { + "description": "maximum transmit bit rate", + "type": "number" + }, + "mobileQciCosCountList": { 
+ "description": "array of key: value pairs where the keys are drawn from LTE QCI or UMTS class of service strings, and the values are the count of packets that had those strings in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "mobileQciCosList": { + "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "numActivationFailures": { + "description": "Number of failed activation requests, as observed by the reporting node", + "type": "number" + }, + "numBitErrors": { + "description": "number of errored bits", + "type": "number" + }, + "numBytesReceived": { + "description": "number of bytes received, including retransmissions", + "type": "number" + }, + "numBytesTransmitted": { + "description": "number of bytes transmitted, including retransmissions", + "type": "number" + }, + "numDroppedPackets": { + "description": "number of received packets dropped due to errors per virtual interface", + "type": "number" + }, + "numGtpEchoFailures": { + "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2", + "type": "number" + }, + "numGtpTunnelErrors": { + "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1", + "type": "number" + }, + "numHttpErrors": { + "description": "Http error count", + "type": "number" + }, + "numL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, including retransmissions", + "type": "number" + }, + "numL7BytesTransmitted": { + "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions", + "type": "number" + }, + "numLostPackets": { + "description": "number of lost packets", + "type": "number" + }, + "numOutOfOrderPackets": { + "description": "number of out-of-order packets", + "type": "number" + }, + "numPacketErrors": { + "description": "number of errored packets", + "type": "number" + }, + "numPacketsReceivedExclRetrans": { + "description": "number of packets received, excluding retransmission", + "type": "number" + }, + "numPacketsReceivedInclRetrans": { + "description": "number of packets received, including retransmission", + "type": "number" + }, + "numPacketsTransmittedInclRetrans": { + "description": "number of packets transmitted, including retransmissions", + "type": "number" + }, + "numRetries": { + "description": "number of packet retries", + "type": "number" + }, + "numTimeouts": { + "description": "number of packet timeouts", + "type": "number" + }, + "numTunneledL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, excluding retransmissions", + "type": "number" + }, + "roundTripTime": { + "description": "round trip time", + "type": "number" + }, + "tcpFlagCountList": { + "description": "array of key: value pairs where the keys are drawn from TCP Flags and the values are the count of packets that had that TCP Flag in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "tcpFlagList": { + "description": "Array of unique TCP Flags observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "timeToFirstByte": { + "description": "Time in milliseconds between the connection activation and first byte received", + "type": 
"number" + } + }, + "required": [ "avgBitErrorRate", "avgPacketDelayVariation", "avgPacketLatency", + "avgReceiveThroughput", "avgTransmitThroughput", + "flowActivationEpoch", "flowActivationMicrosec", + "flowDeactivationEpoch", "flowDeactivationMicrosec", + "flowDeactivationTime", "flowStatus", + "maxPacketDelayVariation", "numActivationFailures", + "numBitErrors", "numBytesReceived", "numBytesTransmitted", + "numDroppedPackets", "numL7BytesReceived", + "numL7BytesTransmitted", "numLostPackets", + "numOutOfOrderPackets", "numPacketErrors", + "numPacketsReceivedExclRetrans", + "numPacketsReceivedInclRetrans", + "numPacketsTransmittedInclRetrans", + "numRetries", "numTimeouts", "numTunneledL7BytesReceived", + "roundTripTime", "timeToFirstByte" + ] + }, + "internalHeaderFields": { + "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources", + "type": "object" + }, + "latencyBucketMeasure": { + "description": "number of counts falling within a defined latency bucket", + "type": "object", + "properties": { + "countsInTheBucket": { "type": "number" }, + "highEndOfLatencyBucket": { "type": "number" }, + "lowEndOfLatencyBucket": { "type": "number" } + }, + "required": [ "countsInTheBucket" ] + }, + "measurementGroup": { + "description": "measurement group", + "type": "object", + "properties": { + "name": { "type": "string" }, + "measurements": { + "description": "array of name value pair measurements", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + } + }, + "required": [ "name", "measurements" ] + }, + "measurementsForVfScalingFields": { + "description": "measurementsForVfScaling fields", + "type": "object", + "properties": { + "additionalMeasurements": { + "description": "additional measurement fields", + "type": "array", + "items": { + "$ref": "#/definitions/measurementGroup" + } + }, + "aggregateCpuUsage": { + "description": "aggregate CPU usage of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "codecUsageArray": { + "description": "array of codecs in use", + "type": "array", + "items": { + "$ref": "#/definitions/codecsInUse" + } + }, + "concurrentSessions": { + "description": "peak concurrent sessions for the VM or VNF over the measurementInterval", + "type": "number" + }, + "configuredEntities": { + "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the VNF", + "type": "number" + }, + "cpuUsageArray": { + "description": "usage of an array of CPUs", + "type": "array", + "items": { + "$ref": "#/definitions/cpuUsage" + } + }, + "errors": { "$ref": "#/definitions/errors" }, + "featureUsageArray": { + "description": "array of features in use", + "type": "array", + "items": { + "$ref": "#/definitions/featuresInUse" + } + }, + "filesystemUsageArray": { + "description": "filesystem usage of the VM on which the VNFC reporting the event is running", + "type": "array", + "items": { + "$ref": "#/definitions/filesystemUsage" + } + }, + "latencyDistribution": { + "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-VNF configured ranges", + "type": "array", + "items": { + "$ref": "#/definitions/latencyBucketMeasure" + } + }, + "meanRequestLatency": { + "description": "mean seconds required to respond to each request for the VM on which the VNFC reporting the event is running", + "type": "number" + }, + 
"measurementInterval": { + "description": "interval over which measurements are being reported in seconds", + "type": "number" + }, + "measurementsForVfScalingVersion": { + "description": "version of the measurementsForVfScaling block", + "type": "number" + }, + "memoryConfigured": { + "description": "memory in MB configured in the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "memoryUsed": { + "description": "memory usage in MB of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "numberOfMediaPortsInUse": { + "description": "number of media ports in use", + "type": "number" + }, + "requestRate": { + "description": "peak rate of service requests per second to the VNF over the measurementInterval", + "type": "number" + }, + "vnfcScalingMetric": { + "description": "represents busy-ness of the VNF from 0 to 100 as reported by the VNFC", + "type": "number" + }, + "vNicUsageArray": { + "description": "usage of an array of virtual network interface cards", + "type": "array", + "items": { + "$ref": "#/definitions/vNicUsage" + } + } + }, + "required": [ "measurementInterval" ] + }, + "mobileFlowFields": { + "description": "mobileFlow fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional mobileFlow fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "applicationType": { + "description": "Application type inferred", + "type": "string" + }, + "appProtocolType": { + "description": "application protocol", + "type": "string" + }, + "appProtocolVersion": { + "description": "application protocol version", + "type": "string" + }, + "cid": { + "description": "cell id", + "type": "string" + }, + "connectionType": { + "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc", + "type": "string" + }, + "ecgi": { + "description": "Evolved Cell Global Id", + "type": "string" + }, + "flowDirection": { + "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow", + "type": "string" + }, + "gtpPerFlowMetrics": { "$ref": "#/definitions/gtpPerFlowMetrics" }, + "gtpProtocolType": { + "description": "GTP protocol", + "type": "string" + }, + "gtpVersion": { + "description": "GTP protocol version", + "type": "string" + }, + "httpHeader": { + "description": "HTTP request header, if the flow connects to a node referenced by HTTP", + "type": "string" + }, + "imei": { + "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "imsi": { + "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "ipProtocolType": { + "description": "IP protocol type e.g., TCP, UDP, RTP...", + "type": "string" + }, + "ipVersion": { + "description": "IP protocol version e.g., IPv4, IPv6", + "type": "string" + }, + "lac": { + "description": "location area code", + "type": "string" + }, + "mcc": { + "description": "mobile country code", + "type": "string" + }, + "mnc": { + "description": "mobile network code", + "type": "string" + }, + "mobileFlowFieldsVersion": { + "description": "version of the mobileFlowFields block", + "type": "number" + }, + "msisdn": { + "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device", + "type": "string" + }, + "otherEndpointIpAddress": { + "description": "IP address 
for the other endpoint, as used for the flow being reported on", + "type": "string" + }, + "otherEndpointPort": { + "description": "IP Port for the other endpoint, as used for the flow being reported on", + "type": "number" + }, + "otherFunctionalRole": { + "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...", + "type": "string" + }, + "rac": { + "description": "routing area code", + "type": "string" + }, + "radioAccessTechnology": { + "description": "Radio Access Technology e.g., 2G, 3G, LTE", + "type": "string" + }, + "reportingEndpointIpAddr": { + "description": "IP address for the reporting entity, as used for the flow being reported on", + "type": "string" + }, + "reportingEndpointPort": { + "description": "IP port for the reporting entity, as used for the flow being reported on", + "type": "number" + }, + "sac": { + "description": "service area code", + "type": "string" + }, + "samplingAlgorithm": { + "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied", + "type": "number" + }, + "tac": { + "description": "transport area code", + "type": "string" + }, + "tunnelId": { + "description": "tunnel identifier", + "type": "string" + }, + "vlanId": { + "description": "VLAN identifier used by this flow", + "type": "string" + } + }, + "required": [ "flowDirection", "gtpPerFlowMetrics", "ipProtocolType", + "ipVersion", "otherEndpointIpAddress", "otherEndpointPort", + "reportingEndpointIpAddr", "reportingEndpointPort" ] + }, + "otherFields": { + "description": "additional fields not reported elsewhere", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "required": [ "messageId", "text" ] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional stateChange fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "number" + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "required": [ "newState", "oldState", "stateInterface" ] + }, + "suppressedNvPairs": { + "description": "List
of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling", + "type": "object", + "properties": { + "nvPairFieldName": { + "description": "Name of the field within which are the nvpair names to suppress", + "type": "string" + }, + "suppressedNvPairNames": { + "description": "Array of nvpair names to suppress within the nvpairFieldName", + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ "nvPairFieldName", "suppressedNvPairNames" ] + }, + "syslogFields": { + "description": "sysLog fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional syslog fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "eventSourceHost": { + "description": "hostname of the device", + "type": "string" + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "syslogFacility": { + "description": "numeric code from 0 to 23 for facility--see table in documentation", + "type": "number" + }, + "syslogFieldsVersion": { + "description": "version of the syslogFields block", + "type": "number" + }, + "syslogMsg": { + "description": "syslog message", + "type": "string" + }, + "syslogPri": { + "description": "0-192 combined severity and facility", + "type": "number" + }, + "syslogProc": { + "description": "identifies the application that originated the message", + "type": "string" + }, + "syslogProcId": { + "description": "a change in the value of this field indicates a discontinuity in syslog reporting", + "type": "number" + }, + "syslogSData": { + "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", + "type": "string" + }, + "syslogSdId": { + "description": "0-32 char in format name@number for example ourSDID@32473", + "type": "string" + }, + "syslogSev": { + "description": "numerical Code for severity derived from syslogPri as remainder of syslogPri / 8", + "type": "string" + }, + "syslogTag": { + "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", + "type": "string" + }, + "syslogVer": { + "description": "IANA assigned version of the syslog protocol specification - typically 1", + "type": "number" + } + }, + "required": [ "eventSourceType", "syslogMsg", "syslogTag" ] + }, + "thresholdCrossingAlertFields": { + "description": "fields specific to threshold crossing alert events", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional threshold crossing alert fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "additionalParameters": { + "description": "performance counters", + "type": "array", + "items": { + "$ref": "#/definitions/counter" + } + }, + "alertAction": { + "description": "Event action", + "type": "string", + "enum": [ + "CLEAR", + "CONT", + "SET" + ] + }, + "alertDescription": { + "description": "Unique short alert description such as IF-SHUB-ERRDROP", + "type": "string" + }, + "alertType": { + "description": "Event type", + "type": "string", + "enum": [ + "CARD-ANOMALY", + "ELEMENT-ANOMALY", + "INTERFACE-ANOMALY", + "SERVICE-ANOMALY" + ] + }, + "alertValue": { + "description": "Calculated API value (if applicable)", + "type": "string" + }, + "associatedAlertIdList": { + "description": "List of
eventIds associated with the event being reported", + "type": "array", + "items": { "type": "string" } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": "string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + "description": "version of the thresholdCrossingAlertFields block", + "type": "number" + } + }, + "required": [ + "additionalParameters", + "alertAction", + "alertDescription", + "alertType", + "collectionTimestamp", + "eventSeverity", + "eventStartTimestamp" + ] + }, + "vNicUsage": { + "description": "usage of identified virtual network interface card", + "type": "object", + "properties": { + "broadcastPacketsIn": { "type": "number" }, + "broadcastPacketsOut": { "type": "number" }, + "bytesIn": { "type": "number" }, + "bytesOut": { "type": "number" }, + "multicastPacketsIn": { "type": "number" }, + "multicastPacketsOut": { "type": "number" }, + "packetsIn": { "type": "number" }, + "packetsOut": { "type": "number" }, + "unicastPacketsIn": { "type": "number" }, + "unicastPacketsOut": { "type": "number" }, + "vNicIdentifier": { "type": "string" } + }, + "required": [ "bytesIn", "bytesOut", "packetsIn", "packetsOut", "vNicIdentifier"] + } + }, + "title": "Event Listener", + "type": "object", + "properties": { + "events": {"$ref": "#/definitions/event"} + } +}
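This second fixture matches the baseline schema except for its final properties block: it declares "events" where the baseline declares "event", which is exactly what the file name CommonEventFormat_vNoEventProperty.json advertises. A hedged sketch of the kind of assertion a test might make against it, reusing the hypothetical everit-based loading from the earlier sketch (ObjectSchema and getPropertySchemas() are everit APIs; nothing here is taken from this commit):

// Sketch only; additionally imports org.everit.json.schema.ObjectSchema.
ObjectSchema schema = (ObjectSchema) SchemaLoader.load(new JSONObject(new JSONTokener(
        VesSchemaSmokeTest.class.getResourceAsStream(
                "/ves-schema/CommonEventFormat_vNoEventProperty.json"))));
// The fixture deliberately has no "event" property for the code under test to find.
assert !schema.getPropertySchemas().containsKey("event");
assert schema.getPropertySchemas().containsKey("events");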
\ No newline at end of file diff --git a/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vUnresolvable.json b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vUnresolvable.json new file mode 100644 index 0000000..5439966 --- /dev/null +++ b/dcaedt_be/src/test/resources/ves-schema/CommonEventFormat_vUnresolvable.json @@ -0,0 +1,288 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "definitions": { + + "otherFields": { + "description": "additional fields not reported elsewhere", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "required": [ "messageId", "text" ] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional stateChange fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "number" + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "required": [ "newState", "oldState", "stateInterface" ] + }, + "suppressedNvPairs": { + "description": "List of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling", + "type": "object", + "properties": { + "nvPairFieldName": { + "description": "Name of the field within which are the nvpair names to suppress", + "type": "string" + }, + "suppressedNvPairNames": { + "description": "Array of nvpair names to suppress within the nvpairFieldName", + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ "nvPairFieldName", "suppressedNvPairNames" ] + }, + "syslogFields": { + "description": "sysLog fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional syslog fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "eventSourceHost": { + "description": "hostname of the device", + "type": "string" + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "syslogFacility": { + "description": "numeric code from 0 to 23 for facility--see table in documentation", + "type": "number" + }, + "syslogFieldsVersion": { + "description": 
"version of the syslogFields block", + "type": "number" + }, + "syslogMsg": { + "description": "syslog message", + "type": "string" + }, + "syslogPri": { + "description": "0-192 combined severity and facility", + "type": "number" + }, + "syslogProc": { + "description": "identifies the application that originated the message", + "type": "string" + }, + "syslogProcId": { + "description": "a change in the value of this field indicates a discontinuity in syslog reporting", + "type": "number" + }, + "syslogSData": { + "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", + "type": "string" + }, + "syslogSdId": { + "description": "0-32 char in format name@number for example ourSDID@32473", + "type": "string" + }, + "syslogSev": { + "description": "numerical Code for severity derived from syslogPri as remaider of syslogPri / 8", + "type": "string" + }, + "syslogTag": { + "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", + "type": "string" + }, + "syslogVer": { + "description": "IANA assigned version of the syslog protocol specification - typically 1", + "type": "number" + } + }, + "required": [ "eventSourceType", "syslogMsg", "syslogTag" ] + }, + "thresholdCrossingAlertFields": { + "description": "fields specific to threshold crossing alert events", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional threshold crossing alert fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "additionalParameters": { + "description": "performance counters", + "type": "array", + "items": { + "$ref": "#/definitions/counter" + } + }, + "alertAction": { + "description": "Event action", + "type": "string", + "enum": [ + "CLEAR", + "CONT", + "SET" + ] + }, + "alertDescription": { + "description": "Unique short alert description such as IF-SHUB-ERRDROP", + "type": "string" + }, + "alertType": { + "description": "Event type", + "type": "string", + "enum": [ + "CARD-ANOMALY", + "ELEMENT-ANOMALY", + "INTERFACE-ANOMALY", + "SERVICE-ANOMALY" + ] + }, + "alertValue": { + "description": "Calculated API value (if applicable)", + "type": "string" + }, + "associatedAlertIdList": { + "description": "List of eventIds associated with the event being reported", + "type": "array", + "items": { "type": "string" } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": "string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + 
"description": "version of the thresholdCrossingAlertFields block", + "type": "number" + } + }, + "required": [ + "additionalParameters", + "alertAction", + "alertDescription", + "alertType", + "collectionTimestamp", + "eventSeverity", + "eventStartTimestamp" + ] + }, + "vNicUsage": { + "description": "usage of identified virtual network interface card", + "type": "object", + "properties": { + "broadcastPacketsIn": { "type": "number" }, + "broadcastPacketsOut": { "type": "number" }, + "bytesIn": { "type": "number" }, + "bytesOut": { "type": "number" }, + "multicastPacketsIn": { "type": "number" }, + "multicastPacketsOut": { "type": "number" }, + "packetsIn": { "type": "number" }, + "packetsOut": { "type": "number" }, + "unicastPacketsIn": { "type": "number" }, + "unicastPacketsOut": { "type": "number" }, + "vNicIdentifier": { "type": "string" } + }, + "required": [ "bytesIn", "bytesOut", "packetsIn", "packetsOut", "vNicIdentifier"] + } + }, + "title": "Event Listener", + "type": "object", + "properties": { + "event": {"$ref": "#/definitions/event"} + } +}
\ No newline at end of file diff --git a/dcaedt_be/src/test/resources/ves-schema/unsupportedFilename.json b/dcaedt_be/src/test/resources/ves-schema/unsupportedFilename.json new file mode 100644 index 0000000..2f86c38 --- /dev/null +++ b/dcaedt_be/src/test/resources/ves-schema/unsupportedFilename.json @@ -0,0 +1,1165 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "definitions": { + "attCopyrightNotice": { + "description": "Copyright (c) <2016>, AT&T Intellectual Property. All other rights reserved", + "type": "object", + "properties": { + "useAndRedistribution": { + "description": "Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:", + "type": "string" + }, + "condition1": { + "description": "Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.", + "type": "string" + }, + "condition2": { + "description": "Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.", + "type": "string" + }, + "condition3": { + "description": "All advertising materials mentioning features or use of this software must display the following acknowledgement: This product includes software developed by the AT&T.", + "type": "string" + }, + "condition4": { + "description": "Neither the name of AT&T nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.", + "type": "string" + }, + "disclaimerLine1": { + "description": "THIS SOFTWARE IS PROVIDED BY AT&T INTELLECTUAL PROPERTY AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", + "type": "string" + }, + "disclaimerLine2": { + "description": "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL AT&T INTELLECTUAL PROPERTY BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", + "type": "string" + }, + "disclaimerLine3": { + "description": "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,", + "type": "string" + }, + "disclaimerLine4": { + "description": "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", + "type": "string" + } + } + }, + "codecsInUse": { + "description": "number of times an identified codec was used over the measurementInterval", + "type": "object", + "properties": { + "codecIdentifier": { "type": "string" }, + "numberInUse": { "type": "number" } + }, + "required": [ "codecIdentifier", "numberInUse" ] + }, + "command": { + "description": "command from an event collector toward an event source", + "type": "object", + "properties": { + "commandType": { + "type": "string", + "enum": [ + "heartbeatIntervalChange", + "measurementIntervalChange", + "provideThrottlingState", + "throttlingSpecification" + ] + }, + "eventDomainThrottleSpecification": { "$ref": "#/definitions/eventDomainThrottleSpecification" }, + "measurementInterval": { "type": "number" } + }, + "required": [ "commandType" ] + }, + "commandList": { + "description": "array of commands from an event collector toward an event source", + "type": "array", + "items": { + "$ref": "#/definitions/commandListEntry" + }, + "minItems": 0 + }, + "commandListEntry": { + "description": "reference to a command object", + "type": "object", + "properties": { + "command": {"$ref": "#/definitions/command"} + }, + "required": [ "command" ] + }, + "commonEventHeader": { + "description": "fields common to all events", + "type": "object", + "properties": { + "domain": { + "description": "the eventing domain associated with the event", + "type": "string", + "enum": [ + "fault", + "heartbeat", + "measurementsForVfScaling", + "mobileFlow", + "other", + "stateChange", + "syslog", + "thresholdCrossingAlert" + ] + }, + "eventId": { + "description": "event key that is unique to the event source", + "type": "string" + }, + "eventType": { + "description": "unique event topic name", + "type": "string" + }, + "functionalRole": { + "description": "function of the event source e.g., eNodeB, MME, PCRF", + "type": "string" + }, + "internalHeaderFields": { "$ref": "#/definitions/internalHeaderFields" }, + "lastEpochMicrosec": { + "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "priority": { + "description": "processing priority", + "type": "string", + "enum": [ + "High", + "Medium", + "Normal", + "Low" + ] + }, + "reportingEntityId": { + "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process", + "type": "string" + }, + "reportingEntityName": { + "description": "name of the entity reporting the event, for example, an OAM VM", + "type": "string" + }, + "sequence": { + "description": "ordering of events communicated by an event source instance or 0 if not needed", + "type": "integer" + }, + "sourceId": { + "description": "UUID identifying the entity experiencing the event issue; must be populated by the 
ATT enrichment process", + "type": "string" + }, + "sourceName": { + "description": "name of the entity experiencing the event issue", + "type": "string" + }, + "startEpochMicrosec": { + "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "version": { + "description": "version of the event header", + "type": "number" + } + }, + "required": [ "domain", "eventId", "functionalRole", "lastEpochMicrosec", + "priority", "reportingEntityName", "sequence", + "sourceName", "startEpochMicrosec" ] + }, + "counter": { + "description": "performance counter", + "type": "object", + "properties": { + "criticality": { "type": "string", "enum": [ "CRIT", "MAJ" ] }, + "name": { "type": "string" }, + "thresholdCrossed": { "type": "string" }, + "value": { "type": "string"} + }, + "required": [ "criticality", "name", "thresholdCrossed", "value" ] + }, + "cpuUsage": { + "description": "percent usage of an identified CPU", + "type": "object", + "properties": { + "cpuIdentifier": { "type": "string" }, + "percentUsage": { "type": "number" } + }, + "required": [ "cpuIdentifier", "percentUsage" ] + }, + "errors": { + "description": "receive and transmit errors for the measurements domain", + "type": "object", + "properties": { + "receiveDiscards": { "type": "number" }, + "receiveErrors": { "type": "number" }, + "transmitDiscards": { "type": "number" }, + "transmitErrors": { "type": "number" } + }, + "required": [ "receiveDiscards", "receiveErrors", "transmitDiscards", "transmitErrors" ] + }, + "event": { + "description": "the root level of the common event format", + "type": "object", + "properties": { + "commonEventHeader": { "$ref": "#/definitions/commonEventHeader" }, + "faultFields": { "$ref": "#/definitions/faultFields" }, + "measurementsForVfScalingFields": { "$ref": "#/definitions/measurementsForVfScalingFields" }, + "mobileFlowFields": { "$ref": "#/definitions/mobileFlowFields" }, + "otherFields": { "$ref": "#/definitions/otherFields" }, + "stateChangeFields": { "$ref": "#/definitions/stateChangeFields" }, + "syslogFields": { "$ref": "#/definitions/syslogFields" }, + "thresholdCrossingAlertFields": { "$ref": "#/definitions/thresholdCrossingAlertFields" } + }, + "required": [ "commonEventHeader" ] + }, + "eventDomainThrottleSpecification": { + "description": "specification of what information to suppress within an event domain", + "type": "object", + "properties": { + "eventDomain": { + "description": "Event domain enum from the commonEventHeader domain field", + "type": "string" + }, + "suppressedFieldNames": { + "description": "List of optional field names in the event block that should not be sent to the Event Listener", + "type": "array", + "items": { + "type": "string" + } + }, + "suppressedNvPairsList": { + "description": "Optional list of specific NvPairsNames to suppress within a given Name-Value Field", + "type": "array", + "items": { + "$ref": "#/definitions/suppressedNvPairs" + } + } + }, + "required": [ "eventDomain" ] + }, + "eventDomainThrottleSpecificationList": { + "description": "array of eventDomainThrottleSpecifications", + "type": "array", + "items": { + "$ref": "#/definitions/eventDomainThrottleSpecification" + }, + "minItems": 0 + }, + "eventList": { + "description": "array of events", + "type": "array", + "items": { + "$ref": "#/definitions/event" + } + }, + "eventThrottlingState": { + "description": "reports the throttling in force at the event 
source", + "type": "object", + "properties": { + "eventThrottlingMode": { + "description": "Mode the event manager is in", + "type": "string", + "enum": [ + "normal", + "throttled" + ] + }, + "eventDomainThrottleSpecificationList": { "$ref": "#/definitions/eventDomainThrottleSpecificationList" } + }, + "required": [ "eventThrottlingMode" ] + }, + "faultFields": { + "description": "fields specific to fault events", + "type": "object", + "properties": { + "alarmAdditionalInformation": { + "description": "additional alarm information", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "alarmCondition": { + "description": "alarm condition reported by the device", + "type": "string" + }, + "alarmInterfaceA": { + "description": "card, port, channel or interface name of the device generating the alarm", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "faultFieldsVersion": { + "description": "version of the faultFields block", + "type": "number" + }, + "specificProblem": { + "description": "short description of the alarm or problem", + "type": "string" + }, + "vfStatus": { + "description": "virtual function status enumeration", + "type": "string", + "enum": [ + "Active", + "Idle", + "Preparing to terminate", + "Ready to terminate", + "Requesting termination" + ] + } + }, + "required": [ "alarmCondition", "eventSeverity", + "eventSourceType", "specificProblem", "vfStatus" ] + }, + "featuresInUse": { + "description": "number of times an identified feature was used over the measurementInterval", + "type": "object", + "properties": { + "featureIdentifier": { "type": "string" }, + "featureUtilization": { "type": "number" } + }, + "required": [ "featureIdentifier", "featureUtilization" ] + }, + "field": { + "description": "name value pair", + "type": "object", + "properties": { + "name": { "type": "string" }, + "value": { "type": "string" } + }, + "required": [ "name", "value" ] + }, + "filesystemUsage": { + "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second", + "type": "object", + "properties": { + "blockConfigured": { "type": "number" }, + "blockIops": { "type": "number" }, + "blockUsed": { "type": "number" }, + "ephemeralConfigured": { "type": "number" }, + "ephemeralIops": { "type": "number" }, + "ephemeralUsed": { "type": "number" }, + "filesystemName": { "type": "string" } + }, + "required": [ "blockConfigured", "blockIops", "blockUsed", "ephemeralConfigured", + "ephemeralIops", "ephemeralUsed", "filesystemName" ] + }, + "gtpPerFlowMetrics": { + "description": "Mobility GTP Protocol per flow metrics", + "type": "object", + "properties": { + "avgBitErrorRate": { + "description": "average bit error rate", + "type": "number" + }, + "avgPacketDelayVariation": { + "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "avgPacketLatency": { + "description": "average delivery latency", + "type": "number" + }, + "avgReceiveThroughput": { + "description": "average receive throughput", + "type": "number" + }, + 
"avgTransmitThroughput": { + "description": "average transmit throughput", + "type": "number" + }, + "durConnectionFailedStatus": { + "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval", + "type": "number" + }, + "durTunnelFailedStatus": { + "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval", + "type": "number" + }, + "flowActivatedBy": { + "description": "Endpoint activating the flow", + "type": "string" + }, + "flowActivationEpoch": { + "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available", + "type": "number" + }, + "flowActivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowActivationTime": { + "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowDeactivatedBy": { + "description": "Endpoint deactivating the flow", + "type": "string" + }, + "flowDeactivationEpoch": { + "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time", + "type": "number" + }, + "flowDeactivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowDeactivationTime": { + "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowStatus": { + "description": "connection status at reporting time as a working / inactive / failed indicator value", + "type": "string" + }, + "gtpConnectionStatus": { + "description": "Current connection state at reporting time", + "type": "string" + }, + "gtpTunnelStatus": { + "description": "Current tunnel state at reporting time", + "type": "string" + }, + "ipTosCountList": { + "description": "array of key: value pairs where the keys are drawn from the IP Type-of-Service identifiers which range from '0' to '255', and the values are the count of packets that had those ToS identifiers in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "ipTosList": { + "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'", + "type": "array", + "items": { + "type": "string" + } + }, + "largePacketRtt": { + "description": "large packet round trip time", + "type": "number" + }, + "largePacketThreshold": { + "description": "large packet threshold being applied", + "type": "number" + }, + "maxPacketDelayVariation": { + "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "maxReceiveBitRate": { + "description": "maximum receive bit rate", + "type": "number" + }, + "maxTransmitBitRate": { + "description": "maximum transmit bit rate", + "type": "number" + }, + "mobileQciCosCountList": { 
+ "description": "array of key: value pairs where the keys are drawn from LTE QCI or UMTS class of service strings, and the values are the count of packets that had those strings in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "mobileQciCosList": { + "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "numActivationFailures": { + "description": "Number of failed activation requests, as observed by the reporting node", + "type": "number" + }, + "numBitErrors": { + "description": "number of errored bits", + "type": "number" + }, + "numBytesReceived": { + "description": "number of bytes received, including retransmissions", + "type": "number" + }, + "numBytesTransmitted": { + "description": "number of bytes transmitted, including retransmissions", + "type": "number" + }, + "numDroppedPackets": { + "description": "number of received packets dropped due to errors per virtual interface", + "type": "number" + }, + "numGtpEchoFailures": { + "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2", + "type": "number" + }, + "numGtpTunnelErrors": { + "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1", + "type": "number" + }, + "numHttpErrors": { + "description": "Http error count", + "type": "number" + }, + "numL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, including retransmissions", + "type": "number" + }, + "numL7BytesTransmitted": { + "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions", + "type": "number" + }, + "numLostPackets": { + "description": "number of lost packets", + "type": "number" + }, + "numOutOfOrderPackets": { + "description": "number of out-of-order packets", + "type": "number" + }, + "numPacketErrors": { + "description": "number of errored packets", + "type": "number" + }, + "numPacketsReceivedExclRetrans": { + "description": "number of packets received, excluding retransmission", + "type": "number" + }, + "numPacketsReceivedInclRetrans": { + "description": "number of packets received, including retransmission", + "type": "number" + }, + "numPacketsTransmittedInclRetrans": { + "description": "number of packets transmitted, including retransmissions", + "type": "number" + }, + "numRetries": { + "description": "number of packet retries", + "type": "number" + }, + "numTimeouts": { + "description": "number of packet timeouts", + "type": "number" + }, + "numTunneledL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, excluding retransmissions", + "type": "number" + }, + "roundTripTime": { + "description": "round trip time", + "type": "number" + }, + "tcpFlagCountList": { + "description": "array of key: value pairs where the keys are drawn from TCP Flags and the values are the count of packets that had that TCP Flag in the flow", + "type": "array", + "items": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "number" } + ] + } + }, + "tcpFlagList": { + "description": "Array of unique TCP Flags observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "timeToFirstByte": { + "description": "Time in milliseconds between the connection activation and first byte received", + "type": 
"number" + } + }, + "required": [ "avgBitErrorRate", "avgPacketDelayVariation", "avgPacketLatency", + "avgReceiveThroughput", "avgTransmitThroughput", + "flowActivationEpoch", "flowActivationMicrosec", + "flowDeactivationEpoch", "flowDeactivationMicrosec", + "flowDeactivationTime", "flowStatus", + "maxPacketDelayVariation", "numActivationFailures", + "numBitErrors", "numBytesReceived", "numBytesTransmitted", + "numDroppedPackets", "numL7BytesReceived", + "numL7BytesTransmitted", "numLostPackets", + "numOutOfOrderPackets", "numPacketErrors", + "numPacketsReceivedExclRetrans", + "numPacketsReceivedInclRetrans", + "numPacketsTransmittedInclRetrans", + "numRetries", "numTimeouts", "numTunneledL7BytesReceived", + "roundTripTime", "timeToFirstByte" + ] + }, + "internalHeaderFields": { + "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources", + "type": "object" + }, + "latencyBucketMeasure": { + "description": "number of counts falling within a defined latency bucket", + "type": "object", + "properties": { + "countsInTheBucket": { "type": "number" }, + "highEndOfLatencyBucket": { "type": "number" }, + "lowEndOfLatencyBucket": { "type": "number" } + }, + "required": [ "countsInTheBucket" ] + }, + "measurementGroup": { + "description": "measurement group", + "type": "object", + "properties": { + "name": { "type": "string" }, + "measurements": { + "description": "array of name value pair measurements", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + } + }, + "required": [ "name", "measurements" ] + }, + "measurementsForVfScalingFields": { + "description": "measurementsForVfScaling fields", + "type": "object", + "properties": { + "additionalMeasurements": { + "description": "additional measurement fields", + "type": "array", + "items": { + "$ref": "#/definitions/measurementGroup" + } + }, + "aggregateCpuUsage": { + "description": "aggregate CPU usage of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "codecUsageArray": { + "description": "array of codecs in use", + "type": "array", + "items": { + "$ref": "#/definitions/codecsInUse" + } + }, + "concurrentSessions": { + "description": "peak concurrent sessions for the VM or VNF over the measurementInterval", + "type": "number" + }, + "configuredEntities": { + "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the VNF", + "type": "number" + }, + "cpuUsageArray": { + "description": "usage of an array of CPUs", + "type": "array", + "items": { + "$ref": "#/definitions/cpuUsage" + } + }, + "errors": { "$ref": "#/definitions/errors" }, + "featureUsageArray": { + "description": "array of features in use", + "type": "array", + "items": { + "$ref": "#/definitions/featuresInUse" + } + }, + "filesystemUsageArray": { + "description": "filesystem usage of the VM on which the VNFC reporting the event is running", + "type": "array", + "items": { + "$ref": "#/definitions/filesystemUsage" + } + }, + "latencyDistribution": { + "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-VNF configured ranges", + "type": "array", + "items": { + "$ref": "#/definitions/latencyBucketMeasure" + } + }, + "meanRequestLatency": { + "description": "mean seconds required to respond to each request for the VM on which the VNFC reporting the event is running", + "type": "number" + }, + 
"measurementInterval": { + "description": "interval over which measurements are being reported in seconds", + "type": "number" + }, + "measurementsForVfScalingVersion": { + "description": "version of the measurementsForVfScaling block", + "type": "number" + }, + "memoryConfigured": { + "description": "memory in MB configured in the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "memoryUsed": { + "description": "memory usage in MB of the VM on which the VNFC reporting the event is running", + "type": "number" + }, + "numberOfMediaPortsInUse": { + "description": "number of media ports in use", + "type": "number" + }, + "requestRate": { + "description": "peak rate of service requests per second to the VNF over the measurementInterval", + "type": "number" + }, + "vnfcScalingMetric": { + "description": "represents busy-ness of the VNF from 0 to 100 as reported by the VNFC", + "type": "number" + }, + "vNicUsageArray": { + "description": "usage of an array of virtual network interface cards", + "type": "array", + "items": { + "$ref": "#/definitions/vNicUsage" + } + } + }, + "required": [ "measurementInterval" ] + }, + "mobileFlowFields": { + "description": "mobileFlow fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional mobileFlow fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "applicationType": { + "description": "Application type inferred", + "type": "string" + }, + "appProtocolType": { + "description": "application protocol", + "type": "string" + }, + "appProtocolVersion": { + "description": "application protocol version", + "type": "string" + }, + "cid": { + "description": "cell id", + "type": "string" + }, + "connectionType": { + "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc", + "type": "string" + }, + "ecgi": { + "description": "Evolved Cell Global Id", + "type": "string" + }, + "flowDirection": { + "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow", + "type": "string" + }, + "gtpPerFlowMetrics": { "$ref": "#/definitions/gtpPerFlowMetrics" }, + "gtpProtocolType": { + "description": "GTP protocol", + "type": "string" + }, + "gtpVersion": { + "description": "GTP protocol version", + "type": "string" + }, + "httpHeader": { + "description": "HTTP request header, if the flow connects to a node referenced by HTTP", + "type": "string" + }, + "imei": { + "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "imsi": { + "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "ipProtocolType": { + "description": "IP protocol type e.g., TCP, UDP, RTP...", + "type": "string" + }, + "ipVersion": { + "description": "IP protocol version e.g., IPv4, IPv6", + "type": "string" + }, + "lac": { + "description": "location area code", + "type": "string" + }, + "mcc": { + "description": "mobile country code", + "type": "string" + }, + "mnc": { + "description": "mobile network code", + "type": "string" + }, + "mobileFlowFieldsVersion": { + "description": "version of the mobileFlowFields block", + "type": "number" + }, + "msisdn": { + "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device", + "type": "string" + }, + "otherEndpointIpAddress": { + "description": "IP address 
for the other endpoint, as used for the flow being reported on", + "type": "string" + }, + "otherEndpointPort": { + "description": "IP Port for the reporting entity, as used for the flow being reported on", + "type": "number" + }, + "otherFunctionalRole": { + "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...", + "type": "string" + }, + "rac": { + "description": "routing area code", + "type": "string" + }, + "radioAccessTechnology": { + "description": "Radio Access Technology e.g., 2G, 3G, LTE", + "type": "string" + }, + "reportingEndpointIpAddr": { + "description": "IP address for the reporting entity, as used for the flow being reported on", + "type": "string" + }, + "reportingEndpointPort": { + "description": "IP port for the reporting entity, as used for the flow being reported on", + "type": "number" + }, + "sac": { + "description": "service area code", + "type": "string" + }, + "samplingAlgorithm": { + "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied", + "type": "number" + }, + "tac": { + "description": "transport area code", + "type": "string" + }, + "tunnelId": { + "description": "tunnel identifier", + "type": "string" + }, + "vlanId": { + "description": "VLAN identifier used by this flow", + "type": "string" + } + }, + "required": [ "flowDirection", "gtpPerFlowMetrics", "ipProtocolType", + "ipVersion", "otherEndpointIpAddress", "otherEndpointPort", + "reportingEndpointIpAddr", "reportingEndpointPort" ] + }, + "otherFields": { + "description": "additional fields not reported elsewhere", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "required": [ "messageId", "text" ] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional stateChange fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "number" + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "required": [ "newState", "oldState", "stateInterface" ] + }, + "suppressedNvPairs": { + "description": "List 
of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling", + "type": "object", + "properties": { + "nvPairFieldName": { + "description": "Name of the field within which are the nvpair names to suppress", + "type": "string" + }, + "suppressedNvPairNames": { + "description": "Array of nvpair names to suppress within the nvpairFieldName", + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ "nvPairFieldName", "suppressedNvPairNames" ] + }, + "syslogFields": { + "description": "sysLog fields", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional syslog fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "eventSourceHost": { + "description": "hostname of the device", + "type": "string" + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "syslogFacility": { + "description": "numeric code from 0 to 23 for facility--see table in documentation", + "type": "number" + }, + "syslogFieldsVersion": { + "description": "version of the syslogFields block", + "type": "number" + }, + "syslogMsg": { + "description": "syslog message", + "type": "string" + }, + "syslogPri": { + "description": "0-192 combined severity and facility", + "type": "number" + }, + "syslogProc": { + "description": "identifies the application that originated the message", + "type": "string" + }, + "syslogProcId": { + "description": "a change in the value of this field indicates a discontinuity in syslog reporting", + "type": "number" + }, + "syslogSData": { + "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", + "type": "string" + }, + "syslogSdId": { + "description": "0-32 char in format name@number for example ourSDID@32473", + "type": "string" + }, + "syslogSev": { + "description": "numerical Code for severity derived from syslogPri as remainder of syslogPri / 8", + "type": "string" + }, + "syslogTag": { + "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", + "type": "string" + }, + "syslogVer": { + "description": "IANA assigned version of the syslog protocol specification - typically 1", + "type": "number" + } + }, + "required": [ "eventSourceType", "syslogMsg", "syslogTag" ] + }, + "thresholdCrossingAlertFields": { + "description": "fields specific to threshold crossing alert events", + "type": "object", + "properties": { + "additionalFields": { + "description": "additional threshold crossing alert fields if needed", + "type": "array", + "items": { + "$ref": "#/definitions/field" + } + }, + "additionalParameters": { + "description": "performance counters", + "type": "array", + "items": { + "$ref": "#/definitions/counter" + } + }, + "alertAction": { + "description": "Event action", + "type": "string", + "enum": [ + "CLEAR", + "CONT", + "SET" + ] + }, + "alertDescription": { + "description": "Unique short alert description such as IF-SHUB-ERRDROP", + "type": "string" + }, + "alertType": { + "description": "Event type", + "type": "string", + "enum": [ + "CARD-ANOMALY", + "ELEMENT-ANOMALY", + "INTERFACE-ANOMALY", + "SERVICE-ANOMALY" + ] + }, + "alertValue": { + "description": "Calculated API value (if applicable)", + "type": "string" + }, + "associatedAlertIdList": { + "description": "List of 
eventIds associated with the event being reported", + "type": "array", + "items": { "type": "string" } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": "string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + "description": "version of the thresholdCrossingAlertFields block", + "type": "number" + } + }, + "required": [ + "additionalParameters", + "alertAction", + "alertDescription", + "alertType", + "collectionTimestamp", + "eventSeverity", + "eventStartTimestamp" + ] + }, + "vNicUsage": { + "description": "usage of identified virtual network interface card", + "type": "object", + "properties": { + "broadcastPacketsIn": { "type": "number" }, + "broadcastPacketsOut": { "type": "number" }, + "bytesIn": { "type": "number" }, + "bytesOut": { "type": "number" }, + "multicastPacketsIn": { "type": "number" }, + "multicastPacketsOut": { "type": "number" }, + "packetsIn": { "type": "number" }, + "packetsOut": { "type": "number" }, + "unicastPacketsIn": { "type": "number" }, + "unicastPacketsOut": { "type": "number" }, + "vNicIdentifier": { "type": "string" } + }, + "required": [ "bytesIn", "bytesOut", "packetsIn", "packetsOut", "vNicIdentifier"] + } + }, + "title": "Event Listener", + "type": "object", + "properties": { + "event": {"$ref": "#/definitions/event"} + } +}
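This second fixture carries the full "Event Listener" schema; going by its name, the tests presumably use it to exercise rejection of an unsupported file name rather than of the schema content. Structurally, only the commonEventHeader block is required at the event level, and the header in turn requires domain, eventId, functionalRole, lastEpochMicrosec, priority, reportingEntityName, sequence, sourceName and startEpochMicrosec. A minimal conforming event can be checked as below; again a sketch assuming the org.everit.json.schema validator, with purely illustrative field values.

// Hypothetical sketch: the smallest event this schema accepts is just a header.
import org.everit.json.schema.Schema;
import org.everit.json.schema.loader.SchemaLoader;
import org.json.JSONObject;
import org.json.JSONTokener;

import java.io.InputStream;

public class MinimalVesEventCheck {
    public static void main(String[] args) throws Exception {
        try (InputStream in = MinimalVesEventCheck.class
                .getResourceAsStream("/ves-schema/unsupportedFilename.json")) {
            Schema schema = SchemaLoader.load(new JSONObject(new JSONTokener(in)));
            JSONObject header = new JSONObject()   // values below are made up
                    .put("domain", "fault")
                    .put("eventId", "fault000001")
                    .put("functionalRole", "eNodeB")
                    .put("lastEpochMicrosec", 1413378172000000L)
                    .put("priority", "High")
                    .put("reportingEntityName", "oamVm001")
                    .put("sequence", 0)
                    .put("sourceName", "scfx0001vm002cap001")
                    .put("startEpochMicrosec", 1413378172000000L);
            schema.validate(new JSONObject()
                    .put("event", new JSONObject().put("commonEventHeader", header)));
            System.out.println("minimal event accepted");  // ValidationException otherwise
        }
    }
}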
\ No newline at end of file diff --git a/dcaedt_catalog/.gitignore b/dcaedt_catalog/.gitignore new file mode 100644 index 0000000..a6609e3 --- /dev/null +++ b/dcaedt_catalog/.gitignore @@ -0,0 +1,12 @@ +.classpath +.settings/ +.project +.DS_Store + +#a bit dangerous, make sure we do not have any package named target .. +**/target/ +pom.xml.tag +pom.xml.releaseBackup +pom.xml.versionsBackup +pom.xml.next +release.properties diff --git a/dcaedt_catalog/api/pom.xml b/dcaedt_catalog/api/pom.xml new file mode 100644 index 0000000..234f12f --- /dev/null +++ b/dcaedt_catalog/api/pom.xml @@ -0,0 +1,198 @@ +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Catalog</artifactId> + <version>1806.0.1-SNAPSHOT</version> + </parent> + <artifactId>DCAE-DT-Catalog-API</artifactId> + <packaging>jar</packaging> + <name>DCAE DT Catalog API</name> + + <build> + <sourceDirectory>src/main/java</sourceDirectory> + <testSourceDirectory>src/test/java</testSourceDirectory> + <plugins> + <plugin> + <artifactId>maven-compiler-plugin</artifactId> + <version>3.1</version> + <configuration> + <source>1.8</source> + <target>1.8</target> + <encoding>${project.build.sourceEncoding}</encoding> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-dependency-plugin</artifactId> + <version>2.10</version> + <executions> + <execution> + <id>copy-dependencies</id> + <phase>package</phase> + <goals> + <goal>copy-dependencies</goal> + </goals> + <configuration> + <outputDirectory>${project.build.directory}/deps</outputDirectory> + <overWriteReleases>false</overWriteReleases> + <overWriteSnapshots>false</overWriteSnapshots> + <overWriteIfNewer>true</overWriteIfNewer> + </configuration> + </execution> + </executions> + </plugin> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>buildnumber-maven-plugin</artifactId> + <version>1.4</version> + <executions> + <execution> + <phase>validate</phase> + <goals> + <goal>create</goal> + </goals> + </execution> + </executions> + <configuration> + <doCheck>false</doCheck> + <doUpdate>false</doUpdate> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <version>2.1</version> + <configuration> + <archive> + <manifest> + <addDefaultImplementationEntries>true</addDefaultImplementationEntries> + </manifest> + <manifestEntries> + <Implementation-Build>${buildNumber}</Implementation-Build> + </manifestEntries> + </archive> + </configuration> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <version>3.0.2</version> + <executions> + <execution> + <goals> + <goal>test-jar</goal> + </goals> + </execution> + </executions> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-assembly-plugin</artifactId> + <version>2.6</version> + <configuration> + <descriptorRefs> + <descriptorRef>jar-with-dependencies</descriptorRef> + </descriptorRefs> + <archive> + <manifest> + <mainClass>org.onap.sdc.dcae.catalog.test.Cataloged</mainClass> + </manifest> + <manifestEntries> + <Implementation-Build>${buildNumber}</Implementation-Build> + </manifestEntries> + </archive> + </configuration> + <!-- <executions> 
<execution> <id>make-assembly</id> this is used for + inheritance merges <phase>package</phase> bind to the packaging phase <goals> + <goal>single</goal> </goals> </execution> </executions> --> + </plugin> + </plugins> + </build> + <repositories> + <repository> + <snapshots> + <enabled>false</enabled> + </snapshots> + <id>jcenter</id> + <name>Bintray JCenter</name> + <url>http://repo1.maven.org/maven2/</url> + </repository> + </repositories> + <dependencies> + <dependency> + <groupId>commons-jxpath</groupId> + <artifactId>commons-jxpath</artifactId> + <version>1.3</version> + </dependency> + <dependency> + <groupId>org.apache.commons</groupId> + <artifactId>commons-lang3</artifactId> + <version>3.5</version> + </dependency> + <dependency> + <groupId>org.apache.httpcomponents</groupId> + <artifactId>httpasyncclient</artifactId> + <version>4.1</version> + </dependency> + <dependency> + <groupId>commons-io</groupId> + <artifactId>commons-io</artifactId> + <version>2.4</version> + </dependency> + <dependency> + <groupId>commons-cli</groupId> + <artifactId>commons-cli</artifactId> + <version>1.3</version> + </dependency> + + <dependency> + <groupId>org.json</groupId> + <artifactId>json</artifactId> + <version>20160810</version> + </dependency> + + <dependency> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Catalog-Commons</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Catalog-ASDC</artifactId> + <version>${project.version}</version> + </dependency> + + <dependency> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Validator-Checker</artifactId> + <version>${project.version}</version> + </dependency> + + <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + <version>4.12</version> + <scope>test</scope> + </dependency> + + <dependency> + <groupId>org.mockito</groupId> + <artifactId>mockito-core</artifactId> + <version>1.10.19</version> + <scope>test</scope> + </dependency> + + <dependency> + <groupId>org.assertj</groupId> + <artifactId>assertj-core</artifactId> + <!-- use 2.8.0 for Java 7 projects --> + <version>3.8.0</version> + <scope>test</scope> + </dependency> + + + </dependencies> +</project> diff --git a/dcaedt_catalog/api/src/main/java/org/onap/sdc/dcae/catalog/Catalog.java b/dcaedt_catalog/api/src/main/java/org/onap/sdc/dcae/catalog/Catalog.java new file mode 100644 index 0000000..b73bb09 --- /dev/null +++ b/dcaedt_catalog/api/src/main/java/org/onap/sdc/dcae/catalog/Catalog.java @@ -0,0 +1,440 @@ +package org.onap.sdc.dcae.catalog; + +import java.net.URI; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.LinkedList; +import java.util.HashMap; +import java.util.EnumSet; + +import org.json.JSONObject; +import org.onap.sdc.dcae.catalog.commons.Action; +import org.onap.sdc.dcae.catalog.commons.Future; +import org.onap.sdc.dcae.catalog.commons.Futures; +import org.onap.sdc.dcae.catalog.commons.Proxies; + + +import org.json.JSONArray; + +/* + * + */ +public interface Catalog { + + + public abstract URI getUri(); + + public abstract String namespace(); + + public abstract boolean same(Catalog theCatalog); + + public abstract <T> T proxy(JSONObject theData, Class<T> theType); + + + /* Base class for all Catalog objects. 
+ */ + public static interface Element<T extends Element<T>> { + + /** + * provide a typed 'self' reference + */ + public default T self() { return (T)this; } + + /** + */ + public default Class<T> selfClass() { + return (Class<T>)getClass().getInterfaces()[0]; + } + + /* */ + public Catalog catalog(); + + /** + */ + public String id(); + + /** + * Direct access to the underlying JSON object. + * Warning: Modifications to the JSON object are reflected in the Element. + */ + public JSONObject data(); + + /** + * Provides the labels of the artifacts (we use labels to type/classify the + * neo4j artifacts, nodes and edges). + * Currently not all queries retrieve the labels. + */ + public String[] labels(); + + /* Allows for typed deep exploration of the backing JSON data structure + * <pre> + * {@code + * element("type", Type.class); + * } + * </pre> + * + * @arg theName name of a JSON entry; it must map to another JSONObject. + * @arg theType the expected wrapping catalog artifact type + * @return the JSON entry wrapped in the specified type + */ + public default <E extends Element<E>> E element(String theName, Class<E> theType) { + JSONObject elemData = data().optJSONObject(theName); + if (elemData == null) + return null; + else + return catalog().proxy(elemData, theType); + } + + /* Similar to {@link #element(String,Class)} but for collection wrapping. + * Example: + * <pre> + * {@code + * elements("nodes", Nodes.class); + * } + * </pre> + */ + public default <E extends Elements> E elements(String theName, Class<E> theType) { + //throws ReflectiveOperationException { + JSONArray elemsData = data().optJSONArray(theName); + if (elemsData == null) { + return null; + } + else { + Class etype = Proxies.typeArgument(theType); + Elements elems = null; + try { + elems = theType.newInstance(); + } + catch (ReflectiveOperationException rox) { + throw new RuntimeException("Failed to instantiate " + theType, rox); + } + + try{ + for (Iterator i = elemsData.iterator(); i.hasNext();) { + JSONObject elemData = (JSONObject)i.next(); + elems.add(catalog().proxy(elemData, etype)); + } + } + catch(Exception e){ + throw new RuntimeException("Failed to fetch json data ", e); + } + return (E)elems; + } + } + + /* + */ + public default boolean same(Element theElem) { + return this.catalog().same(theElem.catalog()) && + this.id().equals(theElem.id()); + } + } + + /* + * Base class for all collections of elements. 
+ */ + public static class Elements<T extends Element> + extends LinkedList<T> { + + public String toString() { + StringBuilder sb = new StringBuilder("["); + for (Element el: this) { + sb.append(el.selfClass().getSimpleName()) + .append("(") + .append(el.data()) + .append("),"); + } + sb.append("]"); + return sb.toString(); + } + } + + /* + * We need this contraption in order to store a mix of Folders and CatalogItem + * instances (Elements in itself is not good because it is defined around a + * type variable, so we cannot use reflection to determine the type at runtime + * - generics are resolved at compile time) + */ + public static class Mixels extends Elements<Element> { + } + + /* + */ + public static interface Item<T extends Item<T>> extends Element<T> { + + public String name(); + + public String description(); + + /* catalog item native identifier */ + public String itemId(); + + /* similar to @ItemAction#withModels + */ + default public Future<Templates> models() { + Templates t = elements("models", Templates.class); + if (t != null) + return Futures.succeededFuture(t); + else + return Futures.advance(catalog().item(itemId()) + .withModels() + .execute(), + item -> (Templates)item.elements("models", Templates.class)); + } + + /* similar to @ItemAction#withAnnotations + */ + default public Future<Annotations> annotations() { + Annotations a = elements("annotations", Annotations.class); + if (a != null) + return Futures.succeededFuture(a); + else + return Futures.advance(catalog().item(itemId()) + .withAnnotations() + .execute(), + item -> (Annotations)item.elements("annotations", Annotations.class)); + } + } + + /* + * Collection of catalog items. + */ + public static class Items extends Elements<Item> { + } + + /* + */ + public static interface Folder extends Element<Folder> { + + public String name(); + + public String description(); + + public String itemId(); + + /* the namespace is immutable */ + public default String namespace() { + return catalog().namespace(); + } + + /* + */ + default public Future<Items> items() { + Items i = elements("items", Items.class); + if (i != null) + return Futures.succeededFuture(i); + else + return Futures.advance(catalog().folder(itemId()) + .withItems() + .execute(), + folder -> (Items)folder.elements("items", Items.class)); + } + + /* + */ + default public Future<Folders> parts() { + Folders f = elements("parts", Folders.class); + if (f != null) + return Futures.succeededFuture(f); + else + return Futures.advance(catalog().folder(itemId()) + .withParts() + .execute(), + folder -> (Folders)folder.elements("parts", Folders.class)); + } + + /* + */ + public Future<Folders> partof(); + + } + + + public static class Folders extends Elements<Folder> { + } + + //no predefined properties here + public static interface Annotation extends Element<Annotation> { + + public default String namespace() { + return catalog().namespace(); + } + } + + public static class Annotations extends Elements<Annotation> { + } + + /** + * A TOSCA template. + * When a deep loading method is used to obtain a Template, its collection + * of inputs and nodes will be immediately available (and 'cached' within + * the backing JSON object). 
It can be retrieved through a call to + * {@link Element#elements(String,Class)} as in: + * elements("inputs", Inputs.class) + * or + * elements("nodes", Nodes.class) + * + * The same result will be obtained through one of the methods of the + * navigation interface, {@link #inputs()} or {@link #nodes()}; in this case + * the result does not become part of the backing JSONObject. + */ + public static interface Template extends Element<Template> { + + public String name(); + + public String version(); + + public String description(); + + } + + /** + * Collection of {@link Catalog.Template template} instances. + */ + public static class Templates extends Elements<Template> { + } + + + /** + * A TOSCA type declaration. + */ + public interface Type extends Element<Type> { + + public String name(); + + /** + * Allows navigation to the parent {@link Catalog.Type type}, if any. + */ + public Future<Type> derivedfrom(); + + } + + /** + * Collection of {@link Catalog.Type type} instances. + */ + public static class Types extends Elements<Type> { + } + + + public static interface TemplateAction extends Action<Template> { + + public TemplateAction withInputs(); + + public TemplateAction withOutputs(); + + public TemplateAction withNodes(); + + public TemplateAction withNodeProperties(); + + public TemplateAction withNodeRequirements(); + + public TemplateAction withNodePropertiesAssignments(); + + public TemplateAction withNodeCapabilities(); + + public TemplateAction withNodeCapabilityProperties(); + + public TemplateAction withNodeCapabilityPropertyAssignments(); + + public TemplateAction withPolicies(); + + public TemplateAction withPolicyProperties(); + + public TemplateAction withPolicyPropertiesAssignments(); + + @Override + public Future<Template> execute(); + + } + + /* + */ + public static interface TypeAction extends Action<Type> { + + public TypeAction withHierarchy(); + + public TypeAction withRequirements(); + + public TypeAction withCapabilities(); + + @Override + public Future<Type> execute(); + + } + + /* + */ + public static interface FolderAction extends Action<Folder> { + + public FolderAction withAnnotations(); + + public FolderAction withAnnotations(String theSelector); + + public FolderAction withItems(); + + public FolderAction withItemAnnotations(); + + public FolderAction withItemAnnotations(String theSelector); + + public FolderAction withItemModels(); + + public FolderAction withParts(); + + public FolderAction withPartAnnotations(); + + public FolderAction withPartAnnotations(String theSelector); + + @Override + public Future<Folder> execute(); + } + + /* + */ + public static interface ItemAction<T extends Item> extends Action<T> { + + public ItemAction<T> withModels(); + + public ItemAction<T> withAnnotations(); + + @Override + public Future<T> execute(); + + } + + /** + */ + public abstract Future<Folders> roots(); + + /** + */ + public abstract Future<Folders> rootsByLabel(String theLabel); + + /** + */ + public abstract Future<Mixels> lookup(JSONObject theSelector); + + public abstract Future<Mixels> lookup(String theAnnotation, JSONObject theSelector); + + /** + */ + public abstract FolderAction folder(String theFolderId); + + /** + */ + public abstract <T extends Item> ItemAction<T> item(String theItemId); + + /** + */ + public abstract TemplateAction template(String theTemplateId); + + /** + */ + public abstract TypeAction type(String theNamespace, String theTypeName); + + + +} diff --git 
a/dcaedt_catalog/api/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCCatalog.java b/dcaedt_catalog/api/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCCatalog.java new file mode 100644 index 0000000..e08f3a6 --- /dev/null +++ b/dcaedt_catalog/api/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCCatalog.java @@ -0,0 +1,1227 @@ +package org.onap.sdc.dcae.catalog.asdc; + +import com.google.common.collect.ImmutableMap; +import org.apache.commons.io.IOUtils; +import org.apache.commons.jxpath.JXPathContext; +import org.apache.commons.jxpath.JXPathNotFoundException; +import org.apache.commons.lang3.StringUtils; +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.dcae.catalog.Catalog; +import org.onap.sdc.dcae.catalog.commons.*; +import org.onap.sdc.dcae.checker.*; + +import java.io.*; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.*; +import java.util.function.BiFunction; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +public class ASDCCatalog implements Catalog { + + private + static final String JXPATH_NOT_FOUND_EXCEPTION = "JXPathNotFoundException {}"; + private + static final String OCCURRENCES = "occurrences"; + private + static final String TOPOLOGY_TEMPLATE_NODE_TEMPLATES = "/topology_template/node_templates"; + private + static final String NODES_NAME = "/nodes[name='"; + private + static final String ITEM_ID = "itemId"; + private + static final String LABELS = "labels"; + private + static final String ARTIFACT_URL = "artifactURL"; + private + static final String CAPABILITY = "capability"; + private + static final String DATABASE = "Database"; + private + static final String COLLECTOR = "Collector"; + private + static final String MICROSERVICE = "Microservice"; + private + static final String ANALYTICS = "Analytics"; + private + static final String POLICY = "Policy"; + private + static final String SOURCE = "Source"; + private + static final String UTILITY = "Utility"; + private + static final String NAME = "name"; + private + static final String ID = "id"; + private + static final String ARTIFACT_NAME = "artifactName"; + private + static final String DESCRIPTION = "description"; + private + static final String MODELS = "models"; + private + static final String ARTIFACTS = "artifacts"; + private + static final String ITEMS = "items"; + private + static final String PROPERTIES = "']/properties"; + private + static final String TOPOLOGY_TEMPLATE_NODE_TEMPLATES1 = "/topology_template/node_templates/"; + private + static final String PROPERTIES_NAME = "']/properties[name='"; + private + static final String CAPABILITIES = "']/capabilities"; + private + static final String CAPABILITIES_NAME = "']/capabilities[name='"; + + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private ASDC asdc; + + private JSONObject folders = new JSONObject(); + private String[] folderFields = new String[] {ID, ITEM_ID, NAME}; + + private ProxyBuilder proxies; + private Map<Target, JXPathContext> contexts = new HashMap<Target, JXPathContext>(); + + // resource and its catalog + private Map<UUID, org.onap.sdc.dcae.checker.Catalog> catalogs = new HashMap<UUID, org.onap.sdc.dcae.checker.Catalog>(); + + public ASDCCatalog(URI theURI) { + + this.asdc = new ASDC(); + this.asdc.setUri(theURI); + + initFolders(); + + this.proxies = new ProxyBuilder().withConverter(v -> 
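+ // (the converter below maps raw String ids found in the JSON data to UUID,
+ // so that proxy getters such as Resource#uuid() can be strongly typed)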
v == null ? null : UUID.fromString(v.toString()), UUID.class) + .withExtensions( + new ImmutableMap.Builder<String, BiFunction<Proxy, Object[], Object>>().put("data", (proxy, args) -> proxy.data()) + .build()).withContext(new ImmutableMap.Builder<String, Object>().put("catalog", this).build()); + } + + private void initFolders() { + + JSONArray labels = new JSONArray(); + labels.put("Folder"); + labels.put("DCAE"); + labels.put("Superportfolio"); // for CCD compatibility + + folders.put(DATABASE, new JSONObject().put(NAME, DATABASE).put(ID, "dcae_database") + .put(ITEM_ID, DATABASE).put(LABELS, labels)); + folders.put(COLLECTOR, new JSONObject().put(NAME, COLLECTOR).put(ID, "dcae_collector") + .put(ITEM_ID, COLLECTOR).put(LABELS, labels)); + folders.put(MICROSERVICE, new JSONObject().put(NAME, MICROSERVICE).put(ID, "dcae_microservice") + .put(ITEM_ID, MICROSERVICE).put(LABELS, labels)); + folders.put(ANALYTICS, new JSONObject().put(NAME, ANALYTICS).put(ID, "dcae_analytics") + .put(ITEM_ID, ANALYTICS).put(LABELS, labels)); + folders.put(POLICY, new JSONObject().put(NAME, POLICY).put(ID, "dcae_policy").put(ITEM_ID, POLICY) + .put(LABELS, labels)); + folders.put(SOURCE, new JSONObject().put(NAME, SOURCE).put(ID, "dcae_source").put(ITEM_ID, SOURCE) + .put(LABELS, labels)); + folders.put(UTILITY, new JSONObject().put(NAME, UTILITY).put(ID, "dcae_utility") + .put(ITEM_ID, UTILITY).put(LABELS, labels)); + } + + public URI getUri() { + return this.asdc.getUri(); + } + + public String namespace() { + return "asdc"; + } + + public boolean same(Catalog theCatalog) { + return true; + } + + public <T> T proxy(JSONObject theData, Class<T> theType) { + return proxies.build(theData, theType); + } + + /** */ + public Future<Folders> roots() { + + Folders roots = new Folders(); + for (Iterator fi = folders.keys(); fi.hasNext();) { + roots.add(proxies.build(folders.getJSONObject((String) fi.next()), Folder.class)); + } + return Futures.succeededFuture(roots); + } + + /** */ + public Future<Folders> rootsByLabel(String theLabel) { + + Folders roots = new Folders(); + for (Iterator fi = folders.keys(); fi.hasNext();) { + JSONObject folder = folders.getJSONObject((String) fi.next()); + JSONArray labels = folder.getJSONArray(LABELS); + + for (int i = 0; i < labels.length(); i++) { + if (labels.get(i).equals(theLabel)) { + roots.add(proxies.build(folder, Folder.class)); + } + } + } + return Futures.succeededFuture(roots); + } + + /** */ + public Future<Mixels> lookup(JSONObject theSelector) { + return Futures.succeededFuture(new Mixels()); + } + + public Future<Mixels> lookup(String theAnnotation, JSONObject theSelector) { + return Futures.succeededFuture(new Mixels()); + } + + /** */ + public ItemAction item(String theItemId) { + return new ResourceAction(UUID.fromString(theItemId)); + } + + /** */ + public FolderAction folder(String theFolderId) { + return new FolderAction(theFolderId); + } + + public TemplateAction template(String theId) { + return new TemplateAction(theId); + } + + public TypeAction type(String theItemId, String theName) { + return new TypeAction(UUID.fromString(theItemId), theName); + } + + protected static String resolveTargetName(Target theTarget) { + return (String) ((Map) ((Map) theTarget.getTarget()).get("metadata")).get("template_name"); + } + + protected Object resolve(Target theTarget, String thePath) { + try { + return contexts.get(theTarget).getValue(thePath); + } catch (JXPathNotFoundException pnfx) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), 
"JXPathNotFoundException {}", pnfx); + return null; + } + } + + // covers common TOSCA pattern of single entry maps + public Map.Entry<String, Map> toEntry(Object theValue) { + return (Map.Entry<String, Map>) ((Map) theValue).entrySet().iterator().next(); + } + + protected Map selectEntries(Map theOriginal, String... theKeys) { + Arrays.sort(theKeys); + Map selection = ((Set<Map.Entry>) theOriginal.entrySet()).stream() + .filter(e -> Arrays.binarySearch(theKeys, e.getKey().toString()) >= 0) + .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue())); + return selection; + } + + protected Map evictEntries(Map theOriginal, String... theKeys) { + Arrays.sort(theKeys); + Map selection = ((Set<Map.Entry>) theOriginal.entrySet()).stream() + .filter(e -> Arrays.binarySearch(theKeys, e.getKey().toString()) < 0) + .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue())); + return selection; + } + + protected MapBuilder renderEntry(Map.Entry theEntry, String... theKeys) { + MapBuilder out = new MapBuilder(); + out.put(NAME, theEntry.getKey()); + + for (String key : theKeys) { + out.put(key, ((Map) theEntry.getValue()).get(key)); + } + return out; + } + + protected <T> Stream<T> stream(Iterator<T> theSource) { + return StreamSupport.stream(Spliterators.spliteratorUnknownSize(theSource, + Spliterator.NONNULL | Spliterator.DISTINCT | Spliterator.IMMUTABLE), false); + } + + private JSONArray selectModels(JSONArray theArtifacts) { + JSONArray models = new JSONArray(); + if (theArtifacts == null) { + return models; + } + + for (int i = 0; i < theArtifacts.length(); i++) { + JSONObject artifact = theArtifacts.getJSONObject(i); + String name = artifact.optString(ARTIFACT_NAME); + if (name != null && StringUtils.containsIgnoreCase(name, "template")) { + models.put(new JSONObject().putOpt(NAME, artifact.optString(ARTIFACT_NAME)) + .putOpt("version", artifact.optString("artifactVersion")) + .putOpt(DESCRIPTION, artifact.optString("artifactType")) + .putOpt(ID, artifact.optString(ARTIFACT_URL)) + .putOpt(ITEM_ID, artifact.optString(ARTIFACT_URL))); + } + } + return models; + } + + private JSONObject patchResource(JSONObject theResource) { + + theResource.remove("resources"); + theResource.putOpt(ID, theResource.opt("uuid")); + theResource.putOpt(ITEM_ID, theResource.opt("uuid")); + + return theResource; + } + + private static void dumpTargets(String theDirName, Collection<Target> theTargets) { + try { + File targetDir = new File(theDirName); + if (!targetDir.exists() && !targetDir.mkdirs()) { + throw new IllegalStateException("Couldn't create dir: " + theDirName); + } + for (Target t : theTargets) { + FileWriter dump = new FileWriter(new File(theDirName, t.getName())); + IOUtils.copy(t.open(), dump); + dump.close(); + } + } catch (IOException iox) { + debugLogger.log(LogLevel.DEBUG,"ASDCCatalog", "IOException {}", iox); + } + } + + private static URI asURI(String theValue) { + try { + return new URI(theValue); + } catch (URISyntaxException urisx) { + throw new IllegalArgumentException("Invalid URI", urisx); + } + } + + private static UUID asUUID(String theValue) { + return UUID.fromString(theValue); + } + + private org.onap.sdc.dcae.checker.Catalog getCatalog(UUID theResourceId) { + return this.catalogs.get(theResourceId); + } + + private String getArtifactVersion(JSONObject theData) { + return theData.getString("artifactVersion"); + } + + private String getArtifactName(JSONObject theData) { + return theData.getString(ARTIFACT_NAME); + } + + private String getArtifactURL(JSONObject theData) { + 
return theData.getString(ARTIFACT_URL);
+ }
+
+ private URI getArtifactURI(JSONObject theData) {
+ return asURI(theData.getString(ARTIFACT_URL));
+ }
+
+ /** */
+ public class ResourceAction implements Catalog.ItemAction<Resource> {
+
+ private UUID iid;
+ private boolean doModels;
+
+ ResourceAction(UUID theItemId) {
+ this.iid = theItemId;
+ }
+
+ public ResourceAction withModels() {
+ this.doModels = true;
+ return this;
+ }
+
+ public ResourceAction withAnnotations() {
+ return this;
+ }
+
+ @Override
+ public Future<Resource> execute() {
+
+ return Futures.advance(asdc.getResource(this.iid, JSONObject.class), resourceData -> {
+ if (doModels) {
+ resourceData.put(MODELS, selectModels(resourceData.optJSONArray(ARTIFACTS)));
+ }
+ return proxies.build(patchResource(resourceData), Resource.class);
+ });
+ }
+
+ protected Future<JSONObject> executeRaw() {
+
+ return Futures.advance(asdc.getResource(this.iid, JSONObject.class), resourceData -> {
+ if (doModels) {
+ resourceData.put(MODELS, selectModels(resourceData.optJSONArray(ARTIFACTS)));
+ }
+ return resourceData;
+ }, resourceError -> new RuntimeException("Failed to retrieve item " + this.iid, resourceError));
+ }
+ }
+
+ public class FolderAction implements Catalog.FolderAction {
+
+ private boolean doItemModels;
+ private String folderName;
+
+ // use the id/UUID of the folder ??
+ private FolderAction(String theFolderName) {
+ this.folderName = theFolderName;
+ }
+
+ public FolderAction withAnnotations() {
+ return this;
+ }
+
+ public FolderAction withAnnotations(String theSelector) {
+ return this;
+ }
+
+ public FolderAction withItems() {
+ return this;
+ }
+
+ public FolderAction withItemAnnotations() {
+ return this;
+ }
+
+ public FolderAction withItemAnnotations(String theSelector) {
+ return this;
+ }
+
+ public FolderAction withItemModels() {
+ doItemModels = true;
+ return this;
+ }
+
+ public FolderAction withParts() {
+ return this;
+ }
+
+ public FolderAction withPartAnnotations() {
+ return this;
+ }
+
+ public FolderAction withPartAnnotations(String theSelector) {
+ return this;
+ }
+
+ @Override
+ public Future<Folder> execute() {
+
+ JSONObject folder = folders.optJSONObject(this.folderName);
+ if (folder == null) {
+ return Futures.failedFuture(new RuntimeException("No such folder " + this.folderName));
+ }
+
+ final JSONObject folderView = new JSONObject(folder, folderFields);
+
+ return Futures.advance(asdc.getResources(JSONArray.class, "DCAE Component", this.folderName),
+ resourcesData -> {
+
+ Actions.CompoundAction<Resource> itemsAction = new Actions.BasicCompoundAction<Resource>();
+ for (int i = 0; i < resourcesData.length(); i++) {
+ JSONObject resource = resourcesData.getJSONObject(i);
+
+ if (doItemModels) {
+ itemsAction
+ .addAction(new ResourceAction(asUUID(resource.getString("uuid"))).withModels());
+ } else {
+ folderView.append(ITEMS, patchResource(resource));
+ }
+ }
+
+ try {
+ List<Resource> items = itemsAction.execute().waitForResult();
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Number of DCAE items for {} is {}", this.folderName, items.size());
+
+ for (Resource res : filterLatestVersion(items)) {
+ folderView.append(ITEMS, patchResource(res.data()));
+ }
+ } catch (Exception x) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Exception {}", x);
+ throw new RuntimeException("Failed to retrieve folder items", x);
+ }
+
+ return proxies.build(folderView, Folder.class);
+ }, resourcesError -> new RuntimeException("Failed to retrieve resources", resourcesError));
+ }
+
+ public Collection<Resource> filterLatestVersion(Collection<Resource> items) throws IllegalArgumentException {
+ if (items == null) {
+ throw new IllegalArgumentException("null is not acceptable as a list of items");
+ }
+ Map<UUID, Resource> itemsMap = new HashMap<UUID, Resource>(items.size());
+ for (Resource r : items) {
+ if (itemsMap.containsKey(r.invariantUUID()) && isNewerVersion(itemsMap, r)) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Skipping item {} since a newer version of it was already added", r.toString());
+ continue;
+ }
+ itemsMap.put(r.invariantUUID(), r);
+ }
+ return itemsMap.values();
+ }
+
+ private boolean isNewerVersion(Map<UUID, Resource> itemsMap, Resource r) {
+ return Float.valueOf(itemsMap.get(r.invariantUUID()).version()) > Float.valueOf(r.version());
+ }
+
+ }
+
+ /** */
+ public class TemplateAction implements Catalog.TemplateAction {
+
+ private String artifactId;
+ private Target target;
+ private org.onap.sdc.dcae.checker.Catalog catalog;
+ private JXPathContext ctx = JXPathContext.newContext(new HashMap());
+
+ private boolean doNodes, doNodeProperties, doNodePropertiesAssignments, doNodeRequirements, doNodeCapabilities,
+ doNodeCapabilityProperties, doNodeCapabilityPropertyAssignments;
+
+ protected TemplateAction(Target theTarget) {
+ this.target = theTarget;
+ }
+
+ /*
+ * expected to be the relative url provided by asdc for the template
+ * artifact
+ */
+ protected TemplateAction(String theArtifactId) {
+ this.artifactId = theArtifactId;
+ }
+
+ public TemplateAction withInputs() {
+ return this;
+ }
+
+ public TemplateAction withOutputs() {
+ return this;
+ }
+
+ public TemplateAction withNodes() {
+ this.doNodes = true;
+ return this;
+ }
+
+ protected TemplateAction doNodes() {
+ if (!this.doNodes) {
+ return this;
+ }
+
+ Map nodes = (Map) resolve(this.target, TOPOLOGY_TEMPLATE_NODE_TEMPLATES);
+ if (nodes == null) {
+ return this;
+ }
+
+ ctx.setValue("/nodes",
+ nodes.entrySet().stream()
+ .map(nodeEntry -> new MapBuilder().put(NAME, ((Map.Entry) nodeEntry).getKey())
+ .put(DESCRIPTION, this.artifactId)
+ .putAll(selectEntries((Map) ((Map.Entry) nodeEntry).getValue(), "type")).build())
+ .collect(Collectors.toList()));
+
+ return this;
+ }
+
+ // pre-requisite: a call to 'withNodes'
+ public TemplateAction withNodeProperties() {
+ this.doNodeProperties = true;
+ return this;
+ }
+
+ protected TemplateAction doNodeProperties() {
+ if (!this.doNodeProperties) {
+ return this;
+ }
+
+ Map nodes = (Map) resolve(this.target, TOPOLOGY_TEMPLATE_NODE_TEMPLATES);
+ if (nodes == null) {
+ return this;
+ }
+
+ nodes.entrySet().stream().forEach(node -> ctx.setValue(
+ NODES_NAME + ((Map.Entry) node).getKey() + PROPERTIES,
+ stream(catalog.facets(Construct.Node, Facet.properties,
+ ((Map) ((Map.Entry) node).getValue()).get("type").toString()))
+ .map(propEntry -> new MapBuilder().put(NAME, propEntry.getKey())
+ .putAll((Map) propEntry.getValue()).build())
+ .collect(Collectors.toList())));
+
+ return this;
+ }
+
+ // pre-requisite: a call to 'withNodeProperties'
+ public TemplateAction withNodePropertiesAssignments() {
+ this.doNodePropertiesAssignments = true;
+ return this;
+ }
+
+ protected TemplateAction doNodePropertiesAssignments() {
+ if (!this.doNodePropertiesAssignments) {
+ return this;
+ }
+
+ Map nodes = (Map) resolve(this.target, TOPOLOGY_TEMPLATE_NODE_TEMPLATES);
+ if (nodes == null) {
+ return this;
+ }
+
+ nodes.entrySet().stream().forEach(node -> {
+ List nodeProps = null;
+ try {
+ nodeProps
= (List) ctx.getValue(NODES_NAME + ((Map.Entry) node).getKey() + PROPERTIES); + } catch (JXPathNotFoundException pnfx) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), JXPATH_NOT_FOUND_EXCEPTION, pnfx); + return; + } + + nodeProps.stream().forEach(prop -> { + // pick from + String propPath = TOPOLOGY_TEMPLATE_NODE_TEMPLATES1 + ((Map.Entry) node).getKey() + + "/properties/" + ((Map) prop).get(NAME); + Object propValue = resolve(this.target, propPath); + // to conform with the db based api we should analyze the + // value for function calls + // dump at .. + propPath = NODES_NAME + ((Map.Entry) node).getKey() + PROPERTIES_NAME + + ((Map) prop).get(NAME) + "']"; + if (propValue != null) { + ctx.setValue(propPath + "/assignment", + new ImmutableMap.Builder().put("value", propValue).build()); + } + }); + }); + + return this; + } + + protected Map renderRequirementDefinition(Map.Entry theReq) { + Map def = (Map) theReq.getValue(); + return new MapBuilder().put(NAME, theReq.getKey()) + // capability must be present + .put(CAPABILITY, + new MapBuilder().put(NAME, def.get(CAPABILITY)) + .put(ID, this.target.getName() + "/" + def.get(CAPABILITY)).build()) + .putAll(evictEntries(def, CAPABILITY)).build(); + } + + // TODO: see how this comes out of neo and match it + protected Map renderRequirementAssignment(Map.Entry theReq) { + Map def = (Map) theReq.getValue(); + return new MapBuilder().put(NAME, theReq.getKey()) + // capability must be present + .put(CAPABILITY, + new MapBuilder().put(NAME, def.get(CAPABILITY)) + // we provide an id only if the capability + // points to a type + .putOpt(ID, + catalog.hasType(Construct.Capability, (String) def.get(CAPABILITY)) + ? (this.target.getName() + "/" + def.get(CAPABILITY)) : null) + .build()) + .putAll(evictEntries(def, CAPABILITY)).build(); + } + + public TemplateAction withNodeRequirements() { + this.doNodeRequirements = true; + return this; + } + + TemplateAction doNodeRequirements() { + if (!this.doNodeRequirements) { + return this; + } + + // requirements come first from the type and then can be further + // refined by their assignment within the + // node template + Map nodes = (Map) resolve(this.target, TOPOLOGY_TEMPLATE_NODE_TEMPLATES); + if (nodes == null) { + return this; + } + + // type + nodes.entrySet().stream() + .forEach( + node -> ctx + .setValue( + NODES_NAME + + ((Map.Entry) node) + .getKey() + + "']/requirements", + StreamSupport + .stream(Spliterators.spliteratorUnknownSize( + catalog.requirements(((Map) ((Map.Entry) node).getValue()) + .get("type").toString()), + Spliterator.NONNULL | Spliterator.DISTINCT + | Spliterator.IMMUTABLE), + false) + .map((Map.Entry reqEntry) -> renderRequirementDefinition(reqEntry)) + .collect(Collectors.toList()))); + + // merge assignments on top of definitions + nodes.entrySet().stream().forEach(node -> { + List nodeReqsAssigns = (List) resolve(this.target, + TOPOLOGY_TEMPLATE_NODE_TEMPLATES1 + ((Map.Entry) node).getKey() + "/requirements"); + if (nodeReqsAssigns == null) { + return; + } + nodeReqsAssigns.stream().forEach(req -> { + Map.Entry reqAssign = toEntry(req); + catalog.mergeDefinitions((Map) ctx.getValue(NODES_NAME + ((Map.Entry) node).getKey() + + "']/requirements[name='" + reqAssign.getKey() + "']"), + renderRequirementAssignment(reqAssign)); + }); + }); + + return this; + } + + public TemplateAction withNodeCapabilities() { + this.doNodeCapabilities = true; + return this; + } + + protected Map renderCapabilityDefinition(Map.Entry theCap) { + Map def = (Map) theCap.getValue(); + 
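+ // an illustrative sketch of the rendering done below: a definition entry such as
+ //   host: {type: tosca.capabilities.Container, occurrences: [1, 1]}
+ // would come out as
+ //   {name: "host",
+ //    type: {name: "tosca.capabilities.Container",
+ //           id: "<templateName>/tosca.capabilities.Container"},
+ //    occurrences: [1, 1]}
+ // i.e. 'type' is re-rendered as a name/id pair and 'properties' are evicted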
return new MapBuilder().put(NAME, theCap.getKey()) + .put("type", + new MapBuilder().put(NAME, def.get("type")) + .put(ID, this.target.getName() + "/" + def.get("type")).build()) + .putAll(evictEntries(def, "properties", "type")).build(); + } + + TemplateAction doNodeCapabilities() { + if (!this.doNodeCapabilities) { + return this; + } + + Map nodes = (Map) resolve(this.target, TOPOLOGY_TEMPLATE_NODE_TEMPLATES); + if (nodes == null) { + return this; + } + + // collect capabilities through the node type hierarchy + + // we evict the properties from the node type capability declaration + // (when declaring a capability with the + // node type some re-definition of capability properties can take + // place). + nodes.entrySet().stream() + .forEach(node -> ctx.setValue(NODES_NAME + ((Map.Entry) node).getKey() + CAPABILITIES, + + stream(catalog.facets(Construct.Node, Facet.capabilities, + ((Map) ((Map.Entry) node).getValue()).get("type").toString())) + .map((Map.Entry capEntry) -> renderCapabilityDefinition(capEntry)) + .collect(Collectors.toList()))); + + return this; + } + + public TemplateAction withNodeCapabilityProperties() { + this.doNodeCapabilityProperties = true; + return this; + } + + TemplateAction doNodeCapabilityProperties() { + + if (!this.doNodeCapabilityProperties) { + return this; + } + + Map nodes = (Map) resolve(this.target, TOPOLOGY_TEMPLATE_NODE_TEMPLATES); + if (nodes == null) { + return this; + } + + // pick up all the properties from the capability type hierarchy + // definition + nodes.entrySet().stream().forEach(node -> { + List nodeCapabilities = (List) ctx + .getValue(NODES_NAME + ((Map.Entry) node).getKey() + CAPABILITIES); + if (nodeCapabilities == null) { + return; + } + + // collect properties from the capability type hierarchy + nodeCapabilities.stream().forEach(capability -> { + List capabilityProperties = StreamSupport + .stream(Spliterators.spliteratorUnknownSize( + catalog.facets(Construct.Capability, Facet.properties, + ((Map)((Map)capability).get("type")).get(NAME).toString()), + Spliterator.NONNULL | Spliterator.DISTINCT | Spliterator.IMMUTABLE), false) + .map((Map.Entry capEntry) -> new MapBuilder().put(NAME, capEntry.getKey()) + .putAll((Map) capEntry.getValue()).build()) + .collect(Collectors.toList()); + + if (!capabilityProperties.isEmpty()) { + ctx.setValue(NODES_NAME + ((Map.Entry) node).getKey() + CAPABILITIES_NAME + + ((Map) capability).get(NAME) + PROPERTIES, capabilityProperties); + } + }); + + // and go over the node type (hierarchy) and pick up any + // re-definitions from there. 
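+ // the JXPath locations used throughout compose from the string constants
+ // declared at the top of the class; for example
+ //   NODES_NAME + "x" + CAPABILITIES_NAME + "y" + PROPERTIES_NAME + "z" + "']"
+ // expands to
+ //   /nodes[name='x']/capabilities[name='y']/properties[name='z']
+ // which is the location the merge below writes the re-definitions back to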
+ StreamSupport
+ .stream(Spliterators.spliteratorUnknownSize(
+ catalog.facets(Construct.Node, Facet.capabilities,
+ ((Map) ((Map.Entry) node).getValue()).get("type").toString()),
+ Spliterator.NONNULL | Spliterator.DISTINCT | Spliterator.IMMUTABLE), false)
+ .forEach((Map.Entry capability) -> {
+ // for each capability property that has some node
+ // type level re-definition
+ Map properties = (Map) ((Map) capability.getValue()).get("properties");
+ if (properties == null) {
+ return;
+ }
+
+ properties.entrySet().stream().forEach(property -> {
+ String propertyLoc = NODES_NAME + ((Map.Entry) node).getKey()
+ + CAPABILITIES_NAME + ((Map) capability).get(NAME)
+ + PROPERTIES_NAME + ((Map.Entry) property).getKey() + "']";
+ ctx.setValue(propertyLoc, catalog.mergeDefinitions((Map) ctx.getValue(propertyLoc),
+ (Map) ((Map.Entry) property).getValue()));
+ });
+ });
+ });
+
+ return this;
+ }
+
+ public TemplateAction withNodeCapabilityPropertyAssignments() {
+ this.doNodeCapabilityPropertyAssignments = true;
+ return this;
+ }
+
+ TemplateAction doNodeCapabilityPropertyAssignments() {
+ if (!this.doNodeCapabilityPropertyAssignments) {
+ return this;
+ }
+
+ // this is wasteful: we go over all declared
+ // nodes/capabilities/properties and check if there is an assigned
+ // value in the actual template. It would be more efficient to approach
+ // the problem from the other direction: go over the declared
+ // assignments and set them in the output structure ..
+
+ List nodes = null;
+ try {
+ nodes = (List) ctx.getValue("/nodes");
+ } catch (JXPathNotFoundException pnfx) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), JXPATH_NOT_FOUND_EXCEPTION, pnfx);
+ return this;
+ }
+
+ nodes.stream().forEach(node -> {
+ List capabilities = (List) ctx.getValue(NODES_NAME + ((Map) node).get(NAME) + CAPABILITIES);
+ if (capabilities == null) {
+ return;
+ }
+
+ capabilities.stream().forEach(capability -> {
+ List properties = null;
+ try {
+ properties = (List) ctx.getValue(NODES_NAME + ((Map) node).get(NAME)
+ + CAPABILITIES_NAME + ((Map) capability).get(NAME) + PROPERTIES);
+ } catch (JXPathNotFoundException pnfx) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), JXPATH_NOT_FOUND_EXCEPTION, pnfx);
+ return;
+ }
+
+ properties.stream().forEach(property -> {
+ String location = NODES_NAME + ((Map) node).get(NAME) + CAPABILITIES_NAME
+ + ((Map) capability).get(NAME) + PROPERTIES_NAME + ((Map) property).get(NAME)
+ + "']/assignment";
+
+ // pick the value from the original
+ try {
+ Object assignment = resolve(this.target,
+ TOPOLOGY_TEMPLATE_NODE_TEMPLATES1 + ((Map) node).get(NAME) + "/capabilities/"
+ + ((Map) capability).get(NAME) + "/properties/"
+ + ((Map) property).get(NAME));
+ if (assignment != null) {
+ ctx.setValue(location, new ImmutableMap.Builder().put("value", assignment).build());
+ }
+ } catch (JXPathNotFoundException pnfx) {
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), JXPATH_NOT_FOUND_EXCEPTION, pnfx);
+ // it's ok, no assignment
+ }
+ });
+ });
+ });
+
+ return this;
+ }
+
+ public TemplateAction withPolicies() {
+ return this;
+ }
+
+ public TemplateAction withPolicyProperties() {
+ return this;
+ }
+
+ public TemplateAction withPolicyPropertiesAssignments() {
+ return this;
+ }
+
+ public Future<Template> execute() {
+
+ if (this.target == null) {
+
+ String[] parts = this.artifactId.split("/");
+ if (parts.length != 8) {
+ return Futures
+ .failedFuture(new Exception("Unexpected artifact id for template " + this.artifactId));
+ }
+
+ UUID resourceId = asUUID(parts[5]);
+ this.catalog = ASDCCatalog.this.catalogs.get(resourceId);
+
+ // if we find a catalog for this resource we have to figure out
+ // if it contains the required target ..
+
+ try {
+ JSONObject resource = new ResourceAction(resourceId).executeRaw().waitForResult();
+
+ Checker checker = new Checker();
+ TargetLocator locator = new ASDCLocator(resource.getJSONArray(ARTIFACTS),
+ ASDCCatalog.this.catalogs.get(resourceId));
+ checker.setTargetLocator(locator);
+
+ Target template = locator.resolve("template");
+ if (template == null) {
+ return Futures.failedFuture(new Exception("Failed to locate template in " + resource));
+ }
+
+ checker.check(template);
+
+ for (Target t : checker.targets()) {
+ if (t.getReport().hasErrors()) {
+ dumpTargets(resourceId.toString(), checker.targets());
+ return Futures.failedFuture(new Exception("Failed template validation: " + t.getReport()));
+ }
+ }
+
+ this.target = template;
+ this.catalog = checker.catalog();
+ ASDCCatalog.this.catalogs.put(resourceId, this.catalog);
+ // we should only be doing this if we discovered an update
+ // (by checking timestamps). Actually, we should
+ // only do the artifact fetching if we detect an update
+ ASDCCatalog.this.contexts.put(template, JXPathContext.newContext(template.getTarget()));
+ } catch (Exception x) {
+ return Futures.failedFuture(x);
+ }
+ }
+
+ this.doNodes().doNodeProperties().doNodePropertiesAssignments().doNodeRequirements().doNodeCapabilities()
+ .doNodeCapabilityProperties().doNodeCapabilityPropertyAssignments();
+
+ JSONObject pack = new JSONObject((Map) ctx.getContextBean()).put(NAME, this.target.getName().toString())
+ .put(ID, this.target.getLocation().toString())
+ .put(ITEM_ID, this.target.getLocation().toString());
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), pack.toString(2));
+
+ return Futures.succeededFuture(proxies.build(pack, Template.class));
+ }
+ }
+
+ public class TypeAction implements Catalog.TypeAction {
+
+ private String name;
+ private UUID resourceId;
+ private JXPathContext ctx;
+
+ private boolean doHierarchy = false, doRequirements = false, doCapabilities = false;
+
+ private TypeAction(UUID theResourceId, /* Construct theConstruct, */ String theName) {
+ this.resourceId = theResourceId;
+ this.name = theName;
+ }
+
+ public TypeAction withHierarchy() {
+ this.doHierarchy = true;
+ return this;
+ }
+
+ TypeAction doHierarchy(org.onap.sdc.dcae.checker.Catalog theCatalog) {
+ if (!this.doHierarchy) {
+ return this;
+ }
+
+ ctx.setValue("/hierarchy",
+ stream(theCatalog.hierarchy(Construct.Node, this.name)).skip(1) // skip
+ // self
+ .map((Map.Entry type) -> new MapBuilder()
+ .put(NAME, type.getKey()).put(ID, resourceId + "/" + type.getKey())
+ .putOpt(DESCRIPTION, ((Map) type.getValue()).get(DESCRIPTION)).build())
+ // renderEntry((Map.Entry)type,
+ // "description").build())
+ .collect(Collectors.toList()));
+ return this;
+ }
+
+ public TypeAction withRequirements() {
+ this.doRequirements = true;
+ return this;
+ }
+
+ TypeAction doRequirements(org.onap.sdc.dcae.checker.Catalog theCatalog) {
+ if (!this.doRequirements) {
+ return this;
+ }
+
+ ctx.setValue("requirements", stream(theCatalog.requirements(this.name)).map((Map.Entry req) -> {
+ String capability = (String) ((Map) req.getValue()).get(CAPABILITY),
+ node = (String) ((Map) req.getValue()).get("node");
+ return new MapBuilder().put(NAME, req.getKey()).put(ID, resourceId + "/" + req.getKey())
+ .put(OCCURRENCES, ((Map) req.getValue()).get(OCCURRENCES))
+ .put(CAPABILITY,
new MapBuilder().put(NAME, capability) + // if the capability points to a + // capability type then encode + // the type reference, else it is a name + // (within a node type) + .put(ID, + getCatalog(resourceId).hasType(Construct.Capability, capability) + ? (resourceId + "/" + capability) : capability.toString()) + .build()) + .put("node", new MapBuilder().putOpt(NAME, node).putOpt(ID, node == null ? null + : (resourceId + "/" + node)).buildOpt()) + .put("relationship", ((Map) req.getValue()).get("relationship")) + // renderEntry((Map.Entry)requirement, "occurrences", + // "node", "capability", "relationship") + .build(); + }).collect(Collectors.toList())); + + return this; + } + + public TypeAction withCapabilities() { + this.doCapabilities = true; + return this; + } + + TypeAction doCapabilities(org.onap.sdc.dcae.checker.Catalog theCatalog) { + if (!this.doCapabilities) { + return this; + } + + ctx.setValue("capabilities", + stream(theCatalog + .facets(Construct.Node, Facet.capabilities, + this.name)) + .map((Map.Entry capability) -> new MapBuilder() + .put(NAME, capability.getKey()).put("type", + new MapBuilder() + .put(NAME, ((Map) capability.getValue()) + .get("type")) + .put(ID, + resourceId + "/" + + ((Map) capability.getValue()) + .get("type")) + .build()) + .put(OCCURRENCES, + ((Map) capability.getValue()).get(OCCURRENCES)) + .putOpt("validSourceTypes", + ((Map) capability.getValue()).get("validSourceTypes")) + .build() + // renderEntry((Map.Entry)capability, + // "occurrences", + // "validSourceTypes") + ).collect(Collectors.toList())); + return this; + } + + public Future<Type> execute() { + org.onap.sdc.dcae.checker.Catalog catalog = ASDCCatalog.this.catalogs.get(this.resourceId); + if (catalog == null) { + return Futures.failedFuture(new Exception("No catalog available for resource " + this.resourceId + + ". 
You might want to fetch the model first.")); + } + + if (!catalog.hasType(Construct.Node, this.name)) { + return Futures.failedFuture( + new Exception("No " + this.name + " type in catalog for resource " + this.resourceId)); + } + + this.ctx = JXPathContext + .newContext(new MapBuilder().put(NAME, this.name).put(ID, this.resourceId + "/" + this.name) + .put(ITEM_ID, this.resourceId + "/" + this.name).build()); + + this.doHierarchy(catalog).doRequirements(catalog).doCapabilities(catalog); + + JSONObject pack = new JSONObject((Map) this.ctx.getContextBean()); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), pack.toString(2)); + + return Futures.succeededFuture(proxies.build((Map) ctx.getContextBean(), Type.class)); + } + } + + public static interface Resource extends Catalog.Item<Resource> { + + @Override + @Proxy.DataMap(map = "uuid") + public String id(); + + public UUID uuid(); + + public UUID invariantUUID(); + + public String category(); + + public String subCategory(); + + public String lastUpdaterFullName(); + + public String version(); + + @Proxy.DataMap(proxy = true, elementType = Artifact.class) + public Artifacts artifacts(); + + } + + public static class Resources extends Elements<Resource> { + } + + public static interface Artifact extends Catalog.Element<Artifact> { + + @Proxy.DataMap(map = ARTIFACT_NAME) + public String name(); + + @Proxy.DataMap(map = "artifactType") + public String type(); + + @Proxy.DataMap(map = "artifactDescription") + public String description(); + + @Proxy.DataMap(map = "artifactUUID") + public UUID uuid(); + + @Proxy.DataMap(map = "artifactVersion") + public int version(); + + } + + public static class Artifacts extends Elements<Artifact> { + } + + public class ASDCLocator implements TargetLocator { + + private JSONArray artifacts; + private org.onap.sdc.dcae.checker.Catalog catalog; + + private ASDCLocator(JSONArray theArtifacts, org.onap.sdc.dcae.checker.Catalog theCatalog) { + this.artifacts = theArtifacts; + this.catalog = theCatalog; + } + + public boolean addSearchPath(URI theURI) { + return false; + } + + public boolean addSearchPath(String thePath) { + return false; + } + + public Iterable<URI> searchPaths() { + return Collections.emptySet(); + } + + public Target resolve(String theName) { + JSONObject targetArtifact = null; + + for (int i = 0; i < this.artifacts.length(); i++) { + JSONObject artifact = this.artifacts.getJSONObject(i); + String artifactName = artifact.getString(ARTIFACT_NAME); + if (StringUtils.containsIgnoreCase(artifactName, theName)) { + targetArtifact = artifact; + } + } + + if (targetArtifact == null) { + return null; + } + + ASDCTarget target = null; + if (this.catalog != null) { + // this is the caching!! + target = (ASDCTarget) this.catalog.getTarget(ASDCCatalog.this.getArtifactURI(targetArtifact)); + if (target != null && target.getVersion().equals(ASDCCatalog.this.getArtifactVersion(targetArtifact))) { + return target; + } + } + + return new ASDCTarget(targetArtifact); + } + } + + public class ASDCTarget extends Target { + + private String content; + private JSONObject artifact; + + private ASDCTarget(JSONObject theArtifact) { + super(ASDCCatalog.this.getArtifactName(theArtifact), ASDCCatalog.this.getArtifactURI(theArtifact)); + this.artifact = theArtifact; + } + + // here is a chance for caching within the catalog! Do not go fetch the + // artifact if it has not been changed since the + // last fetch. 
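+ // a minimal sketch of such a check, assuming we also kept the version the
+ // content was fetched at (hypothetical 'fetchedVersion' field):
+ //
+ //   if (this.content != null && getVersion().equals(this.fetchedVersion)) {
+ //     return new StringReader(this.content); // still current, skip the re-fetch
+ //   }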
+ + @Override + public Reader open() throws IOException { + if (this.content == null) { + try { + this.content = ASDCCatalog.this.asdc + .fetch(ASDCCatalog.this.getArtifactURL(this.artifact), String.class).waitForResult(); + } catch (Exception x) { + throw new IOException("Failed to load " + ASDCCatalog.this.getArtifactURL(this.artifact), x); + } + } + + // should return immediately a reader blocked until content + // available .. hard to handle errors + return new StringReader(this.content); + } + + public String getVersion() { + return ASDCCatalog.this.getArtifactVersion(this.artifact); + } + + } + + public static void main(String[] theArgs) throws Exception { + + ASDCCatalog catalog = new ASDCCatalog(new URI(theArgs[0])); + + Folder f = catalog.folder(theArgs[1]).withItems().withItemModels().execute().waitForResult(); + + debugLogger.log(LogLevel.DEBUG, ASDCCatalog.class.getName(), "folder: {}", f.data()); + + Resources items = f.elements(ITEMS, Resources.class); + if (items != null) { + for (Resource item : items) { + debugLogger.log(LogLevel.DEBUG, ASDCCatalog.class.getName(), "\titem: {} : {}",item.name(), item.data()); + Templates templates = item.elements(MODELS, Templates.class); + if (templates != null) { + for (Template t : templates) { + Template ft = catalog.template(t.id()).withNodes().withNodeProperties() + .withNodePropertiesAssignments().execute().waitForResult(); + + debugLogger.log(LogLevel.DEBUG, ASDCCatalog.class.getName(), "template data: {}", ft.data()); + } + } + } + } + } + +} diff --git a/dcaedt_catalog/api/src/main/resources/log4j.properties b/dcaedt_catalog/api/src/main/resources/log4j.properties new file mode 100644 index 0000000..6e159e5 --- /dev/null +++ b/dcaedt_catalog/api/src/main/resources/log4j.properties @@ -0,0 +1,8 @@ +log4j.rootLogger=INFO, stdout + +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=%5p [%c] %m%n + +log4j.logger.org.apache.http=DEBUG +log4j.logger.org.apache.http.wire=ERROR
\ No newline at end of file diff --git a/dcaedt_catalog/api/src/test/java/org/onap/sdc/dcae/catalog/ASDCCatalogTest.java b/dcaedt_catalog/api/src/test/java/org/onap/sdc/dcae/catalog/ASDCCatalogTest.java new file mode 100644 index 0000000..fcd92f0 --- /dev/null +++ b/dcaedt_catalog/api/src/test/java/org/onap/sdc/dcae/catalog/ASDCCatalogTest.java @@ -0,0 +1,88 @@ +package org.onap.sdc.dcae.catalog; + +import static org.assertj.core.api.Assertions.*; + +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.UUID; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.onap.sdc.dcae.catalog.asdc.ASDCCatalog; +import org.onap.sdc.dcae.catalog.asdc.ASDCCatalog.FolderAction; +import org.onap.sdc.dcae.catalog.asdc.ASDCCatalog.Resource; + +import static org.mockito.Mockito.*; + + +public class ASDCCatalogTest { + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + private static FolderAction getTarget() { + ASDCCatalog catalog = mock(ASDCCatalog.class); + when(catalog.folder("test")).thenCallRealMethod(); + FolderAction target = catalog.folder("test"); + return target; + } + + @Test + public void filterLatestVersion_null_throwIllegalArgumentException() { + // arrange + FolderAction target = getTarget(); + // assert + thrown.expect(IllegalArgumentException.class); + // act + target.filterLatestVersion(null); + } + + @Test + public void filterLatestVersion_emptyItemsList_emptyItemsList() throws URISyntaxException { + // arrange + FolderAction target = getTarget(); + // act + Collection<Resource> result = target.filterLatestVersion(new ArrayList<>()); + // assert + assertThat(result).isEmpty(); + } + + @Test + public void filterLatestVersion_itemWithTwoVersions_itemWithLatestVersion() { + // arrange + FolderAction target = getTarget(); + + UUID invariantUUID = UUID.randomUUID(); + Resource r1v1 = mock(Resource.class); + Resource r1v2 = mock(Resource.class); + when(r1v1.invariantUUID()).thenReturn(invariantUUID); + when(r1v2.invariantUUID()).thenReturn(invariantUUID); + when(r1v1.version()).thenReturn("1.0"); + when(r1v2.version()).thenReturn("2.0"); + ArrayList<Resource> listItemWithTwoVersions = new ArrayList<Resource>(Arrays.asList(r1v1, r1v2)); + // act + Collection<Resource> result = target.filterLatestVersion(listItemWithTwoVersions); + // assert + assertThat(result).containsExactly(r1v2); + } + + @Test + public void filterLatestVersion_2distinctItems_2distinctItems() { + // arrange + FolderAction target = getTarget(); + + Resource r1 = mock(Resource.class); + Resource r2 = mock(Resource.class); + when(r1.invariantUUID()).thenReturn(UUID.randomUUID()); + when(r2.invariantUUID()).thenReturn(UUID.randomUUID()); + ArrayList<Resource> listOfTwoDistinctItems = new ArrayList<Resource>(Arrays.asList(r1, r2)); + // act + Collection<Resource> result = target.filterLatestVersion(listOfTwoDistinctItems); + // assert + assertThat(result).containsExactlyInAnyOrder(r1, r2); + } + +} diff --git a/dcaedt_catalog/asdc/pom.xml b/dcaedt_catalog/asdc/pom.xml new file mode 100644 index 0000000..14323fe --- /dev/null +++ b/dcaedt_catalog/asdc/pom.xml @@ -0,0 +1,139 @@ +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.onap.sdc.dcae</groupId> + 
<artifactId>DCAE-DT-Catalog</artifactId> + <version>1806.0.1-SNAPSHOT</version> + </parent> + <artifactId>DCAE-DT-Catalog-ASDC</artifactId> + <packaging>jar</packaging> + <name>DCAE DT ASDC</name> + <build> + <sourceDirectory>src/main/java</sourceDirectory> + <plugins> + <plugin> + <artifactId>maven-compiler-plugin</artifactId> + <version>3.1</version> + <configuration> + <source>1.8</source> + <target>1.8</target> + <encoding>${project.build.sourceEncoding}</encoding> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-dependency-plugin</artifactId> + <version>2.10</version> + <executions> + <execution> + <id>copy-dependencies</id> + <phase>package</phase> + <goals> + <goal>copy-dependencies</goal> + </goals> + <configuration> + <outputDirectory>${project.build.directory}/deps</outputDirectory> + <overWriteReleases>false</overWriteReleases> + <overWriteSnapshots>false</overWriteSnapshots> + <overWriteIfNewer>true</overWriteIfNewer> + </configuration> + </execution> + </executions> + </plugin> + <!-- <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>buildnumber-maven-plugin</artifactId> + <version>1.4</version> <executions> <execution> <phase>validate</phase> <goals> + <goal>create</goal> </goals> </execution> </executions> <configuration> <doCheck>false</doCheck> + <doUpdate>false</doUpdate> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> <version>2.1</version> <configuration> + <archive> <manifest> <addDefaultImplementationEntries>true</addDefaultImplementationEntries> + </manifest> <manifestEntries> <Implementation-Build>${buildNumber}</Implementation-Build> + </manifestEntries> </archive> </configuration> </plugin> --> + <plugin> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-maven-plugin</artifactId> + <version>1.3.3.RELEASE</version> + <configuration> + <mainClass>org.onap.sdc.dcae.catalog.asdc.ASDCEngine</mainClass> + </configuration> + <executions> + <execution> + <goals> + <goal>repackage</goal> + </goals> + </execution> + </executions> + </plugin> + </plugins> + </build> + <dependencies> + <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + <version>3.8.1</version> + <scope>test</scope> + </dependency> + <!-- <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> + <version>2.4.4</version> </dependency> --> + <dependency> + <groupId>org.springframework</groupId> + <artifactId>spring-core</artifactId> + <version>4.3.5.RELEASE</version> + </dependency> + <dependency> + <groupId>org.springframework</groupId> + <artifactId>spring-web</artifactId> + <version>4.3.5.RELEASE</version> + </dependency> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-web</artifactId> + <version>1.4.1.RELEASE</version> + </dependency> + <dependency> + <groupId>org.springframework</groupId> + <artifactId>spring-webmvc</artifactId> + <version>4.3.5.RELEASE</version> + </dependency> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-autoconfigure</artifactId> + <version>1.4.1.RELEASE</version> + </dependency> + <dependency> + <groupId>org.apache.commons</groupId> + <artifactId>commons-lang3</artifactId> + <version>3.5</version> + </dependency> + <dependency> + <groupId>commons-cli</groupId> + <artifactId>commons-cli</artifactId> + <version>1.3</version> + </dependency> + <dependency> + 
<groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Catalog-Commons</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Validator-Checker</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>com.google.code.gson</groupId> + <artifactId>gson</artifactId> + <version>2.7</version> + </dependency> + <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.assertj</groupId> + <artifactId>assertj-core</artifactId> + <scope>test</scope> + </dependency> + </dependencies> +</project> diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDC.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDC.java new file mode 100644 index 0000000..66afab1 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDC.java @@ -0,0 +1,1101 @@ +package org.onap.sdc.dcae.catalog.asdc; + +import java.net.URI; +import java.net.URISyntaxException; + +import java.io.File; +import java.io.IOException; +import java.io.UncheckedIOException; + +import java.util.List; +import java.util.Set; +import java.util.UUID; +import java.util.Collections; + +import java.util.function.UnaryOperator; + +import javax.annotation.PostConstruct; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.enums.ArtifactGroupType; +import org.onap.sdc.dcae.enums.ArtifactType; +import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed; +import org.springframework.http.MediaType; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpRequest; +import org.springframework.http.HttpMethod; +import org.springframework.http.HttpEntity; +import org.springframework.http.RequestEntity; +import org.springframework.http.ResponseEntity; +import org.springframework.http.client.AsyncClientHttpRequestExecution; +import org.springframework.http.client.AsyncClientHttpRequestInterceptor; +import org.springframework.http.client.ClientHttpResponse; +import org.springframework.web.client.AsyncRestTemplate; +import org.springframework.web.client.RestClientException; +import org.springframework.web.client.HttpClientErrorException; +import org.springframework.http.converter.HttpMessageConverter; + +import org.springframework.util.Base64Utils; +//import org.springframework.util.DigestUtils; +import org.apache.commons.codec.digest.DigestUtils; + +import org.springframework.stereotype.Component; +import org.springframework.context.annotation.Scope; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.beans.factory.annotation.Autowired; + +import org.springframework.util.concurrent.ListenableFuture; +import org.springframework.util.concurrent.ListenableFutureCallback; + +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.StringUtils; + +import org.json.JSONObject; +import org.onap.sdc.dcae.catalog.commons.Action; +import org.onap.sdc.dcae.catalog.commons.Future; +import org.onap.sdc.dcae.catalog.commons.Futures; +import org.onap.sdc.dcae.catalog.commons.JSONHttpMessageConverter; +import org.onap.sdc.dcae.composition.util.DcaeBeConstants; +import org.onap.sdc.dcae.composition.util.SystemProperties; +import org.json.JSONArray; + +import 
org.apache.commons.cli.BasicParser; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.CommandLineParser; +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.OptionBuilder; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; + + +@Component("asdc") +@Scope("singleton") +//@ConfigurationProperties(prefix="asdc") +public class ASDC { + + public static enum AssetType { + resource, + service, + product + } + +// public static enum ArtifactType { +// DCAE_TOSCA, +// DCAE_JSON, +// DCAE_POLICY, +// DCAE_DOC, +// DCAE_EVENT, +// DCAE_INVENTORY_TOSCA, +// DCAE_INVENTORY_JSON, +// DCAE_INVENTORY_POLICY, +// DCAE_INVENTORY_DOC, +// DCAE_INVENTORY_BLUEPRINT, +// DCAE_INVENTORY_EVENT, +// HEAT, +// HEAT_VOL, +// HEAT_NET, +// HEAT_NESTED, +// HEAT_ARTIFACT, +// HEAT_ENV, +// OTHER +// } + +// public static enum ArtifactGroupType { +// DEPLOYMENT, +// INFORMATIONAL +// } + + public static enum LifecycleState { + Checkin, + Checkout, + Certify, + undocheckout + } + + +// @Retention(RetentionPolicy.RUNTIME) +// @Target(ElementType.METHOD) +// public @interface Mandatory { +// } + + protected static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + protected static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + @Autowired + private SystemProperties systemProperties; + + private URI rootUri; + private String rootPath = "/sdc/v1/catalog/"; + private String user, + passwd; + private String instanceId; + + + public void setUri(URI theUri) { + //theUri = URI.create(systemProperties.getProperties().getProperty(SystemProperties.ASDC_CATALOG_URL)); + String userInfo = theUri.getUserInfo(); + if (userInfo != null) { + String[] userInfoParts = userInfo.split(":"); + setUser(userInfoParts[0]); + if (userInfoParts.length > 1) + setPassword(userInfoParts[1]); + } + String fragment = theUri.getFragment(); + if (fragment == null) + throw new IllegalArgumentException("The URI must contain a fragment specification, to be used as ASDC instance id"); + setInstanceId(fragment); + + try { + this.rootUri = new URI(theUri.getScheme(), null, theUri.getHost(), theUri.getPort(), theUri.getPath(), theUri.getQuery(), null); + } + catch (URISyntaxException urix) { + throw new IllegalArgumentException("Invalid uri", urix); + } + } + + public URI getUri() { + return this.rootUri; + } + + public void setUser(String theUser) { + this.user = theUser; + } + + public String getUser() { + return this.user; + } + + public void setPassword(String thePassword) { + this.passwd = thePassword; + } + + public String getPassword() { + return this.passwd; + } + + public void setInstanceId(String theId) { + this.instanceId = theId; + } + + public String getInstanceId() { + return this.instanceId; + } + + public void setRootPath(String thePath) { + this.rootPath = systemProperties.getProperties().getProperty(DcaeBeConstants.Config.ASDC_ROOTPATH); + } + + public String getRootPath() { + return systemProperties.getProperties().getProperty(DcaeBeConstants.Config.ASDC_ROOTPATH); + } + + @Scheduled(fixedRateString = "${beans.context.scripts.updateCheckFrequency?:60000}") + public void checkForUpdates() { + } + + @PostConstruct + public void initASDC() { + } + + public <T> Future<T> getResources(Class<T> theType) { + return getAssets(AssetType.resource, theType); + } + + public Future<JSONArray> getResources() { + return getAssets(AssetType.resource, JSONArray.class); + } + + public <T> Future<T> getResources(Class<T> theType, String theCategory, 
String theSubCategory) { + return getAssets(AssetType.resource, theType, theCategory, theSubCategory); + } + + public Future<JSONArray> getResources(String category, String subCategory, String resourceType) { + return getAssets(AssetType.resource, JSONArray.class, category, subCategory, resourceType); + } + + public <T> Future<T> getServices(Class<T> theType) { + return getAssets(AssetType.service, theType); + } + + public Future<JSONArray> getServices() { + return getAssets(AssetType.service, JSONArray.class); + } + + public <T> Future<T> getServices(Class<T> theType, String theCategory, String theSubCategory) { + return getAssets(AssetType.service, theType, theCategory, theSubCategory); + } + + public Future<JSONArray> getServices(String theCategory, String theSubCategory) { + return getAssets(AssetType.service, JSONArray.class, theCategory, theSubCategory); + } + + public <T> Future<T> getAssets(AssetType theAssetType, Class<T> theType) { + return fetch(refAssets(theAssetType), theType); + } + + public <T> Action<T> getAssetsAction(AssetType theAssetType, Class<T> theType) { + return (() -> fetch(refAssets(theAssetType), theType)); + } + + public <T> Future<T> getAssets(AssetType theAssetType, Class<T> theType, + String theCategory, String theSubCategory) { + return getAssets(theAssetType, theType, theCategory, theSubCategory, null); + } + + public <T> Future<T> getAssets(AssetType theAssetType, Class<T> theType, + String theCategory, String theSubCategory, String theResourceType) { + return fetch(refAssets(theAssetType) + filter(theCategory, theSubCategory, theResourceType), theType); + } + + public <T> Action<T> getAssetsAction(AssetType theAssetType, Class<T> theType, + String theCategory, String theSubCategory, String theResourceType) { + return (() -> fetch(refAssets(theAssetType) + filter(theCategory, theSubCategory, theResourceType), theType)); + } + + protected String refAssets(AssetType theAssetType) { + return this.rootPath + theAssetType + "s/"; + } + + private String filter(String theCategory, String theSubCategory, String theResourceType) { + StringBuilder filter = null; + if (theCategory != null) { + filter = new StringBuilder(); + filter.append("?category=") + .append(theCategory); + if (theSubCategory != null) { + filter.append("&subCategory=") + .append(theSubCategory); + if (theResourceType != null) { + filter.append("&resourceType=") + .append(theResourceType); + } + } + } + return filter == null ? 
"" : filter.toString(); + } + + protected String refAsset(AssetType theAssetType, UUID theId) { + return this.rootPath + theAssetType + "s/" + theId; + } + + public <T> Future<T> getResource(UUID theId, Class<T> theType) { + return getAsset(AssetType.resource, theId, theType); + } + + public Future<JSONObject> getResource(UUID theId) { + return getAsset(AssetType.resource, theId, JSONObject.class); + } + + public Future<ResourceDetailed> getSDCResource(UUID theId) { + return getAsset(AssetType.resource, theId, ResourceDetailed.class); + } + + + public <T> Future<T> getService(UUID theId, Class<T> theType) { + return getAsset(AssetType.service, theId, theType); + } + + public Future<JSONObject> getService(UUID theId) { + return getAsset(AssetType.service, theId, JSONObject.class); + } + + public <T> Future<T> getAsset(AssetType theAssetType, UUID theId, Class<T> theType) { + return fetch(refAsset(theAssetType, theId) + "/metadata", theType); + } + + public <T> Action<T> getAssetAction(AssetType theAssetType, UUID theId, Class<T> theType) { + return (() -> fetch(refAsset(theAssetType, theId) + "/metadata", theType)); + } + + public Future<byte[]> getResourceArchive(UUID theId) { + return getAssetArchive(AssetType.resource, theId); + } + + public Future<byte[]> getServiceArchive(UUID theId) { + return getAssetArchive(AssetType.service, theId); + } + + public Future<byte[]> getAssetArchive(AssetType theAssetType, UUID theId) { + return fetch(refAsset(theAssetType, theId) + "/toscaModel", byte[].class); + } + + public Action<byte[]> getAssetArchiveAction(AssetType theAssetType, UUID theId) { + return (() -> fetch(refAsset(theAssetType, theId) + "/toscaModel", byte[].class)); + } + + public Future<JSONObject> checkinResource(UUID theId, String theUser, String theMessage) { + return cycleAsset(AssetType.resource, theId, LifecycleState.Checkin, theUser, theMessage); + } + + public Future<JSONObject> checkinService(UUID theId, String theUser, String theMessage) { + return cycleAsset(AssetType.service, theId, LifecycleState.Checkin, theUser, theMessage); + } + + public Future<JSONObject> checkoutResource(UUID theId, String theUser, String theMessage) { + return cycleAsset(AssetType.resource, theId, LifecycleState.Checkout, theUser, theMessage); + } + + public Future<JSONObject> checkoutService(UUID theId, String theUser, String theMessage) { + return cycleAsset(AssetType.service, theId, LifecycleState.Checkout, theUser, theMessage); + } + + public Future<JSONObject> certifyResource(UUID theId, String theUser, String theMessage) { + return cycleAsset(AssetType.resource, theId, LifecycleState.Certify, theUser, theMessage); + } + + public Future<JSONObject> certifyService(UUID theId, String theUser, String theMessage) { + return cycleAsset(AssetType.service, theId, LifecycleState.Certify, theUser, theMessage); + } + + /* Normally theMessage is mandatory (and we'd use put instead of putOpt) but .. not so for undocheckout .. 
+ */ + public Future<JSONObject> cycleAsset(AssetType theAssetType, UUID theId, LifecycleState theState, + String theUser, String theMessage) { + return post(refAsset(theAssetType, theId) + "/lifecycleState/" + theState, + (headers) -> prepareHeaders(headers) + .header("USER_ID", theUser), + new JSONObject().putOpt("userRemarks", theMessage)); + } + + protected String refAssetInstanceArtifact(AssetType theAssetType, UUID theAssetId, String theAssetInstance, UUID theArtifactId) { + return refAsset(theAssetType, theAssetId) + "/resourceInstances/" + theAssetInstance + "/artifacts" + (theArtifactId == null ? "" : ("/" + theArtifactId)); + } + + protected String refAssetArtifact(AssetType theAssetType, UUID theAssetId, UUID theArtifactId) { + return refAsset(theAssetType, theAssetId) + "/artifacts" + (theArtifactId == null ? "" : ("/" + theArtifactId)); + } + + public <T> Future<T> getResourceArtifact(UUID theAssetId, UUID theArtifactId, Class<T> theType) { + return getAssetArtifact(AssetType.resource, theAssetId, theArtifactId, theType); + } + + public <T> Future<T> getServiceArtifact(UUID theAssetId, UUID theArtifactId, Class<T> theType) { + return getAssetArtifact(AssetType.service, theAssetId, theArtifactId, theType); + } + + public <T> Future<T> getResourceInstanceArtifact(UUID theAssetId, UUID theArtifactId, String theInstance, Class<T> theType) { + return getAssetInstanceArtifact(AssetType.resource, theAssetId, theInstance, theArtifactId, theType); + } + + public <T> Future<T> getServiceInstanceArtifact(UUID theAssetId, UUID theArtifactId, String theInstance, Class<T> theType) { + return getAssetInstanceArtifact(AssetType.service, theAssetId, theInstance, theArtifactId, theType); + } + + public <T> Future<T> getAssetArtifact(AssetType theAssetType, UUID theAssetId, UUID theArtifactId, Class<T> theType) { + return fetch(refAssetArtifact(theAssetType, theAssetId, theArtifactId), theType); + } + + public <T> Action<T> getAssetArtifactAction(AssetType theAssetType, UUID theAssetId, UUID theArtifactId, Class<T> theType) { + return (() -> fetch(refAssetArtifact(theAssetType, theAssetId, theArtifactId), theType)); + } + + public <T> Future<T> getAssetInstanceArtifact(AssetType theAssetType, UUID theAssetId, String theInstance, UUID theArtifactId, Class<T> theType) { + return fetch(refAssetInstanceArtifact(theAssetType, theAssetId, theInstance, theArtifactId), theType); + } + + public <T> Action<T> getAssetInstanceArtifactAction(AssetType theAssetType, UUID theAssetId, String theInstance, UUID theArtifactId, Class<T> theType) { + return (() -> fetch(refAssetInstanceArtifact(theAssetType, theAssetId, theInstance, theArtifactId), theType)); + } + + public ArtifactUploadAction createResourceArtifact(UUID theAssetId) { + return createAssetArtifact(AssetType.resource, theAssetId); + } + + public ArtifactUploadAction createServiceArtifact(UUID theAssetId) { + return createAssetArtifact(AssetType.service, theAssetId); + } + + public ArtifactUploadAction createResourceInstanceArtifact(UUID theAssetId, String theInstance) { + return createAssetInstanceArtifact(AssetType.resource, theAssetId, theInstance); + } + + public ArtifactUploadAction createServiceInstanceArtifact(UUID theAssetId, String theInstance) { + return createAssetInstanceArtifact(AssetType.service, theAssetId, theInstance); + } + + public ArtifactUploadAction createAssetArtifact(AssetType theAssetType, UUID theAssetId) { + return new ArtifactUploadAction() + .ofAsset(theAssetType, theAssetId); + } + + public ArtifactUploadAction 
createAssetInstanceArtifact(AssetType theAssetType, UUID theAssetId, String theInstance) { + return new ArtifactUploadAction() + .ofAssetInstance(theAssetType, theAssetId, theInstance); + } + + public ArtifactUpdateAction updateResourceArtifact(UUID theAssetId, JSONObject theArtifactInfo) { + return updateAssetArtifact(AssetType.resource, theAssetId, theArtifactInfo); + } + + public ArtifactUpdateAction updateResourceInstanceArtifact(UUID theAssetId, String theInstance, JSONObject theArtifactInfo) { + return updateAssetInstanceArtifact(AssetType.resource, theAssetId, theInstance, theArtifactInfo); + } + + public ArtifactUpdateAction updateServiceArtifact(UUID theAssetId, JSONObject theArtifactInfo) { + return updateAssetArtifact(AssetType.service, theAssetId, theArtifactInfo); + } + + public ArtifactUpdateAction updateServiceInstanceArtifact(UUID theAssetId, String theInstance, JSONObject theArtifactInfo) { + return updateAssetInstanceArtifact(AssetType.service, theAssetId, theInstance, theArtifactInfo); + } + + public ArtifactUpdateAction updateAssetArtifact(AssetType theAssetType, UUID theAssetId, JSONObject theArtifactInfo) { + return new ArtifactUpdateAction(theArtifactInfo) + .ofAsset(theAssetType, theAssetId); + } + + public ArtifactUpdateAction updateAssetInstanceArtifact(AssetType theAssetType, UUID theAssetId, String theInstance, JSONObject theArtifactInfo) { + return new ArtifactUpdateAction(theArtifactInfo) + .ofAssetInstance(theAssetType, theAssetId, theInstance); + } + + public ArtifactDeleteAction deleteResourceArtifact(UUID theAssetId, UUID theArtifactId) { + return deleteAssetArtifact(AssetType.resource, theAssetId, theArtifactId); + } + + public ArtifactDeleteAction deleteResourceInstanceArtifact(UUID theAssetId, String theInstance, UUID theArtifactId) { + return deleteAssetInstanceArtifact(AssetType.resource, theAssetId, theInstance, theArtifactId); + } + + public ArtifactDeleteAction deleteServiceArtifact(UUID theAssetId, UUID theArtifactId) { + return deleteAssetArtifact(AssetType.service, theAssetId, theArtifactId); + } + + public ArtifactDeleteAction deleteServiceInstanceArtifact(UUID theAssetId, String theInstance, UUID theArtifactId) { + return deleteAssetInstanceArtifact(AssetType.service, theAssetId, theInstance, theArtifactId); + } + + public ArtifactDeleteAction deleteAssetArtifact(AssetType theAssetType, UUID theAssetId, UUID theArtifactId) { + return new ArtifactDeleteAction(theArtifactId) + .ofAsset(theAssetType, theAssetId); + } + + public ArtifactDeleteAction deleteAssetInstanceArtifact(AssetType theAssetType, UUID theAssetId, String theInstance, UUID theArtifactId) { + return new ArtifactDeleteAction(theArtifactId) + .ofAssetInstance(theAssetType, theAssetId, theInstance); + } + + + public abstract class ASDCAction<A extends ASDCAction<A, T>, T> implements Action<T> { + + protected JSONObject info; //info passed to asdc as request body + protected String operatorId; //id of the SDC user performing the action + + protected ASDCAction(JSONObject theInfo) { + this.info = theInfo; + } + + protected abstract A self(); + + protected ASDC asdc() { + return ASDC.this; + } + + protected A withInfo(JSONObject theInfo) { + merge(this.info, theInfo); + return self(); + } + + public A with(String theProperty, Object theValue) { + info.put(theProperty, theValue); + return self(); + } + + public A withOperator(String theOperator) { + this.operatorId = theOperator; + return self(); + } + + protected abstract String[] mandatoryInfoEntries(); + + protected void 
checkOperatorId() { + if (this.operatorId == null) { + throw new IllegalStateException("No operator id was provided"); + } + } + + protected void checkMandatoryInfo() { + for (String field: mandatoryInfoEntries()) { + if (!info.has(field)) + throw new IllegalStateException("No '" + field + "' was provided"); + } + } + + protected void checkMandatory() { + checkOperatorId(); + checkMandatoryInfo(); + } + } + + protected static final String[] artifactMandatoryEntries = new String[] {}; + + /** + * We use the same API to operate on artifacts attached to assets or to their instances + */ + public abstract class ASDCArtifactAction<A extends ASDCArtifactAction<A>> extends ASDCAction<A, JSONObject> { + + protected AssetType assetType; + protected UUID assetId; + protected String assetInstance; + + protected ASDCArtifactAction(JSONObject theInfo) { + super(theInfo); + } + + protected A ofAsset(AssetType theAssetType, UUID theAssetId) { + this.assetType = theAssetType; + this.assetId = theAssetId; + return self(); + } + + protected A ofAssetInstance(AssetType theAssetType, UUID theAssetId, String theInstance) { + this.assetType = theAssetType; + this.assetId = theAssetId; + this.assetInstance = theInstance; + return self(); + } + + protected String normalizeInstanceName(String theName) { + return StringUtils.removePattern(theName, "[ \\.\\-]+").toLowerCase(); + } + + protected String[] mandatoryInfoEntries() { + return ASDC.this.artifactMandatoryEntries; + } + + protected String ref(UUID theArtifactId) { + return (this.assetInstance == null) ? + refAssetArtifact(this.assetType, this.assetId, theArtifactId) : + refAssetInstanceArtifact(this.assetType, this.assetId, normalizeInstanceName(this.assetInstance), theArtifactId); + } + } + + protected static final String[] uploadMandatoryEntries = new String[] { "artifactName", + "artifactType", + "artifactGroupType", + "artifactLabel", + "description", + "payloadData" }; + + public class ArtifactUploadAction extends ASDCArtifactAction<ArtifactUploadAction> { + + protected ArtifactUploadAction() { + super(new JSONObject()); + } + + protected ArtifactUploadAction self() { + return this; + } + + public ArtifactUploadAction withContent(byte[] theContent) { + return with("payloadData", Base64Utils.encodeToString(theContent)); + } + + public ArtifactUploadAction withContent(File theFile) throws IOException { + return withContent(FileUtils.readFileToByteArray(theFile)); + } + + public ArtifactUploadAction withLabel(String theLabel) { + return with("artifactLabel", theLabel); + } + + public ArtifactUploadAction withName(String theName) { + return with("artifactName", theName); + } + + public ArtifactUploadAction withDisplayName(String theName) { + return with("artifactDisplayName", theName); + } + + public ArtifactUploadAction withType(ArtifactType theType) { + return with("artifactType", theType.toString()); + } + + public ArtifactUploadAction withGroupType(ArtifactGroupType theGroupType) { + return with("artifactGroupType", theGroupType.toString()); + } + + public ArtifactUploadAction withDescription(String theDescription) { + return with("description", theDescription); + } + + protected String[] mandatoryInfoEntries() { + return ASDC.this.uploadMandatoryEntries; + } + + public Future<JSONObject> execute() { + checkMandatory(); + return ASDC.this.post(ref(null), + (headers) -> prepareHeaders(headers) + .header("USER_ID", this.operatorId), + this.info); + } + } + + protected static final String[] updateMandatoryEntries = new String[] { "artifactName", + 
"artifactType", + "artifactGroupType", + "artifactLabel", + "description", + "payloadData" }; + + /** + * In its current form the update relies on a previous artifact retrieval. One cannot build an update from scratch. + * The label, tye and group type must be submitted but cannot be updated + */ + public class ArtifactUpdateAction extends ASDCArtifactAction<ArtifactUpdateAction> { + + + protected ArtifactUpdateAction(JSONObject theInfo) { + super(theInfo); + } + + protected ArtifactUpdateAction self() { + return this; + } + + public ArtifactUpdateAction withContent(byte[] theContent) { + return with("payloadData", Base64Utils.encodeToString(theContent)); + } + + public ArtifactUpdateAction withContent(File theFile) throws IOException { + return withContent(FileUtils.readFileToByteArray(theFile)); + } + + public ArtifactUpdateAction withDescription(String theDescription) { + return with("description", theDescription); + } + + public ArtifactUpdateAction withName(String theName) { + return with("artifactName", theName); + } + + protected String[] mandatoryInfoEntries() { + return ASDC.this.updateMandatoryEntries; + } + + /* The json object originates (normally) from a get so it will have entries we need to cleanup */ + protected void cleanupInfoEntries() { + this.info.remove("artifactChecksum"); + this.info.remove("artifactUUID"); + this.info.remove("artifactVersion"); + this.info.remove("artifactURL"); + this.info.remove("artifactDescription"); + } + + public Future<JSONObject> execute() { + UUID artifactUUID = UUID.fromString(this.info.getString("artifactUUID")); + checkMandatory(); + cleanupInfoEntries(); + return ASDC.this.post(ref(artifactUUID), + (headers) -> prepareHeaders(headers) + .header("USER_ID", this.operatorId), + this.info); + } + } + + public class ArtifactDeleteAction extends ASDCArtifactAction<ArtifactDeleteAction> { + + private UUID artifactId; + + protected ArtifactDeleteAction(UUID theArtifactId) { + super(null); + this.artifactId = theArtifactId; + } + + protected ArtifactDeleteAction self() { + return this; + } + + public Future<JSONObject> execute() { + checkMandatory(); + return ASDC.this.delete(ref(this.artifactId), + (headers) -> prepareHeaders(headers) + .header("USER_ID", this.operatorId)); + } + } + + + + + public VFCMTCreateAction createVFCMT() { + return new VFCMTCreateAction(); + } + + protected static final String[] vfcmtMandatoryEntries = new String[] { "name", + "vendorName", + "vendorRelease", + "contactId" }; + + + public class VFCMTCreateAction extends ASDCAction<VFCMTCreateAction, JSONObject> { + + protected VFCMTCreateAction() { + + super(new JSONObject()); + this + .with("resourceType", "VFCMT") + .with("category", "Template") + .with("subcategory", "Monitoring Template") + .with("icon", "defaulticon"); + } + + protected VFCMTCreateAction self() { + return this; + } + + public VFCMTCreateAction withName(String theName) { + return with("name", theName); + } + + public VFCMTCreateAction withDescription(String theDescription) { + return with("description", theDescription); + } + + public VFCMTCreateAction withVendorName(String theVendorName) { + return with("vendorName", theVendorName); + } + + public VFCMTCreateAction withVendorRelease(String theVendorRelease) { + return with("vendorRelease", theVendorRelease); + } + + public VFCMTCreateAction withTags(String... 
theTags) { + for (String tag: theTags) + this.info.append("tags", tag); + return this; + } + + public VFCMTCreateAction withIcon(String theIcon) { + return with("icon", theIcon); + } + + protected String[] mandatoryInfoEntries() { + return ASDC.this.vfcmtMandatoryEntries; + } + + public VFCMTCreateAction withContact(String theContact) { + return with("contactId", theContact); + } + + public Future<JSONObject> execute() { + + this.info.putOnce("contactId", this.operatorId); + this.info.append("tags", info.optString("name")); + checkMandatory(); + return ASDC.this.post(refAssets(AssetType.resource), + (headers) -> prepareHeaders(headers) + .header("USER_ID", this.operatorId), + this.info); + } + + } + + public static JSONObject merge(JSONObject theOriginal, JSONObject thePatch) { + for (String key: (Set<String>)thePatch.keySet()) { + if (!theOriginal.has(key)) + theOriginal.put(key, thePatch.get(key)); + } + return theOriginal; + } + + protected URI refUri(String theRef) { + try { + return new URI(this.rootUri + theRef); + } + catch(URISyntaxException urisx) { + throw new UncheckedIOException(new IOException(urisx)); + } + } + + private HttpHeaders prepareHeaders() { + HttpHeaders headers = new HttpHeaders(); + headers.add(HttpHeaders.AUTHORIZATION, "Basic " + Base64Utils.encodeToString((this.user + ":" + this.passwd).getBytes())); + headers.add(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON_VALUE); + headers.add(HttpHeaders.ACCEPT, MediaType.APPLICATION_OCTET_STREAM_VALUE); + headers.add(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_UTF8_VALUE); + headers.add("X-ECOMP-InstanceID", this.instanceId); + + return headers; + } + + private RequestEntity.HeadersBuilder prepareHeaders(RequestEntity.HeadersBuilder theBuilder) { + return theBuilder + .header(HttpHeaders.AUTHORIZATION, "Basic " + Base64Utils.encodeToString((this.user + ":" + this.passwd).getBytes())) + .header(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON_VALUE) + .header(HttpHeaders.ACCEPT, MediaType.APPLICATION_OCTET_STREAM_VALUE) + .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_UTF8_VALUE) + .header("X-ECOMP-InstanceID", this.instanceId); + } + + public <T> Future<T> fetch(String theRef, Class<T> theContentType) { + return exchange(theRef, HttpMethod.GET, new HttpEntity(prepareHeaders()), theContentType); + } + + public Future<JSONObject> post(String theRef, JSONObject thePost) { + return exchange(theRef, HttpMethod.POST, new HttpEntity<JSONObject>(thePost, prepareHeaders()), JSONObject.class); + } + + public Future<JSONObject> post(String theRef, UnaryOperator<RequestEntity.HeadersBuilder> theHeadersBuilder, JSONObject thePost) { + RequestEntity.BodyBuilder builder = RequestEntity.post(refUri(theRef)); + theHeadersBuilder.apply(builder); + + return exchange(theRef, HttpMethod.POST, builder.body(thePost), JSONObject.class); + } + + public Future<JSONObject> delete(String theRef, UnaryOperator<RequestEntity.HeadersBuilder> theHeadersBuilder) { + + RequestEntity.HeadersBuilder builder = RequestEntity.delete(refUri(theRef)); + theHeadersBuilder.apply(builder); + + return exchange(theRef, HttpMethod.DELETE, builder.build(), JSONObject.class); + } + + public <T> Future<T> exchange(String theRef, HttpMethod theMethod, HttpEntity theRequest, Class<T> theResponseType) { + + AsyncRestTemplate restTemplate = new AsyncRestTemplate(); + + List<HttpMessageConverter<?>> converters = restTemplate.getMessageConverters(); + converters.add(0, new JSONHttpMessageConverter()); + restTemplate.setMessageConverters(converters); + + 
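+ //note: the JSON converter is registered at index 0 so it takes precedence over the default
+ //converters for JSON payloads; the interceptor added below computes a Content-MD5 header
+ //over POST bodies, apparently required by the SDC artifact endpoints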
restTemplate.setInterceptors(Collections.singletonList(new ContentMD5Interceptor())); + ASDCFuture<T> result = new ASDCFuture<T>(); + String uri = this.rootUri + theRef; + try { + restTemplate + .exchange(uri, theMethod, theRequest, theResponseType) + .addCallback(result.callback); + } + catch (RestClientException rcx) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Failed to fetch {} {}", uri, rcx); + return Futures.failedFuture(rcx); + } + catch (Exception x) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Failed to fetch {} {}", uri, x); + return Futures.failedFuture(x); + } + + return result; + } + + + + public class ASDCFuture<T> + extends Futures.BasicFuture<T> { + + private boolean http404toEmpty = false; + + ASDCFuture() { + } + + public ASDCFuture setHttp404ToEmpty(boolean doEmpty) { + this.http404toEmpty = doEmpty; + return this; + } + + ListenableFutureCallback<ResponseEntity<T>> callback = new ListenableFutureCallback<ResponseEntity<T>>() { + + public void onSuccess(ResponseEntity<T> theResult) { + ASDCFuture.this.result(theResult.getBody()); + } + + public void onFailure(Throwable theError) { + if (theError instanceof HttpClientErrorException) { + // if (theError.getRawStatusCode() == 404 && this.http404toEmpty) + // ASDCFuture.this.result(); //the result is of type T ... + // else + ASDCFuture.this.cause(new ASDCException((HttpClientErrorException)theError)); + } + else { + ASDCFuture.this.cause(theError); + } + } + }; + + } + + public class ContentMD5Interceptor implements AsyncClientHttpRequestInterceptor { + + @Override + public ListenableFuture<ClientHttpResponse> intercept( + HttpRequest theRequest, byte[] theBody, AsyncClientHttpRequestExecution theExecution) + throws IOException { + if (HttpMethod.POST == theRequest.getMethod()) { + HttpHeaders headers = theRequest.getHeaders(); + headers.add("Content-MD5", Base64Utils.encodeToString( + //DigestUtils.md5Digest(theBody))); + DigestUtils.md5Hex(theBody).getBytes())); + + } + return theExecution.executeAsync(theRequest, theBody); + } + } + + public static void main(String[] theArgs) throws Exception { + + CommandLineParser parser = new BasicParser(); + + String user_id = "jh0003"; + + Options options = new Options(); + options.addOption(OptionBuilder + .withArgName("target") + .withLongOpt("target") + .withDescription("target asdc system") + .hasArg() + .isRequired() + .create('t') ); + + options.addOption(OptionBuilder + .withArgName("action") + .withLongOpt("action") + .withDescription("one of: list, get, getartifact, checkin, checkout") + .hasArg() + .isRequired() + .create('a') ); + + options.addOption(OptionBuilder + .withArgName("assetType") + .withLongOpt("assetType") + .withDescription("one of resource, service, product") + .hasArg() + .isRequired() + .create('k') ); //k for 'kind' .. 
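+ //illustrative invocation (host, ids and filter values are placeholders):
+ //  java ... ASDC -t http://sdc.example.com:8080 -a list -k resource -f '{"category":"Template"}'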
+ + options.addOption(OptionBuilder + .withArgName("assetId") + .withLongOpt("assetId") + .withDescription("asset uuid") + .hasArg() + .create('u') ); //u for 'uuid' + + options.addOption(OptionBuilder + .withArgName("artifactId") + .withLongOpt("artifactId") + .withDescription("artifact uuid") + .hasArg() + .create('s') ); //s for 'stuff' + + options.addOption(OptionBuilder + .withArgName("listFilter") + .withLongOpt("listFilter") + .withDescription("filter for list operations") + .hasArg() + .create('f') ); //f for 'filter' + + CommandLine line = null; + try { + line = parser.parse(options, theArgs); + } + catch(ParseException exp) { + errLogger.log(LogLevel.ERROR, ASDC.class.getName(), exp.getMessage()); + new HelpFormatter().printHelp("asdc", options); + return; + } + + ASDC asdc = new ASDC(); + asdc.setUri(new URI(line.getOptionValue("target"))); + + String action = line.getOptionValue("action"); + if (action.equals("list")) { + JSONObject filterInfo = new JSONObject( + line.hasOption("listFilter") ? + line.getOptionValue("listFilter") : "{}"); + JSONArray assets = + asdc.getAssets(ASDC.AssetType.valueOf(line.getOptionValue("assetType")), JSONArray.class, + filterInfo.optString("category", null), filterInfo.optString("subCategory", null)) + .waitForResult(); + for (int i = 0; i < assets.length(); i++) { + debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),"> {}", assets.getJSONObject(i).toString(2)); + } + } + else if (action.equals("get")) { + debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(), + asdc.getAsset(ASDC.AssetType.valueOf(line.getOptionValue("assetType")), + UUID.fromString(line.getOptionValue("assetId")), + JSONObject.class) + .waitForResult() + .toString(2) + ); + } + else if (action.equals("getartifact")) { + debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(), + asdc.getAssetArtifact(ASDC.AssetType.valueOf(line.getOptionValue("assetType")), + UUID.fromString(line.getOptionValue("assetId")), + UUID.fromString(line.getOptionValue("artifactId")), + String.class) + .waitForResult() + ); + } + else if (action.equals("checkin")) { + debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(), + asdc.cycleAsset(ASDC.AssetType.valueOf(line.getOptionValue("assetType")), + UUID.fromString(line.getOptionValue("assetId")), + ASDC.LifecycleState.Checkin, + user_id, + "cli op") + .waitForResult() + .toString() + ); + } + else if (action.equals("checkout")) { + debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(), + asdc.cycleAsset(ASDC.AssetType.valueOf(line.getOptionValue("assetType")), + UUID.fromString(line.getOptionValue("assetId")), + ASDC.LifecycleState.Checkout, + user_id, + "cli op") + .waitForResult() + .toString() + ); + } + else if (action.equals("cleanup")) { + JSONArray resources = asdc.getResources() + .waitForResult(); + debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),"Got {} resources", resources.length()); + + // vfcmt cleanup + for (int i = 0; i < resources.length(); i++) { + + JSONObject resource = resources.getJSONObject(i); + + if (resource.getString("resourceType").equals("VFCMT") && + resource.getString("name").contains("test")) { + + debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),"undocheckout for {}", resource.getString("uuid")); + + try { + asdc.cycleAsset(AssetType.resource, UUID.fromString(resource.getString("uuid")), LifecycleState.undocheckout, user_id, null) + .waitForResult(); + } + catch (Exception x) { + debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),"** {}", x); + } + } + } + + } + else { + try { + debugLogger.log(LogLevel.DEBUG, 
ASDC.class.getName(), + asdc.createVFCMT() + .withName("Clonator") + .withDescription("Clone operation target 06192017") + .withVendorName("CloneInc") + .withVendorRelease("1.0") + .withTags("clone") + .withOperator(user_id) + .execute() + .waitForResult() + .toString() + ); + } + catch(Exception x) { + debugLogger.log(LogLevel.DEBUG, ASDC.class.getName(),"Failed to create VFCMT: {}", x); + } + } + } +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCController.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCController.java new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCController.java diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCEngine.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCEngine.java new file mode 100644 index 0000000..73c7601 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCEngine.java @@ -0,0 +1,25 @@ +package org.onap.sdc.dcae.catalog.asdc; + +import org.onap.sdc.dcae.composition.util.SystemProperties; +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.context.annotation.Bean; + +@SpringBootApplication +public class ASDCEngine { + + /** + * Creates and returns a new instance of a {@link SystemProperties} class. + * + * @return New instance of {@link SystemProperties}. + */ + @Bean + public SystemProperties systemProperties() { + return new SystemProperties(); + } + + public static void main(String[] args) { + SpringApplication.run(ASDCEngine.class, args); + } + +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCException.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCException.java new file mode 100644 index 0000000..659653d --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCException.java @@ -0,0 +1,18 @@ +package org.onap.sdc.dcae.catalog.asdc; + +import org.onap.sdc.dcae.errormng.BaseException; +import org.onap.sdc.dcae.errormng.RequestError; +import org.springframework.http.HttpStatus; +import org.springframework.web.client.HttpClientErrorException; + +public class ASDCException extends BaseException { + + ASDCException(HttpClientErrorException error) { + super(error); + } + + public ASDCException(HttpStatus status, RequestError re){ + super(status, re); + } + +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCUtils.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCUtils.java new file mode 100644 index 0000000..1d70627 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCUtils.java @@ -0,0 +1,448 @@ +package org.onap.sdc.dcae.catalog.asdc; + +import org.apache.commons.jxpath.JXPathContext; +import org.apache.commons.lang3.StringUtils; +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.catalog.commons.Actions; +import org.onap.sdc.dcae.catalog.commons.Future; +import org.onap.sdc.dcae.catalog.commons.Futures; +import org.onap.sdc.dcae.catalog.commons.Recycler; +import org.onap.sdc.dcae.checker.*; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.context.annotation.Scope; +import org.springframework.stereotype.Component; +import org.springframework.util.Base64Utils; + +import java.io.*; +import java.net.URI; +import java.util.*; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; + + +@Component("asdcutils") +@Scope("singleton") +@ConfigurationProperties(prefix="asdcutils") +public class ASDCUtils { + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + @Autowired + private ASDC asdc; + + @Autowired + private Blueprinter blueprint; + + public ASDCUtils() { + // Making sonar happy + } + + public ASDCUtils(URI theASDCURI) { + this(theASDCURI, null); + } + + public ASDCUtils(URI theASDCURI, URI theBlueprinterURI) { + this.asdc = new ASDC(); + this.asdc.setUri(theASDCURI); + if (theBlueprinterURI != null) { + this.blueprint = new Blueprinter(); + this.blueprint.setUri(theBlueprinterURI); + } + } + + public ASDCUtils(ASDC theASDC) { + this(theASDC, null); + } + + public ASDCUtils(ASDC theASDC, Blueprinter theBlueprinter) { + this.asdc = theASDC; + this.blueprint = theBlueprinter; + } + + public CloneAssetArtifactsAction cloneAssetArtifacts(ASDC.AssetType theAssetType, UUID theSourceId, UUID theTargetId) { + return new CloneAssetArtifactsAction(this.asdc, theAssetType, theSourceId, theTargetId); + } + + public static class CloneAssetArtifactsAction extends ASDC.ASDCAction<CloneAssetArtifactsAction, List<JSONObject>> { + + private ASDC.AssetType assetType; + private UUID sourceId, targetId; + + protected CloneAssetArtifactsAction(ASDC theASDC, ASDC.AssetType theAssetType, UUID theSourceId, UUID theTargetId) { + theASDC.super(new JSONObject()); + this.assetType = theAssetType; + this.sourceId = theSourceId; + this.targetId = theTargetId; + } + + protected CloneAssetArtifactsAction self() { + return this; + } + + public CloneAssetArtifactsAction withLabel(String theLabel) { + return with("artifactLabel", theLabel); + } + + protected String[] mandatoryInfoEntries() { + return new String[] {}; + } + + public Future<List<JSONObject>> execute() { + checkMandatory(); + + final Actions.Sequence<JSONObject> sequencer = new Actions.Sequence<JSONObject>(); + + new Actions.Sequence().add(super.asdc().getAssetArchiveAction(this.assetType, this.sourceId)).add(super.asdc().getAssetAction(this.assetType, this.sourceId, JSONObject.class)).execute().setHandler(assetFuture -> { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "*** {}", assetFuture.result()); + processArtifacts((List) assetFuture.result(), (JSONObject theInfo, byte[] theData) -> { + theInfo.remove("artifactChecksum"); + theInfo.remove("artifactUUID"); + theInfo.remove("artifactVersion"); + theInfo.remove("artifactURL"); + theInfo.put("description", theInfo.remove("artifactDescription")); + theInfo.put("payloadData", Base64Utils.encodeToString(theData)); + return theInfo; + }, null).forEach(artifactInfo -> sequencer.add(super.asdc().createAssetArtifact(this.assetType, this.targetId).withInfo(ASDC.merge(artifactInfo, this.info)).withOperator(this.operatorId))); + sequencer.execute(); + }); + + return sequencer.future(); 
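+ //note: the returned future completes only once the fetch/strip/re-upload sequence built above
+ //has executed; server-generated entries (artifactChecksum, artifactUUID, artifactVersion,
+ //artifactURL) are removed before each artifact is re-created against the target asset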
+ } + } //the Action class + + /* */ + private static JSONObject lookupArtifactInfo(JSONArray theArtifacts, String theName) { + + for (int i = 0; theArtifacts != null && i < theArtifacts.length(); i++) { + JSONObject artifactInfo = theArtifacts.getJSONObject(i); + if (theName.equals(artifactInfo.getString("artifactName"))) { + debugLogger.log(LogLevel.DEBUG, ASDCUtils.class.getName(), "Found artifact info {}", artifactInfo); + return artifactInfo; + } + } + + return null; + } + + private static byte[] extractArtifactData(InputStream theEntryStream) throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try { + byte[] buff = new byte[4096]; + int cnt = 0; + while ((cnt = theEntryStream.read(buff)) != -1) { + baos.write(buff, 0, cnt); + } + } finally { + baos.close(); + } + return baos.toByteArray(); + } + + /** + * Recycle a cdump, fetch all relevant ASDC artifacts, interact with Shu's toscalib service in order to generate + * a blueprint. No 'Action' object here as there is nothing to set up. + */ + public Future<Future<String>> buildBlueprint(Reader theCdump) { + + final Recycler recycler = new Recycler(); + Object template = null; + + try { + template = recycler.recycle(theCdump); + + } catch (Exception x) { + return Futures.failedFuture(x); + } + + JXPathContext jxroot = JXPathContext.newContext(template); + jxroot.setLenient(true); + + //based on the output of ASDCCatalog the node description will contain the UUID of the resource declaring it + List uuids = (List) StreamSupport.stream(Spliterators.spliteratorUnknownSize(jxroot.iterate("topology_template/node_templates/*/description"), 16), false).distinct().filter(desc -> desc != null) + //the desc contains the full URI and the resource uuid is the 5th path element + .map(desc -> desc.toString().split("/")[5]).collect(Collectors.toList()); + + //prepare fetching all archives/resource details + final Futures.Accumulator accumulator = new Futures.Accumulator(); + uuids.stream().forEach(uuid -> { + UUID rid = UUID.fromString((String) uuid); + accumulator.add(this.asdc.getAssetArchive(ASDC.AssetType.resource, rid)); + accumulator.add(this.asdc.getAsset(ASDC.AssetType.resource, rid, JSONObject.class)); + }); + + final byte[] templateData = recycler.toString(template).getBytes(/*"UTF-8"*/); + //retrieve all resource archives and details, prepare the blueprint service request and send it + return Futures.advance(accumulator.accumulate(), (List theArchives) -> { + Blueprinter.BlueprintAction action = blueprint.generateBlueprint(); + processArtifacts(theArchives, (JSONObject theInfo, byte[] theData) -> new JSONObject().put(theInfo.getString("artifactName").split("\\.")[0], Base64Utils.encodeToString(theData)), + (Stream<JSONObject> theAssetArtifacts) -> theAssetArtifacts.reduce(new JSONObject(), ASDC::merge)).forEach(artifactInfo -> action.withModelInfo(artifactInfo)); + + return action.withTemplateData(templateData).execute(); + }); + } + + public Future<Future<String>> buildBlueprintViaToscaLab(Reader theCdump) { + return processCdump(theCdump, (theTemplate, theArchives) -> { + Blueprinter.BlueprintAction action = blueprint.generateBlueprint(); + processArtifacts(theArchives, (JSONObject theInfo, byte[] theData) -> new JSONObject().put(theInfo.getString("artifactName").split("\\.")[0], Base64Utils.encodeToString(theData)), + (Stream<JSONObject> theAssetArtifacts) -> theAssetArtifacts.reduce(new JSONObject(), ASDC::merge)).forEach(artifactInfo -> action.withModelInfo(artifactInfo)); + + return 
action.withTemplateData(Recycler.toString(theTemplate).getBytes()).execute(); + + }); + } + + private static class Tracker implements TargetLocator { + + private static enum Position { + SCHEMA, TEMPLATE, TRANSLATE; + } + + private static final int Positions = Position.values().length; + + private List<Target> tgts = new ArrayList<Target>(3); + + public Tracker() { + clear(); + } + + public boolean addSearchPath(URI theURI) { + return false; + } + + public boolean addSearchPath(String thePath) { + return false; + } + + public Iterable<URI> searchPaths() { + return Collections.emptyList(); + } + + protected int position(String... theKeys) { + for (String key : theKeys) { + if ("schema".equals(key)) { + return Position.SCHEMA.ordinal(); + } + if ("template".equals(key)) { + return Position.TEMPLATE.ordinal(); + } + if ("translate".equals(key)) { + return Position.TRANSLATE.ordinal(); + } + } + return -1; + } + + public Target resolve(String theName) { + for (Target tgt : tgts) { + if (tgt != null && tgt.getName().equals(theName)) { + return tgt; + } + } + return null; + } + + public void track(JSONObject theInfo, final byte[] theData) { + String uri = theInfo.getString("artifactURL").split("/")[5]; + String name = theInfo.getString("artifactName"), desc = theInfo.getString("artifactDescription"), label = theInfo.getString("artifactLabel"); + int pos = position(desc, label); + + debugLogger.log(LogLevel.DEBUG, ASDCUtils.class.getName(), "Tracking {} at {}, {}", name, pos, theInfo.optString("artifactURL")); + + if (pos > -1) { + tgts.set(pos, new Target(name, URI.create("asdc:" + uri + "/" + name)) { + @Override + public Reader open(){ + return new BufferedReader(new InputStreamReader(new ByteArrayInputStream(theData))); + } + }); + } + } + + public boolean hasSchema() { + return tgts.get(Position.SCHEMA.ordinal()) != null; + } + + public Target schema() { + return tgts.get(Position.SCHEMA.ordinal()); + } + + public boolean hasTemplate() { + return tgts.get(Position.TEMPLATE.ordinal()) != null; + } + + public Target template() { + return tgts.get(Position.TEMPLATE.ordinal()); + } + + public boolean hasTranslation() { + return tgts.get(Position.TRANSLATE.ordinal()) != null; + } + + public Target translation() { + return tgts.get(Position.TRANSLATE.ordinal()); + } + + public void clear() { + if (tgts.isEmpty()) { + for (int i = 0; i < Positions; i++) { + tgts.add(null); + } + } else { + Collections.fill(tgts, null); + } + } + } + + private Checker buildChecker() { + try { + return new Checker(); + } catch (CheckerException cx) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "CheckerException while creating Checker {}", cx); + return null; + } + } + + public Future<Catalog> buildCatalog(Reader theCdump) { + + // + //the purpose of the tracking is to be able to resolve import references within the 'space' of an + //asset's artifacts + //processing order is important too so we 'order the targets: schema, template, translation + // + final Tracker tracker = new Tracker(); + final Catalog catalog = Checker.buildCatalog(); + + return processCdump(theCdump, (theTemplate, theArchives) -> { + + final Checker checker = buildChecker(); + if (checker == null) { + return null; + } + checker.setTargetLocator(tracker); + + processArtifacts(theArchives, (JSONObject theInfo, byte[] theData) -> { + tracker.track(theInfo, theData); + return (Catalog) null; + }, + // aggregation: this is where the actual processing takes place now that + // we have all the targets + (Stream<Catalog> theAssetArtifacts) 
-> { + //the stream is full of nulls, ignore it, work with the tracker + + try { + if (tracker.hasSchema()) { + checker.check(tracker.schema(), catalog); + } + if (tracker.hasTemplate()) { + checker.check(tracker.template(), catalog); + } + if (tracker.hasTranslation()) { + checker.check(tracker.translation(), catalog); + } + } catch (CheckerException cx) { + //got to do better than this + errLogger.log(LogLevel.ERROR, ASDC.class.getName(),"CheckerException while checking catalog:{}", cx); + } finally { + tracker.clear(); + } + return checker.catalog(); + }); + + Target cdump = new Target("cdump", URI.create("asdc:cdump")); + cdump.setTarget(theTemplate); + + validateCatalog(catalog, checker, cdump); + + return catalog; + }); + } + + private void validateCatalog(Catalog catalog, Checker checker, Target cdump) { + try { + checker.validate(cdump, catalog); + } catch (CheckerException cx) { + errLogger.log(LogLevel.ERROR, ASDC.class.getName(),"CheckerException while building catalog:{}", cx); + } + } + + /* The common process of recycling, retrieving all related artifacts and then doing 'something' */ + private <T> Future<T> processCdump(Reader theCdump, BiFunction<Object, List, T> theProcessor) { + + final Recycler recycler = new Recycler(); + Object template = null; + try { + template = recycler.recycle(theCdump); + + } catch (Exception x) { + return Futures.failedFuture(x); + } + + JXPathContext jxroot = JXPathContext.newContext(template); + jxroot.setLenient(true); + + //based on the output of ASDCCatalog the node description will contain the UUID of the resource declaring it + //the desc contains the full URI and the resource uuid is the 5th path element + List uuids = (List) StreamSupport.stream(Spliterators.spliteratorUnknownSize(jxroot.iterate("topology_template/node_templates/*/description"), 16), false).distinct().filter(desc -> desc != null) + .map(desc -> desc.toString().split("/")[5]).collect(Collectors.toList()); + + //serialized fetch version + final Actions.Sequence sequencer = new Actions.Sequence(); + uuids.stream().forEach(uuid -> { + UUID rid = UUID.fromString((String) uuid); + sequencer.add(this.asdc.getAssetArchiveAction(ASDC.AssetType.resource, rid)); + sequencer.add(this.asdc.getAssetAction(ASDC.AssetType.resource, rid, JSONObject.class)); + }); + + final Object tmpl = template; + return Futures.advance(sequencer.execute(), (List theArchives) -> theProcessor.apply(tmpl, theArchives)); + } + + private static <T> Stream<T> processArtifacts(List theArtifactData, BiFunction<JSONObject, byte[], T> theProcessor, Function<Stream<T>, T> theAggregator) { + + Stream.Builder<T> assetBuilder = Stream.builder(); + + for (int i = 0; i < theArtifactData.size(); i = i + 2) { //cute old style loop + + JSONObject assetInfo = (JSONObject) theArtifactData.get(i + 1); + byte[] assetData = (byte[]) theArtifactData.get(i + 0); + + JSONArray artifacts = assetInfo.optJSONArray("artifacts"); + + Stream.Builder<T> artifactBuilder = Stream.builder(); + + try (ZipInputStream zipper = new ZipInputStream(new ByteArrayInputStream(assetData))){ + //we process the artifacts in the order they are stored in the archive .. 
fugly + for (ZipEntry zipped = zipper.getNextEntry(); zipped != null; zipped = zipper.getNextEntry()) { + JSONObject artifactInfo = lookupArtifactInfo(artifacts, StringUtils.substringAfterLast(zipped.getName(), "/")); + if (artifactInfo != null) { + artifactBuilder.add(theProcessor.apply(artifactInfo, extractArtifactData(zipper))); + } + zipper.closeEntry(); + } + } catch (IOException iox) { + errLogger.log(LogLevel.ERROR, ASDC.class.getName(), "IOException: {}", iox); + return null; + } + + if (theAggregator != null) { + assetBuilder.add(theAggregator.apply(artifactBuilder.build())); + } else { + artifactBuilder.build().forEach(entry -> assetBuilder.add(entry)); + } + } + + return assetBuilder.build(); + } +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCUtilsController.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCUtilsController.java new file mode 100644 index 0000000..4432712 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/ASDCUtilsController.java @@ -0,0 +1,76 @@ +package org.onap.sdc.dcae.catalog.asdc; + +import java.io.StringReader; + +import java.util.UUID; +import java.util.Map; +import java.util.List; +import java.util.concurrent.Callable; + +import java.net.URI; +import java.net.URISyntaxException; + +import javax.servlet.http.HttpServletRequest; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.springframework.beans.BeansException; + +import org.springframework.web.bind.annotation.RestController; + +import org.onap.sdc.dcae.catalog.asdc.ASDC; +import org.onap.sdc.dcae.catalog.asdc.ASDCUtils; +import org.onap.sdc.dcae.catalog.asdc.ASDCUtilsController; + +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestHeader; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.http.HttpStatus; +import org.springframework.http.HttpHeaders; +import org.springframework.http.ResponseEntity; + +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; + +import org.springframework.boot.context.properties.ConfigurationProperties; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; + +import org.json.JSONObject; + + +@RestController +@ConfigurationProperties(prefix="asdcUtilsController") +public class ASDCUtilsController implements ApplicationContextAware { + + private ApplicationContext appCtx; + private OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + //Constants// + private static String NOT_CERTIFIED_CHECKOUT = "NOT_CERTIFIED_CHECKOUT"; + private static String NOT_CERTIFIED_CHECKIN = "NOT_CERTIFIED_CHECKIN"; + private static String CERTIFICATION_IN_PROGRESS = "CERTIFICATION_IN_PROGRESS"; + private static String CERTIFIED = "CERTIFIED"; + + + public void setApplicationContext(ApplicationContext theCtx) throws BeansException { + this.appCtx = theCtx; + } + + @PostConstruct + public void initController() { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(),"initASDCUtilsController"); + + //Done + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(),"ASDCUtilsController started"); + } + + @PreDestroy + public void cleanupController() { + debugLogger.log(LogLevel.DEBUG, 
this.getClass().getName(),"cleanupASDCUtilsController"); + } + +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/Blueprinter.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/Blueprinter.java new file mode 100644 index 0000000..3e78d38 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/Blueprinter.java @@ -0,0 +1,76 @@ +package org.onap.sdc.dcae.catalog.asdc; + +import java.net.URI; + +import java.util.Collections; + +import org.json.JSONObject; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.catalog.commons.Action; +import org.onap.sdc.dcae.catalog.commons.Future; +import org.onap.sdc.dcae.catalog.commons.Http; +import org.json.JSONArray; + +import org.springframework.util.Base64Utils; + +import org.springframework.http.MediaType; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.HttpEntity; +import org.springframework.stereotype.Component; +import org.springframework.context.annotation.Scope; +import org.springframework.boot.context.properties.ConfigurationProperties; + +@Component("blueprinter") +@Scope("singleton") +@ConfigurationProperties(prefix="blueprinter") +public class Blueprinter { + + + private URI serviceUri; + private OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + + public Blueprinter() { + } + + public void setUri(URI theUri) { + this.serviceUri = theUri; + } + + public BlueprintAction generateBlueprint() { + return new BlueprintAction(); + } + + public class BlueprintAction implements Action<String> { + + private JSONObject body = new JSONObject(); + + + protected BlueprintAction() { + } + + public BlueprintAction withModelData(byte[] theSchema, byte[] theTemplate, byte[] theTranslation) { + return this; + } + + public BlueprintAction withModelInfo(JSONObject theModelInfo) { + body.append("models", theModelInfo); + return this; + } + + public BlueprintAction withTemplateData(byte[] theData) { + body.put("template", Base64Utils.encodeToString(theData)); + return this; + } + + public Future<String> execute() { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Blueprinter::execute() | PAYLOAD to TOSCA_LAB={}", body.toString()); + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.setAccept(Collections.singletonList(MediaType.APPLICATION_JSON)); + return Http.exchange(serviceUri.toString(), HttpMethod.POST, new HttpEntity<String>(body.toString(), headers), String.class); + } + } +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/Cloudify.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/Cloudify.java new file mode 100644 index 0000000..3208bd2 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/catalog/asdc/Cloudify.java @@ -0,0 +1,249 @@ +package org.onap.sdc.dcae.catalog.asdc; + +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.stream.Stream; + +import org.apache.commons.jxpath.JXPathContext; +import org.apache.commons.jxpath.Pointer; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.catalog.commons.ListBuilder; +import 
org.onap.sdc.dcae.catalog.commons.MapBuilder; +import org.onap.sdc.dcae.checker.Catalog; +import org.onap.sdc.dcae.checker.Construct; +import org.onap.sdc.dcae.checker.Target; + +import com.google.common.collect.Lists; +import org.yaml.snakeyaml.DumperOptions; +import org.yaml.snakeyaml.Yaml; + + +public class Cloudify { + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + Catalog catalog; + + public Cloudify(Catalog c) + { + catalog = c; + } + public class ModelTemplate { + public Map<String, Map> template; + public JXPathContext jx; + public String node; + public ModelTemplate(Map<String, Map> t, JXPathContext j, String node_name) + { + template = t; + jx = j; + node = node_name; + } + + public Object getPropValue(JXPathContext jx_src, String name) + { + try{ + Object ret = jx_src.getValue("properties/"+name+"/get_input"); + if (ret==null) + return jx_src.getValue("properties/"+name); + return getDefaultPropValue((String)ret); + } + catch (RuntimeException e) { + + } + try{ + return jx_src.getValue("properties/"+name+""); + } + catch (RuntimeException e) { + return null; + } + } + + public Object getDefaultPropValue(String name) { + try { + return jx.getValue("//"+name+"/default"); + } + catch (RuntimeException e) { + return null; + } + + } + } + + public class ModelTranslate { + public Map<String, Map> template; + public JXPathContext jx; + public String node; + + public ModelTranslate(Map<String, Map> t, JXPathContext j, String node_name) + { + template = t; + jx = j; + node = node_name; + } + + public String getTranslateName() + { + Map<String, Object> node_temp = (Map<String, Object>)jx.getValue("//node_templates"); + Iterator it = node_temp.keySet().iterator(); + if (it.hasNext()) + return node + "_"+ it.next(); + else + return null; + } + + public Map<String, Object> translate(JXPathContext jx_src, Map<String, Map> model_lib, String node_name) + { + for (Iterator prop_iter = jx.iteratePointers("//*[@get_input]"); prop_iter.hasNext();) { + + Pointer p = (Pointer)prop_iter.next(); + JXPathContext prop_path = jx.getRelativeContext(p); + + ModelTemplate src_model =(ModelTemplate) model_lib.get(node_name).get("model"); + + Object temp_o = src_model.getPropValue(jx_src, (String) prop_path.getValue("get_input")); + //prop_path.setValue(".", temp_o); + jx.setValue(p.asPath(), temp_o); + } + +// JXPathContext jx_src = JXPathContext.newContext(src); + for (Iterator req_iter = jx_src.iteratePointers("//*/node"); req_iter.hasNext();) { + Pointer p = (Pointer)req_iter.next(); + String req_node_name = (String)jx_src.getValue(p.asPath()); + + for (Iterator it = model_lib.keySet().iterator(); it.hasNext();) { + String key = (String) it.next(); + if (key.indexOf(req_node_name) <0 ) + continue; + ModelTranslate tt = (ModelTranslate) model_lib.get(key).get("translate"); + if (tt == null) + req_node_name = null; + else + { + req_node_name = tt.getTranslateName(); + } + break; + } + + } + + String tn_name = getTranslateName(); + + if (tn_name == null) + return (Map<String, Object>)jx.getValue("//node_templates"); + else + return (new MapBuilder<String, Object>().put(tn_name, jx.getValue("//node_templates/*")).build()); + } + + } + + public ModelTranslate findTranslateTemplate(String ty, String node) { + for (Target t: catalog.targets()) { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTranslateTemplate: target {}", t.getName()); + if (t.getName().startsWith("translat") == 
false) { + continue; + } + + Map<String, Map>temp = (Map<String, Map>)t.getTarget(); + + JXPathContext jxroot = JXPathContext.newContext(temp); + try{ + String sub_type = (String)jxroot.getValue("topology_template/substitution_mappings/node_type"); + if (sub_type != null && sub_type.equals(ty)) { + return new ModelTranslate(temp, jxroot, node); + } + } + catch (RuntimeException e) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "translate template {} does not have substitution mapping section", t.getName()); + } + } + return null; + } + + public ModelTemplate findModelTemplate(String ty, String node) { + for (Target t: catalog.targets()) { + + if (t.getName().startsWith("templat") == false) + continue; + Map<String, Map>temp = (Map<String, Map>)t.getTarget(); + + JXPathContext jxroot = JXPathContext.newContext(temp); + for (Iterator it = jxroot.iterate("topology_template/node_templates/*/type"); it.hasNext();) { + String node_type = (String)it.next(); + if (node_type != null && node_type.equals(ty)) { + return new ModelTemplate(temp, jxroot, node); + } + } + } + return null; + } + + public Map<String, Object> createBlueprint() { + + Map<String, Map> target_temp = null; + for (Target t: catalog.targets()) { + + if (t.getName().equals("cdump")) { + target_temp = catalog.getTargetTemplates(t, Construct.Node); + } + } + + JXPathContext jxroot = JXPathContext.newContext(target_temp); + + Map<String, Object> output_temp = new HashMap<String, Object>(); + Map<String, Map> model_lib = new HashMap<String, Map>(); + + for (Iterator iter = target_temp.keySet().iterator(); iter.hasNext();) + { + String node_key = (String)iter.next(); + //jxroot.getVariables().declareVariable("name", target_temp.get(node_key)); + //String node_type = (String)jxroot.getValue("$name/type"); + String node_type = (String)jxroot.getValue(node_key+"/type"); + + ModelTranslate t_temp = findTranslateTemplate(node_type, node_key); + ModelTemplate t_model = findModelTemplate(node_type, node_key); + + model_lib.put(node_key, new MapBuilder() + .put("model", t_model) + .put("translate", t_temp) + .build()); + } + + for (Iterator iter = model_lib.keySet().iterator(); iter.hasNext();) { + String node_key = (String) iter.next(); + ModelTranslate t = (ModelTranslate) model_lib.get(node_key).get("translate"); + JXPathContext jxnode = jxroot.getRelativeContext(jxroot.getPointer(node_key)); + if (t != null) { + Map<String, Object> t_output =t.translate(jxnode, model_lib, node_key); + if (t_output != null) + output_temp.putAll(t_output); + } + + } + + return new MapBuilder<String, Object>() + .put("tosca_definitions_version", new String("cloudify_dsl_1_3")) + .put("imports", new ListBuilder() + .add(new MapBuilder() + .put("cloudify", + "http://www.getcloudify.org/spec/cloudify/3.4/types.yaml") + .build()) + .build()) + .put("node_templates", output_temp) + .build(); + + } + + public String createBlueprintDocument() { + DumperOptions options = new DumperOptions(); + options.setWidth(1000000); + Yaml yaml = new Yaml(options); + return yaml.dump(createBlueprint()); + } +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/client/ISdcClient.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/client/ISdcClient.java new file mode 100644 index 0000000..554991a --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/client/ISdcClient.java @@ -0,0 +1,47 @@ +package org.onap.sdc.dcae.client; + +import org.onap.sdc.dcae.composition.restmodels.CreateVFCMTRequest; +import 
org.onap.sdc.dcae.composition.restmodels.sdc.*; +import org.onap.sdc.dcae.composition.restmodels.ReferenceUUID; +import org.onap.sdc.dcae.enums.AssetType; + +import java.util.List; + +public interface ISdcClient { + + ResourceDetailed getResource(String uuid, String requestId) throws Exception; + + ServiceDetailed getService(String uuid, String requestId) throws Exception; + + List<Resource> getResources(String resourceType, String category, String subcategory, String requestId) throws Exception; + + List<Service> getServices(String requestId) throws Exception; + + String addExternalMonitoringReference(String userId, CreateVFCMTRequest resource, ReferenceUUID vfiUuid, String requestId); + + void deleteExternalMonitoringReference(String userId, String context, String uuid, String vfiName, String vfcmtUuid, String requestId); + + ResourceDetailed createResource(String userId, CreateVFCMTRequest resource, String requestId) throws Exception; + + ResourceDetailed changeResourceLifecycleState(String userId, String uuid, String lifecycleOperation, String userRemarks, String requestId) throws Exception; + + ServiceDetailed changeServiceLifecycleState(String userId, String uuid, String lifecycleOperation, String userRemarks, String requestId) throws Exception; + + Asset changeAssetLifecycleState(String userId, String uuid, String lifecycleOperation, String userRemarks, AssetType assetType, String requestId) throws Exception; + + String getResourceArtifact(String resourceUuid, String artifactUuid, String requestId) throws Exception; + + Artifact createResourceArtifact(String userId, String resourceUuid, Artifact artifact, String requestId) throws Exception; + + Artifact updateResourceArtifact(String userId, String resourceUuid, Artifact artifact, String requestId) throws Exception; + + void deleteResourceArtifact(String userId, String resourceUuid, String artifactId, String requestId) throws Exception; + + Artifact createVfInstanceArtifact(String userId, String serviceUuid, String normalizedInstanceName, Artifact artifact, String requestId) throws Exception; + + Artifact updateVfInstanceArtifact(String userId, String serviceUuid, String normalizedInstanceName, Artifact artifact, String requestId) throws Exception; + + ExternalReferencesMap getMonitoringReferences(String context, String uuid, String version, String requestId); + + void deleteInstanceResourceArtifact(String userId, String context, String serviceUuid, String normalizedVfiName, String artifactUuid, String requestId); +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/client/SdcRestClient.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/client/SdcRestClient.java new file mode 100644 index 0000000..058d9c7 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/client/SdcRestClient.java @@ -0,0 +1,221 @@ +package org.onap.sdc.dcae.client; + +import org.apache.commons.codec.digest.DigestUtils; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.message.BasicHeader; +import org.onap.sdc.dcae.composition.restmodels.CreateVFCMTRequest; +import org.onap.sdc.dcae.composition.restmodels.ReferenceUUID; +import org.onap.sdc.dcae.composition.restmodels.sdc.*; +import org.onap.sdc.dcae.composition.util.DcaeBeConstants; +import org.onap.sdc.dcae.composition.util.SystemProperties; +import org.onap.sdc.dcae.enums.AssetType; +import org.onap.sdc.dcae.enums.SdcConsumerInfo; +import org.onap.sdc.dcae.utils.Normalizers; +import 
org.onap.sdc.dcae.utils.SDCResponseErrorHandler; +import org.onap.sdc.dcae.utils.SdcRestClientUtils; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.*; +import org.springframework.http.client.HttpComponentsClientHttpRequestFactory; +import org.springframework.stereotype.Component; +import org.springframework.util.Base64Utils; +import org.springframework.web.client.*; + +import javax.annotation.PostConstruct; +import java.net.URI; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +@Component("sdcrestclient") +public class SdcRestClient implements ISdcClient { + + @Autowired + private SystemProperties systemProperties; + + private static final String SLASH = "/"; + private static final String ECOMP_INSTANCE_ID_HEADER = "X-ECOMP-InstanceID"; + private static final String ECOMP_REQUEST_ID_HEADER = "X-ECOMP-RequestID"; + private static final String USER_ID_HEADER = "USER_ID"; + private static final String RESOURCES_PATH = "resources"; + private static final String SERVICES_PATH = "services"; + private static final String ARTIFACTS_PATH = "artifacts"; + private static final String CONTENT_MD5_HEADER = "Content-MD5"; + private static final String RESOURCE_INSTANCES_PATH = "resourceInstances"; + private static final String LIFECYCLE_STATE_PATH = "lifecycleState/{lifecycleOperation}"; + private static final String METADATA_PATH = "metadata"; + private static final String VERSION_PATH = "version"; + private static final String MONITORING_REFERENCES_PATH = "externalReferences/monitoring"; + + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private String uri; + + private RestTemplate client; + + @PostConstruct + private void init() { + URI configUri = URI.create(systemProperties.getProperties().getProperty(DcaeBeConstants.Config.URI)); + EnumMap<SdcConsumerInfo, String> userInfo = SdcRestClientUtils.extractConsumerInfoFromUri(configUri); + CloseableHttpClient httpClient = HttpClientBuilder.create().setDefaultHeaders(defaultHeaders(userInfo)).build(); + HttpComponentsClientHttpRequestFactory requestFactory = new HttpComponentsClientHttpRequestFactory(); + requestFactory.setHttpClient(httpClient); + client = new RestTemplate(requestFactory); + client.setErrorHandler(new SDCResponseErrorHandler()); + uri = userInfo.get(SdcConsumerInfo.CATALOG_URL); + } + + private List<BasicHeader> defaultHeaders(EnumMap<SdcConsumerInfo, String> userInfo) { + List<BasicHeader> headers = new ArrayList<>(); + headers.add(new BasicHeader(HttpHeaders.AUTHORIZATION, userInfo.get(SdcConsumerInfo.AUTH))); + headers.add(new BasicHeader(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON_VALUE)); + headers.add(new BasicHeader(HttpHeaders.ACCEPT, MediaType.APPLICATION_OCTET_STREAM_VALUE)); + headers.add(new BasicHeader(ECOMP_INSTANCE_ID_HEADER, userInfo.get(SdcConsumerInfo.INSTANCE_ID))); + return headers; + } + + public ResourceDetailed getResource(String uuid, String requestId) { + String url = buildRequestPath(RESOURCES_PATH, uuid, METADATA_PATH); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Get resource from SDC. 
URL={}", url); + return getObject(url, requestId, ResourceDetailed.class); + } + + public ServiceDetailed getService(String uuid, String requestId) { + String url = buildRequestPath(SERVICES_PATH, uuid, METADATA_PATH); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Get service from SDC. URL={}", url); + return getObject(url, requestId, ServiceDetailed.class); + } + + public List<Resource> getResources(String resourceType, String category, String subcategory, String requestId) { + String url = buildRequestPath(RESOURCES_PATH, SdcRestClientUtils.buildResourceFilterQuery(resourceType, category, subcategory)); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Get resources from SDC. URL={}", url); + return Arrays.asList(getObject(url, requestId, Resource[].class)); + } + + public List<Service> getServices(String requestId) { + String url = buildRequestPath(SERVICES_PATH); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Get services from SDC. URL={}", url); + return Arrays.asList(getObject(url, requestId, Service[].class)); + } + + public String addExternalMonitoringReference(String userId, CreateVFCMTRequest resource, ReferenceUUID vfcmtUuid, String requestId) { + String url = buildRequestPath(resource.getContextType(), resource.getServiceUuid(), RESOURCE_INSTANCES_PATH, + Normalizers.normalizeComponentInstanceName(resource.getVfiName()), MONITORING_REFERENCES_PATH); + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Connecting service id {} name {} to vfcmt {} URL={}", + resource.getServiceUuid(), resource.getVfiName(), vfcmtUuid.getReferenceUUID(), url); + + return client.postForObject(url, new HttpEntity<>(vfcmtUuid, postResourceHeaders(userId, requestId)), + String.class); + } + + public void deleteExternalMonitoringReference(String userId, String context, String uuid, String normalizeVfiName, String vfcmtUuid, String requestId) { + String url = buildRequestPath(context, uuid, RESOURCE_INSTANCES_PATH, + normalizeVfiName, MONITORING_REFERENCES_PATH, vfcmtUuid); + client.exchange(url, HttpMethod.DELETE, new HttpEntity(postResourceHeaders(userId, requestId)), String.class); + } + + public ResourceDetailed createResource(String userId, CreateVFCMTRequest resource, String requestId) { + String url = buildRequestPath(RESOURCES_PATH); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Create SDC resource with name {} URL={}", resource.getName(), url); + return client.postForObject(url, new HttpEntity<>(resource, postResourceHeaders(userId, requestId)), ResourceDetailed.class); + } + + public ResourceDetailed changeResourceLifecycleState(String userId, String uuid, String lifecycleOperation, String userRemarks, String requestId) { + String url = buildRequestPath(RESOURCES_PATH, uuid, LIFECYCLE_STATE_PATH); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Change SDC resource lifecycle state ({}). URL={}", lifecycleOperation, url); + return client.postForObject(url, new HttpEntity<>(SdcRestClientUtils.buildUserRemarksObject(userRemarks), postResourceHeaders(userId, requestId)), ResourceDetailed.class, lifecycleOperation); + } + + public ServiceDetailed changeServiceLifecycleState(String userId, String uuid, String lifecycleOperation, String userRemarks, String requestId) { + String url = buildRequestPath(SERVICES_PATH, uuid, LIFECYCLE_STATE_PATH); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Change SDC service lifecycle state ({}). 
URL={}", lifecycleOperation, url); + return client.postForObject(url, new HttpEntity<>(SdcRestClientUtils.buildUserRemarksObject(userRemarks), postResourceHeaders(userId, requestId)), ServiceDetailed.class, lifecycleOperation); + } + + public Asset changeAssetLifecycleState(String userId, String uuid, String lifecycleOperation, String userRemarks, AssetType assetType, String requestId) { + return AssetType.RESOURCE == assetType ? changeResourceLifecycleState(userId, uuid, lifecycleOperation, userRemarks, requestId) : changeServiceLifecycleState(userId, uuid, lifecycleOperation, userRemarks, requestId); + } + + public String getResourceArtifact(String resourceUuid, String artifactUuid, String requestId) { + String url = buildRequestPath(RESOURCES_PATH, resourceUuid, ARTIFACTS_PATH, artifactUuid); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Get resource artifact from SDC. URL={}", url); + return getObject(url, requestId, String.class); + } + + public Artifact createResourceArtifact(String userId, String resourceUuid, Artifact artifact, String requestId) throws Exception { + String url = buildRequestPath(RESOURCES_PATH, resourceUuid, ARTIFACTS_PATH); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Create SDC resource artifact. URL={}", url); + String artifactData = SdcRestClientUtils.artifactToString(artifact); + return client.postForObject(url, new HttpEntity<>(artifactData, postArtifactHeaders(userId, artifactData, requestId)), Artifact.class); + } + + public Artifact updateResourceArtifact(String userId, String resourceUuid, Artifact artifact, String requestId) throws Exception { + String url = buildRequestPath(RESOURCES_PATH, resourceUuid, ARTIFACTS_PATH, artifact.getArtifactUUID()); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Update SDC resource artifact. URL={}", url); + String artifactData = SdcRestClientUtils.artifactToString(artifact); + return client.postForObject(url, new HttpEntity<>(artifactData, postArtifactHeaders(userId, artifactData, requestId)), Artifact.class); + } + + public void deleteResourceArtifact(String userId, String resourceUuid, String artifactId, String requestId) { + String url = buildRequestPath(RESOURCES_PATH, resourceUuid, ARTIFACTS_PATH, artifactId); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Delete SDC resource artifact. URL={}", url); + client.exchange(url, HttpMethod.DELETE, new HttpEntity(postResourceHeaders(userId, requestId)), Artifact.class); + } + + public Artifact createVfInstanceArtifact(String userId, String serviceUuid, String normalizedInstanceName, Artifact artifact, String requestId) throws Exception { + String url = buildRequestPath(SERVICES_PATH, serviceUuid, RESOURCE_INSTANCES_PATH, normalizedInstanceName, ARTIFACTS_PATH); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Create SDC resource instance artifact. URL={}", url); + String artifactData = SdcRestClientUtils.artifactToString(artifact); + return client.postForObject(url, new HttpEntity<>(artifactData, postArtifactHeaders(userId, artifactData, requestId)), Artifact.class); + } + + public Artifact updateVfInstanceArtifact(String userId, String serviceUuid, String normalizedInstanceName, Artifact artifact, String requestId) throws Exception { + String url = buildRequestPath(SERVICES_PATH, serviceUuid, RESOURCE_INSTANCES_PATH, normalizedInstanceName, ARTIFACTS_PATH, artifact.getArtifactUUID()); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Update SDC resource instance artifact. 
URL={}", url); + String artifactData = SdcRestClientUtils.artifactToString(artifact); + return client.postForObject(url, new HttpEntity<>(artifactData, postArtifactHeaders(userId, artifactData, requestId)), Artifact.class); + } + + public ExternalReferencesMap getMonitoringReferences(String context, String uuid, String version, String requestId) { + String url = buildRequestPath(context, uuid, VERSION_PATH, version, MONITORING_REFERENCES_PATH); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Get SDC service monitoring references. URL={}", url); + return getObject(url, requestId, ExternalReferencesMap.class); + } + + public void deleteInstanceResourceArtifact(String userId, String context, String serviceUuid, String normalizedVfiName, String artifactUuid, String requestId) { + String url = buildRequestPath(context, serviceUuid, RESOURCE_INSTANCES_PATH, normalizedVfiName, ARTIFACTS_PATH, artifactUuid); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Delete SDC instance resource artifact. URL={}", url); + client.exchange(url, HttpMethod.DELETE, new HttpEntity(postResourceHeaders(userId, requestId)), Artifact.class); + } + + private HttpHeaders postResourceHeaders(String userId, String requestId) { + HttpHeaders headers = requestHeader(requestId); + headers.setContentType(MediaType.APPLICATION_JSON_UTF8); + headers.add(USER_ID_HEADER, userId); + return headers; + } + + private HttpHeaders postArtifactHeaders(String userId, String artifact, String requestId) { + HttpHeaders headers = postResourceHeaders(userId, requestId); + String md5 = Base64Utils.encodeToString(DigestUtils.md5Hex(artifact).getBytes()); + headers.add(CONTENT_MD5_HEADER, md5); + return headers; + } + + private HttpHeaders requestHeader(String requestId){ + HttpHeaders headers = new HttpHeaders(); + headers.add(ECOMP_REQUEST_ID_HEADER, requestId); + return headers; + } + + private <T> T getObject(String url, String requestId, Class<T> clazz) { + return client.exchange(url, HttpMethod.GET, new HttpEntity<>(requestHeader(requestId)), clazz).getBody(); + } + + private String buildRequestPath(String... args){ + return uri + Stream.of(args).collect(Collectors.joining(SLASH)); + } +}
\ No newline at end of file diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/ArtifactGroupType.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/ArtifactGroupType.java new file mode 100644 index 0000000..98e78c6 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/ArtifactGroupType.java @@ -0,0 +1,5 @@ +package org.onap.sdc.dcae.enums; + +public enum ArtifactGroupType { + DEPLOYMENT +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/ArtifactType.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/ArtifactType.java new file mode 100644 index 0000000..2da4cc7 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/ArtifactType.java @@ -0,0 +1,16 @@ +package org.onap.sdc.dcae.enums; + +public enum ArtifactType { + DCAE_TOSCA, + DCAE_JSON, + DCAE_POLICY, + DCAE_DOC, + DCAE_EVENT, + DCAE_INVENTORY_TOSCA, + DCAE_INVENTORY_JSON, + DCAE_INVENTORY_POLICY, + DCAE_INVENTORY_DOC, + DCAE_INVENTORY_BLUEPRINT, + DCAE_INVENTORY_EVENT, + OTHER +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/AssetType.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/AssetType.java new file mode 100644 index 0000000..576643f --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/AssetType.java @@ -0,0 +1,5 @@ +package org.onap.sdc.dcae.enums; + +public enum AssetType { + RESOURCE, SERVICE +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/LifecycleOperationType.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/LifecycleOperationType.java new file mode 100644 index 0000000..80e01df --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/LifecycleOperationType.java @@ -0,0 +1,16 @@ +package org.onap.sdc.dcae.enums; + + +public enum LifecycleOperationType { + CHECKIN("checkin"), CHECKOUT("checkout"), CERTIFY("certify"), UNDO_CHECKOUT("undoCheckout"); + + private String value; + + LifecycleOperationType(String value){ + this.value = value; + } + + public String getValue(){ + return value; + } +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/SdcConsumerInfo.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/SdcConsumerInfo.java new file mode 100644 index 0000000..aecb61d --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/enums/SdcConsumerInfo.java @@ -0,0 +1,5 @@ +package org.onap.sdc.dcae.enums; + +public enum SdcConsumerInfo { + AUTH, INSTANCE_ID, CATALOG_URL +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/AbstractSdncException.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/AbstractSdncException.java new file mode 100644 index 0000000..360e28b --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/AbstractSdncException.java @@ -0,0 +1,97 @@ +package org.onap.sdc.dcae.errormng; + + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; + +import java.util.Arrays; +import java.util.Formatter; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class AbstractSdncException { + private String messageId; + + private String text; + + private String[] variables; + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private final 
static Pattern ERROR_PARAM_PATTERN = Pattern.compile("%\\d"); + + public AbstractSdncException() { + } + + public AbstractSdncException(String messageId, String text, String[] variables) { + super(); + this.messageId = messageId; + this.text = text; + this.variables = validateParameters(messageId, text, variables); + } + + private String[] validateParameters(String messageId, String text, String[] variables) { + String[] res = null; + Matcher m = ERROR_PARAM_PATTERN.matcher(text); + int expectedParamsNum = 0; + while (m.find()) { + expectedParamsNum += 1; + } + int actualParamsNum = (variables != null) ? variables.length : 0; + if (actualParamsNum < expectedParamsNum) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), + "Received less parameters than expected for error with messageId {}, expected: {}, actual: {}. Missing parameters are padded with null values.", + messageId, expectedParamsNum, actualParamsNum); + } else if (actualParamsNum > expectedParamsNum) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), + "Received more parameters than expected for error with messageId {}, expected: {}, actual: {}. Extra parameters are ignored.", + messageId, expectedParamsNum, actualParamsNum); + } + if (variables != null) { + res = Arrays.copyOf(variables, expectedParamsNum); + } + + return res; + } + + public String getMessageId() { + return this.messageId; + } + + public String getText() { + return text; + } + + public String[] getVariables() { + return variables; + } + + public void setMessageId(String messageId) { + this.messageId = messageId; + } + + public void setText(String text) { + this.text = text; + } + + public void setVariables(String[] variables) { + this.variables = variables; + } + + public String getFormattedErrorMessage() { + String res; + if (variables != null && variables.length > 0) { + Formatter formatter = new Formatter(); + try { + res = formatter.format(this.text.replaceAll("%\\d", "%s"), (Object[]) this.variables).toString(); + } finally { + formatter.close(); + } + } else { + res = this.text; + } + return res; + } +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/BaseException.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/BaseException.java new file mode 100644 index 0000000..b559634 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/BaseException.java @@ -0,0 +1,61 @@ +package org.onap.sdc.dcae.errormng; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.google.gson.Gson; +import org.springframework.http.HttpStatus; +import org.springframework.web.client.HttpClientErrorException; + +public class BaseException extends HttpClientErrorException { + + private static Gson gson = new Gson(); + + protected RequestError requestError; + + public RequestError getRequestError() { + return requestError; + } + + public void setRequestError(RequestError requestError) { + this.requestError = requestError; + } + + public BaseException(HttpClientErrorException theError) { + super(theError.getStatusCode()); + String body = theError.getResponseBodyAsString(); + if (body != null) { + requestError = extractRequestError(body); + } + } + + public BaseException(HttpStatus status, RequestError re){ + super(status); + requestError = re; + } + + private RequestError extractRequestError(String error) { + ResponseFormat responseFormat = gson.fromJson(error, ResponseFormat.class); + return responseFormat.getRequestError(); + } + + @JsonIgnore + public String getMessageId() { + return 
requestError.getMessageId(); + } + + @JsonIgnore + public String[] getVariables() { + return requestError.getVariables(); + } + + @JsonIgnore + public String getText(){ + return requestError.getText(); + } + + @Override + @JsonIgnore + public String getMessage() { + return requestError.getFormattedMessage(); + } + +}
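

To make the error flow concrete: BaseException extends HttpClientErrorException, which is unchecked, so callers of the SDC client can catch it and read the parsed SDC error body instead of a raw HTTP message. A minimal caller-side sketch; the wrapper class and method names are hypothetical, and only the ISdcClient and BaseException APIs come from the code above:

import org.onap.sdc.common.onaplog.Enums.LogLevel;
import org.onap.sdc.common.onaplog.OnapLoggerError;
import org.onap.sdc.dcae.client.ISdcClient;
import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed;
import org.onap.sdc.dcae.errormng.BaseException;

public class SdcCallSketch {
    private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
    private final ISdcClient sdcClient;

    public SdcCallSketch(ISdcClient sdcClient) {
        this.sdcClient = sdcClient;
    }

    public ResourceDetailed fetchResource(String uuid, String requestId) throws Exception {
        try {
            return sdcClient.getResource(uuid, requestId);
        } catch (BaseException e) {
            // getMessage() delegates to RequestError.getFormattedMessage();
            // getMessageId() exposes the raw SDC error code for structured logs.
            errLogger.log(LogLevel.ERROR, this.getClass().getName(),
                    "SDC rejected request {}: {} ({})", requestId, e.getMessage(), e.getMessageId());
            throw e;
        }
    }
}
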
\ No newline at end of file diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/OkResponseInfo.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/OkResponseInfo.java new file mode 100644 index 0000000..53bdf3e --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/OkResponseInfo.java @@ -0,0 +1,8 @@ +package org.onap.sdc.dcae.errormng; + +public class OkResponseInfo extends AbstractSdncException { + + public OkResponseInfo(String messageId, String text, String[] variables) { + super(messageId, text, variables); + } +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/PolicyException.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/PolicyException.java new file mode 100644 index 0000000..3fc2d71 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/PolicyException.java @@ -0,0 +1,11 @@ +package org.onap.sdc.dcae.errormng; + +public class PolicyException extends AbstractSdncException { + + public PolicyException(String messageId, String text, String[] variables) { + super(messageId, text, variables); + } + + public PolicyException() { + } +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/RequestError.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/RequestError.java new file mode 100644 index 0000000..00fe3f2 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/RequestError.java @@ -0,0 +1,65 @@ +package org.onap.sdc.dcae.errormng; + +import com.fasterxml.jackson.annotation.JsonInclude; + +import java.util.List; + +@JsonInclude(JsonInclude.Include.NON_NULL) +public class RequestError { + private PolicyException policyException; + private ServiceException serviceException; + private OkResponseInfo okResponseInfo; + private List<ServiceException> serviceExceptions; + + public PolicyException getPolicyException() { + return policyException; + } + + public ServiceException getServiceException() { + return serviceException; + } + + public OkResponseInfo getOkResponseInfo() { + return okResponseInfo; + } + + public void setPolicyException(PolicyException policyException) { + this.policyException = policyException; + } + + void setServiceException(ServiceException serviceException) { + this.serviceException = serviceException; + } + + void setOkResponseInfo(OkResponseInfo okResponseInfo) { + this.okResponseInfo = okResponseInfo; + } + + public List<ServiceException> getServiceExceptions() { + return serviceExceptions; + } + + void setServiceExceptions(List<ServiceException> serviceExceptions) { + this.serviceExceptions = serviceExceptions; + } + + String getFormattedMessage() { + return getError().getFormattedErrorMessage(); + } + + String getMessageId() { + return getError().getMessageId(); + } + + String[] getVariables() { + return getError().getVariables(); + } + + String getText() { + return getError().getText(); + } + + AbstractSdncException getError() { + return null != serviceException ? serviceException : null != policyException ? policyException : okResponseInfo; + } +}
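

The formatted-message plumbing above bottoms out in AbstractSdncException.getFormattedErrorMessage(), which rewrites %N placeholders to %s and feeds the variables to java.util.Formatter, while validateParameters() truncates surplus variables and pads missing ones with null. A short worked example, with a hypothetical message id and text:

import org.onap.sdc.dcae.errormng.PolicyException;

public class FormattedMessageDemo {
    public static void main(String[] args) {
        // Three variables against two %N placeholders: validateParameters()
        // logs a warning and drops the extra one (Arrays.copyOf truncates).
        PolicyException e = new PolicyException("POL0001",
                "Service '%1' version '%2' is not checked out",
                new String[]{"myService", "0.3", "ignored"});
        System.out.println(e.getFormattedErrorMessage());
        // -> Service 'myService' version '0.3' is not checked out
    }
}
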
\ No newline at end of file diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/ResponseFormat.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/ResponseFormat.java new file mode 100644 index 0000000..ffdce70 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/ResponseFormat.java @@ -0,0 +1,75 @@ +package org.onap.sdc.dcae.errormng; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; + +import java.util.List; + +@JsonInclude(JsonInclude.Include.NON_NULL) +public class ResponseFormat { + + @JsonIgnore + private int status; + private RequestError requestError; + private String notes = ""; + + public String getNotes() { + return notes; + } + + void setNotes(String notes) { + this.notes = notes; + } + + public ResponseFormat() { + super(); + } + + public ResponseFormat(int status) { + super(); + this.status = status; + } + + + public void setStatus(int status) { + this.status = status; + } + + public Integer getStatus() { + return status; + } + + public RequestError getRequestError() { + return requestError; + } + + public void setRequestError(RequestError requestError) { + this.requestError = requestError; + } + + void setPolicyException(PolicyException policyException) { + this.requestError = new RequestError(); + requestError.setPolicyException(policyException); + } + + void setServiceException(ServiceException serviceException) { + this.requestError = new RequestError(); + requestError.setServiceException(serviceException); + } + + void setOkResponseInfo(OkResponseInfo okResponseInfo) { + this.requestError = new RequestError(); + requestError.setOkResponseInfo(okResponseInfo); + } + + void setServiceExceptions(List<ServiceException> serviceExceptions) { + this.requestError = new RequestError(); + requestError.setServiceExceptions(serviceExceptions); + } + + @Override + public String toString() { + return "ResponseFormat[" + "status=" + status + ", requestError=" + requestError + ']'; + } + +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/ServiceException.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/ServiceException.java new file mode 100644 index 0000000..163a07f --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/errormng/ServiceException.java @@ -0,0 +1,12 @@ +package org.onap.sdc.dcae.errormng; + +public class ServiceException extends AbstractSdncException { + + public ServiceException(String messageId, String text, String[] variables) { + super(messageId, text, variables); + } + + public ServiceException() { + } + +}
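

RequestError and ResponseFormat together mirror the JSON error envelope returned by SDC, with getError() resolving the populated branch in the order serviceException, then policyException, then okResponseInfo. A small sketch of parsing such a body with Gson, the same mechanism BaseException uses; the payload itself is hypothetical:

import com.google.gson.Gson;
import org.onap.sdc.dcae.errormng.ResponseFormat;

public class RequestErrorParseDemo {
    public static void main(String[] args) {
        // Hypothetical SDC error envelope; field names mirror the errormng POJOs.
        String body = "{\"requestError\":{\"serviceException\":{"
                + "\"messageId\":\"SVC4063\","
                + "\"text\":\"Error: Requested '%1' resource was not found.\","
                + "\"variables\":[\"myVfcmt\"]}}}";
        ResponseFormat rf = new Gson().fromJson(body, ResponseFormat.class);
        // Walk the public accessors; getFormattedErrorMessage() substitutes
        // the variables into the %N placeholders.
        System.out.println(rf.getRequestError()
                .getServiceException()
                .getFormattedErrorMessage());
        // -> Error: Requested 'myVfcmt' resource was not found.
    }
}
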
\ No newline at end of file diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/Normalizers.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/Normalizers.java new file mode 100644 index 0000000..4719607 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/Normalizers.java @@ -0,0 +1,34 @@ +package org.onap.sdc.dcae.utils; + +import org.apache.commons.lang3.text.WordUtils; + +import java.util.regex.Pattern; + +public final class Normalizers { + + private static final Pattern COMPONENT_NAME_DELIMITER_PATTERN = Pattern.compile("[.\\-_]+"); + private static final Pattern ARTIFACT_LABEL_DELIMITER_PATTERN = Pattern.compile("[ \\-+._]+"); + private static final Pattern COMPONENT_INSTANCE_NAME_DELIMITER_PATTERN = Pattern.compile("[ \\-.]+"); + + + public static String normalizeComponentName(String name) { + String normalizedName = name.toLowerCase(); + normalizedName = COMPONENT_NAME_DELIMITER_PATTERN.matcher(normalizedName).replaceAll(" "); + String[] split = normalizedName.split(" "); + StringBuffer sb = new StringBuffer(); + for (String splitElement : split) { + String capitalize = WordUtils.capitalize(splitElement); + sb.append(capitalize); + } + return sb.toString(); + } + + public static String normalizeArtifactLabel(String label) { + return ARTIFACT_LABEL_DELIMITER_PATTERN.matcher(label).replaceAll("").toLowerCase(); + } + + public static String normalizeComponentInstanceName(String name) { + return COMPONENT_INSTANCE_NAME_DELIMITER_PATTERN.matcher(name).replaceAll("").toLowerCase(); + } + +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/SDCResponseErrorHandler.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/SDCResponseErrorHandler.java new file mode 100644 index 0000000..64da66a --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/SDCResponseErrorHandler.java @@ -0,0 +1,43 @@ +package org.onap.sdc.dcae.utils; + +import com.google.gson.Gson; +import org.onap.sdc.dcae.catalog.asdc.ASDCException; +import org.onap.sdc.dcae.errormng.RequestError; +import org.onap.sdc.dcae.errormng.ResponseFormat; +import org.springframework.http.client.ClientHttpResponse; +import org.springframework.web.client.DefaultResponseErrorHandler; +import org.springframework.web.client.HttpClientErrorException; +import org.springframework.web.client.ResponseErrorHandler; + +import java.io.IOException; + +public class SDCResponseErrorHandler implements ResponseErrorHandler { + + private ResponseErrorHandler errorHandler = new DefaultResponseErrorHandler(); + + private static Gson gson = new Gson(); + + public void handleError(ClientHttpResponse response) throws IOException { + try{ + errorHandler.handleError(response); + } catch (HttpClientErrorException e) { + RequestError re = extractRequestError(e); + throw null == re ? 
e : new ASDCException(e.getStatusCode(), re); + } + } + + public boolean hasError(ClientHttpResponse response) throws IOException{ + return errorHandler.hasError(response); + } + + private RequestError extractRequestError(HttpClientErrorException error) { + try { + String body = error.getResponseBodyAsString(); + ResponseFormat responseFormat = gson.fromJson(body, ResponseFormat.class); + return responseFormat.getRequestError(); + } catch (Exception e) { + return null; + } + } + +} diff --git a/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/SdcRestClientUtils.java b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/SdcRestClientUtils.java new file mode 100644 index 0000000..33c2f49 --- /dev/null +++ b/dcaedt_catalog/asdc/src/main/java/org/onap/sdc/dcae/utils/SdcRestClientUtils.java @@ -0,0 +1,85 @@ +package org.onap.sdc.dcae.utils; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +import org.onap.sdc.dcae.composition.restmodels.sdc.Artifact; +import org.onap.sdc.dcae.enums.ArtifactGroupType; +import org.onap.sdc.dcae.enums.SdcConsumerInfo; +import org.springframework.util.Base64Utils; +import org.springframework.util.StringUtils; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.EnumMap; +import java.util.List; +import java.util.stream.Collectors; + +public class SdcRestClientUtils { + + private static final String SDC_CATALOG_PATH = "/sdc/v1/catalog/"; + + // TODO consider moving params elsewhere (user/password/instanceId can be constant) + public static EnumMap<SdcConsumerInfo, String> extractConsumerInfoFromUri(URI configUri) { + EnumMap<SdcConsumerInfo, String> userInfoMap = new EnumMap<>(SdcConsumerInfo.class); + String userInfo = configUri.getUserInfo(); + if (userInfo != null) { + userInfoMap.put(SdcConsumerInfo.AUTH, "Basic "+ Base64Utils.encodeToString(userInfo.getBytes())); + } + String fragment = configUri.getFragment(); + if (fragment == null) + throw new IllegalArgumentException("The URI must contain a fragment specification, to be used as SDC instance id"); + userInfoMap.put(SdcConsumerInfo.INSTANCE_ID, fragment); + try { + userInfoMap.put(SdcConsumerInfo.CATALOG_URL, new URI(configUri.getScheme(), null, configUri.getHost(), configUri.getPort(), configUri.getPath()+SDC_CATALOG_PATH, null, null).toString()); + } + catch (URISyntaxException se) { + throw new IllegalArgumentException("Invalid uri", se); + } + return userInfoMap; + } + + public static String buildResourceFilterQuery(String resourceType, String category, String subcategory) { + List<String> filters = new ArrayList<>(); + if(!StringUtils.isEmpty(resourceType)) + filters.add("resourceType="+resourceType); + if(!StringUtils.isEmpty(category)) + filters.add("category="+category); + if(!StringUtils.isEmpty(subcategory)) + filters.add("subCategory="+subcategory); + return "?"+filters.stream().collect(Collectors.joining("&")); + } + + public static UserRemarks buildUserRemarksObject(String userRemarks) { + return new UserRemarks(userRemarks); + } + + private static class UserRemarks { + private String userRemarks; + + private UserRemarks(String userRemarks) { + this.userRemarks = userRemarks; + } + + public String getUserRemarks() { + return userRemarks; + } + } + + public static String artifactToString(Artifact artifact) throws JsonProcessingException { + ObjectMapper mapper = new ObjectMapper(); + return mapper.writeValueAsString(artifact); + } + + public static Artifact 
generateDeploymentArtifact(String description, String name, String type, String label, byte[] payload){ + Artifact artifact = new Artifact(); + artifact.setDescription(description); + artifact.setArtifactName(name); + artifact.setArtifactGroupType(ArtifactGroupType.DEPLOYMENT.name()); + artifact.setArtifactType(type); + artifact.setArtifactLabel(label); + artifact.setPayloadData(Base64Utils.encodeToString(payload)); + return artifact; + } +} diff --git a/dcaedt_catalog/asdc/src/test/org/onap/sdc/dcae/utils/NormalizersTest.java b/dcaedt_catalog/asdc/src/test/org/onap/sdc/dcae/utils/NormalizersTest.java new file mode 100644 index 0000000..bf06e22 --- /dev/null +++ b/dcaedt_catalog/asdc/src/test/org/onap/sdc/dcae/utils/NormalizersTest.java @@ -0,0 +1,51 @@ +package org.onap.sdc.dcae.utils; + +import static org.assertj.core.api.Assertions.*; + +import org.assertj.core.api.Assertions; +import org.junit.Test; +import org.onap.sdc.dcae.utils.Normalizers; + + +public class NormalizersTest { + + @Test + public void normalizeVFCMTName_withDot_withoutDot(){ + Assertions.assertThat(Normalizers.normalizeComponentName("my.dot")).isEqualTo("MyDot"); + } + + @Test + public void normalizeVFCMTName_withUnderscore_withoutUnderscore(){ + Assertions.assertThat(Normalizers.normalizeComponentName("My_Monitoring_Template_example")).isEqualTo("MyMonitoringTemplateExample"); + } + + @Test + public void normalizeVFCMTName_withWhiteSpace_withoutWhiteSpace(){ + Assertions.assertThat(Normalizers.normalizeComponentName(" my dot ")).isEqualTo("MyDot"); + } + + @Test + public void normalizeVFCMTName_withDash_withoutDash(){ + Assertions.assertThat(Normalizers.normalizeComponentName("My-Monitoring-Template-example")).isEqualTo("MyMonitoringTemplateExample"); + } + + @Test + public void normalizeVFCMTName_notCapitalized_capitalized(){ + Assertions.assertThat(Normalizers.normalizeComponentName("my monitoring template eXAMPLE")).isEqualTo("MyMonitoringTemplateExample"); + } + + @Test + public void normalizeArtifactLabel_withDash_withoutDash(){ + Assertions.assertThat(Normalizers.normalizeArtifactLabel("blueprint-other")).isEqualTo("blueprintother"); + } + + @Test + public void normalizeArtifactLabel_withWhiteSpace_withoutWhiteSpace(){ + Assertions.assertThat(Normalizers.normalizeArtifactLabel(" blueprint other")).isEqualTo("blueprintother"); + } + + @Test + public void normalizeArtifactLabel_withPlus_withoutPlus(){ + Assertions.assertThat(Normalizers.normalizeArtifactLabel("+blueprint+++other+")).isEqualTo("blueprintother"); + } +} diff --git a/dcaedt_catalog/commons/pom.xml b/dcaedt_catalog/commons/pom.xml new file mode 100644 index 0000000..d285e1b --- /dev/null +++ b/dcaedt_catalog/commons/pom.xml @@ -0,0 +1,135 @@ +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Catalog</artifactId> + <version>1806.0.1-SNAPSHOT</version> + </parent> + <artifactId>DCAE-DT-Catalog-Commons</artifactId> + <packaging>jar</packaging> + <name>DCAE DT Catalog Commons</name> + + <build> + <sourceDirectory>src/main/java</sourceDirectory> + <plugins> + <plugin> + <artifactId>maven-compiler-plugin</artifactId> + <version>3.1</version> + <configuration> + <source>1.8</source> + <target>1.8</target> + <encoding>${project.build.sourceEncoding}</encoding> + </configuration> + </plugin> + 
<plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-dependency-plugin</artifactId> + <version>2.10</version> + <executions> + <execution> + <id>copy-dependencies</id> + <phase>package</phase> + <goals> + <goal>copy-dependencies</goal> + </goals> + <configuration> + <outputDirectory>${project.build.directory}/deps</outputDirectory> + <overWriteReleases>false</overWriteReleases> + <overWriteSnapshots>false</overWriteSnapshots> + <overWriteIfNewer>true</overWriteIfNewer> + </configuration> + </execution> + </executions> + </plugin> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>buildnumber-maven-plugin</artifactId> + <version>1.4</version> + <executions> + <execution> + <phase>validate</phase> + <goals> + <goal>create</goal> + </goals> + </execution> + </executions> + <configuration> + <doCheck>false</doCheck> + <doUpdate>false</doUpdate> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <version>2.1</version> + <configuration> + <archive> + <manifest> + <addDefaultImplementationEntries>true</addDefaultImplementationEntries> + </manifest> + <manifestEntries> + <Implementation-Build>${buildNumber}</Implementation-Build> + </manifestEntries> + </archive> + </configuration> + </plugin> + </plugins> + </build> + <dependencies> + <dependency> + <groupId>org.apache.httpcomponents</groupId> + <artifactId>httpasyncclient</artifactId> + <version>4.1</version> + </dependency> + <dependency> + <groupId>commons-io</groupId> + <artifactId>commons-io</artifactId> + <version>2.4</version> + </dependency> + <dependency> + <groupId>commons-cli</groupId> + <artifactId>commons-cli</artifactId> + <version>1.3</version> + </dependency> + <dependency> + <groupId>commons-beanutils</groupId> + <artifactId>commons-beanutils</artifactId> + <version>1.9.3</version> + </dependency> + <dependency> + <groupId>commons-jxpath</groupId> + <artifactId>commons-jxpath</artifactId> + <version>1.3</version> + </dependency> + <dependency> + <groupId>com.google.guava</groupId> + <artifactId>guava</artifactId> + <version>17.0</version> + </dependency> + <dependency> + <groupId>org.yaml</groupId> + <artifactId>snakeyaml</artifactId> + <version>1.17</version> + </dependency> + <dependency> + <groupId>org.json</groupId> + <artifactId>json</artifactId> + <version>20160212</version> + </dependency> + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-databind</artifactId> + <version>2.7.8</version> + </dependency> + <dependency> + <groupId>com.github.wnameless</groupId> + <artifactId>json-flattener</artifactId> + <version>0.2.2</version> + </dependency> + <dependency> + <groupId>org.springframework</groupId> + <artifactId>spring-web</artifactId> + <version>4.3.5.RELEASE</version> + </dependency> + </dependencies> +</project> diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Action.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Action.java new file mode 100644 index 0000000..fb36950 --- /dev/null +++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Action.java @@ -0,0 +1,11 @@ +package org.onap.sdc.dcae.catalog.commons; + +import org.onap.sdc.dcae.catalog.commons.Future; + +/** + */ +public interface Action<T> { + + public Future<T> execute(); + +} diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Actions.java 
b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Actions.java new file mode 100644 index 0000000..132b0c0 --- /dev/null +++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Actions.java @@ -0,0 +1,201 @@ +package org.onap.sdc.dcae.catalog.commons; + +import java.util.List; +import java.util.LinkedList; +import java.util.ArrayList; +import java.util.Collections; +import java.util.concurrent.CountDownLatch; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.dcae.catalog.commons.Action; +import org.onap.sdc.dcae.catalog.commons.Future; +import org.onap.sdc.dcae.catalog.commons.FutureHandler; +import org.onap.sdc.dcae.catalog.commons.Futures; + +/** + */ +public interface Actions { + + /** */ + public static interface CompoundAction<T> extends Action<List<T>> { + + public CompoundAction<T> addAction(Action<T> theAction); + + public List<Action<T>> actions(); + + public Future<List<T>> execute(); + } + + + public static class BasicCompoundAction<T> implements CompoundAction<T> { + + private LinkedList<Action<T>> actions = new LinkedList<Action<T>>(); + + + + public CompoundAction<T> addAction(Action<T> theAction) { + this.actions.add(theAction); + return this; + } + + public List<Action<T>> actions() { + return this.actions; + } + + public Future<List<T>> execute() { + CompoundFuture<T> cf = new CompoundFuture<T>(this.actions.size()); + for (Action a: this.actions) + cf.addFuture(a.execute()); + return cf; + } + } + + + public static class CompoundFuture<T> extends Futures.BasicFuture<List<T>> { + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private LinkedList<Future<T>> futures = new LinkedList<Future<T>>(); + private FutureHandler<T> hnd; + + CompoundFuture(int theActionCount) { + + hnd = new Futures.BasicHandler<T>(new CountDownLatch(theActionCount)) { + + private List<T> results = new ArrayList<T>(Collections.nCopies(theActionCount, null)); + + protected void process(Future<T> theResult) { + synchronized(CompoundFuture.this) { + if (theResult.failed()) { + CompoundFuture.this.cause(theResult.cause()); + //and stop processing of other results + this.results = null; + //?? + } + else { + if (this.results != null) + this.results.set(futures.indexOf(theResult), theResult.result()); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Got result for action {}. 
Count at {}", futures.indexOf(theResult), this.latch.getCount()); + } + if (this.latch.getCount() == 1) {//this was the last result + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Got all results: {}", this.results); + CompoundFuture.this.result(this.results); + } + } + } + }; + } + + CompoundFuture<T> addFuture(Future<T> theFuture) { + synchronized(this) { + futures.add(theFuture); + theFuture.setHandler(this.hnd); + } + return this; + } + + } + +/* + public static class CompoundFutureHandler<T> implements FutureHandler<T> { + + protected List<T> result = null; + protected List<Throwable> error = null; + protected CountDownLatch latch = null; + + CompoundFutureHandler(int theResultCount) { + this(new CountDownLatch(theResultCount)); + } + + public void handle(Future<T> theResult) { + if (this.latch != null) { + this.latch.countDown(); + } + } + + public T result() + throws InterruptedException, RuntimeException { + return result(true); + } + + public BasicHandler<T> waitForCompletion() throws InterruptedException { + this.latch.await(); + return this; + } + + } +*/ + + public static class Sequence<T> implements Action<List<T>> { + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private List<Action<T>> actions = new LinkedList<Action<T>>(); + private int current = 0; + private SequenceFuture<T> future = new SequenceFuture<T>(); + + public Sequence<T> add(Action<T> theAction) { + if (this.current > 0) + throw new IllegalStateException("In execution"); + this.actions.add(theAction); + return this; + } + + /* we allow 'early' access to the future so that a client can pass its reference while + * it still builds the sequence, for example. 
+ */ + public Future<List<T>> future() { + return this.future; + } + + //need to add protection when for the 'no action' case + public Future<List<T>> execute() { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Starting serialized execution of {}", actions); + if (hasNext()) + next().execute().setHandler(future.hnd); + return this.future; + } + + protected boolean hasNext() { + return this.current < actions.size(); + } + + protected Action next() { + return actions.get(this.current++); + } + + private class SequenceFuture<T> extends Futures.BasicFuture<List<T>> { + + private List<T> results = new LinkedList<T>(); + private FutureHandler<T> hnd = new Futures.BasicHandler<T>() { + + protected void process(Future<T> theResult) { + + if (theResult.failed()) { + SequenceFuture.this.cause(theResult.cause()); + //and stop processing of other results + } + else { + SequenceFuture.this.results.add(theResult.result()); + if (Sequence.this.hasNext()) { + Sequence.this.next().execute().setHandler(this); + } + else { + SequenceFuture.this.result(SequenceFuture.this.results); + } + } + } + }; + + + } + + + + } + +} diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Future.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Future.java new file mode 100644 index 0000000..c50f467 --- /dev/null +++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Future.java @@ -0,0 +1,35 @@ +package org.onap.sdc.dcae.catalog.commons; + +import org.onap.sdc.dcae.catalog.commons.Future; +import org.onap.sdc.dcae.catalog.commons.FutureHandler; + +/** + * Modeled after the vertx future + */ +public interface Future<T> { + + public T result(); + + public Future<T> result(T theResult); + +//rename 'cause' to 'failure' + + public Throwable cause(); + + public Future<T> cause(Throwable theError); + + public boolean succeeded(); + + public boolean failed(); + + public boolean complete(); + + public T waitForResult() throws Exception; + + //public T waitForResult(long theTimeout) throws Exception; + + public Future<T> waitForCompletion() throws InterruptedException; + + public Future<T> setHandler(FutureHandler<T> theHandler); + +} diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/FutureHandler.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/FutureHandler.java new file mode 100644 index 0000000..b689412 --- /dev/null +++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/FutureHandler.java @@ -0,0 +1,13 @@ +package org.onap.sdc.dcae.catalog.commons; + +import org.onap.sdc.dcae.catalog.commons.Future; + +/** + * Modeled after the vertx future + */ +@FunctionalInterface +public interface FutureHandler<T> { + + public void handle(Future<T> theResult); + +} diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Futures.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Futures.java new file mode 100644 index 0000000..ffaf42b --- /dev/null +++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Futures.java @@ -0,0 +1,257 @@ +package org.onap.sdc.dcae.catalog.commons; + +import java.util.List; +import java.util.LinkedList; +import java.util.Collections; + +import java.util.concurrent.CountDownLatch; +import java.util.function.Function; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import 
org.onap.sdc.dcae.catalog.commons.Future; +import org.onap.sdc.dcae.catalog.commons.FutureHandler; +import org.onap.sdc.common.onaplog.Enums.LogLevel; + + +/** + */ +public class Futures<T> { + + private Futures() { + } + + + public static <T> Future<T> failedFuture(Throwable theError) { + return new BasicFuture<T>() + .cause(theError); + } + + public static <T> Future<T> succeededFuture(T theResult) { + return new BasicFuture<T>() + .result(theResult); + } + + public static <T> Future<T> future() { + return new BasicFuture<T>(); + } + + public static <U,V> Future<V> advance(Future<U> theStep, + final Function<U,V> theResultFunction) { + return advance(theStep, theResultFunction, Function.identity()); + } + + public static <U,V> Future<V> advance(Future<U> theStep, + final Function<U,V> theResultFunction, + final Function<Throwable, Throwable> theErrorFunction) { + final Future<V> adv = new BasicFuture<V>(); + theStep.setHandler(new FutureHandler<U>() { + public void handle(Future<U> theResult) { + if (theResult.failed()) + adv.cause(theErrorFunction.apply(theResult.cause())); + else + adv.result(theResultFunction.apply(theResult.result())); + } + }); + return adv; + } + + /** */ + public static class BasicFuture<T> implements Future<T> { + + protected boolean succeeded, + failed; + + protected FutureHandler<T> handler; + protected Throwable cause; + protected T result; + + + protected BasicFuture() { + } + + public T result() { + return this.result; + } + + public Future<T> result(T theResult) { + this.result = theResult; + this.succeeded = true; + this.cause = null; + this.failed = false; + callHandler(); + return this; + } + + public Throwable cause() { + return this.cause; + } + + public Future<T> cause(Throwable theCause) { + this.cause = theCause; + this.failed = true; + this.result = null; + this.succeeded = false; + callHandler(); + return this; + } + + public boolean succeeded() { + return this.succeeded; + } + + public boolean failed() { + return this.failed; + } + + public boolean complete() { + return this.failed || this.succeeded; + } + + public Future<T> setHandler(FutureHandler<T> theHandler) { + this.handler = theHandler; + callHandler(); + return this; + } + + public T waitForResult() throws Exception { + BasicHandler<T> hnd = buildHandler(); + setHandler(hnd); + hnd.waitForCompletion(); + if (failed()) + throw (Exception)cause(); + else + return result(); + } + + public Future<T> waitForCompletion() throws InterruptedException { + BasicHandler<T> hnd = buildHandler(); + setHandler(hnd); + hnd.waitForCompletion(); + return this; + } + + protected void callHandler() { + if (this.handler != null && complete()) { + this.handler.handle(this); + } + } + + protected BasicHandler<T> buildHandler() { + return new BasicHandler<T>(); + } + } + + + /** */ + public static class BasicHandler<T> + implements FutureHandler<T> { + + protected T result = null; + protected Throwable error = null; + protected CountDownLatch latch = null; + + BasicHandler() { + this(new CountDownLatch(1)); + } + + BasicHandler(CountDownLatch theLatch) { + this.latch = theLatch; + } + + public void handle(Future<T> theResult) { + process(theResult); + if (this.latch != null) { + this.latch.countDown(); + } + } + + protected void process(Future<T> theResult) { + if (theResult.failed()) { + this.error = theResult.cause(); + } + else { + this.result = theResult.result(); + } + } + + public T result(boolean doWait) + throws InterruptedException, RuntimeException { + if (doWait) { + waitForCompletion(); + } + if 
(null == this.error) + return this.result; + + throw new RuntimeException(this.error); + } + + public T result() + throws InterruptedException, RuntimeException { + return result(true); + } + + public BasicHandler<T> waitForCompletion() throws InterruptedException { + this.latch.await(); + return this; + } + } + + /** */ + public static class Accumulator<T> extends BasicFuture<List<T>> + implements Future<List<T>> { + + protected List<Future<T>> futures = new LinkedList<Future<T>>(); + //protected List<T> results = new LinkedList<T>(); + protected BasicHandler<T> handler = null; + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + public Accumulator() { + this.result = new LinkedList<T>(); + } + + public Accumulator<T> add(Future<T> theFuture) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Intersection add"); + this.futures.add(theFuture); + this.result.add(null); + return this; + } + + public Accumulator<T> addAll(Accumulator<T> theFutures) { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Intersection addAll"); + + return this; + } + + public Future<List<T>> accumulate() { + this.futures = Collections.unmodifiableList(this.futures); + this.handler = new BasicHandler<T>(new CountDownLatch(this.futures.size())) { + protected void process(Future<T> theResult) { + if (theResult.failed()) { + Accumulator.this.cause = theResult.cause(); + } + else { + Accumulator.this.result.set( + Accumulator.this.futures.indexOf(theResult), theResult.result()); + } + if (this.latch.getCount() == 1) { + if (Accumulator.this.cause != null) + Accumulator.this.cause(Accumulator.this.cause); + else + Accumulator.this.result(Accumulator.this.result); + } + } + }; + futures.stream() + .forEach(f -> f.setHandler(this.handler)); + + return this; + } + + } + + +} diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Http.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Http.java new file mode 100644 index 0000000..0f28495 --- /dev/null +++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Http.java @@ -0,0 +1,107 @@ +package org.onap.sdc.dcae.catalog.commons; + +import java.util.List; + +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpMethod; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.http.client.SimpleClientHttpRequestFactory; +import org.springframework.http.converter.HttpMessageConverter; +import org.springframework.util.concurrent.ListenableFutureCallback; +import org.springframework.web.client.AsyncRestTemplate; +import org.springframework.web.client.HttpClientErrorException; +import org.springframework.web.client.RestClientException; +import org.springframework.web.client.RestTemplate; + +public class Http { + + protected Http() { + } + + + public static <T> Future<T> exchange(String theUri, HttpMethod theMethod, HttpEntity theRequest, Class<T> theResponseType) { + + AsyncRestTemplate restTemplate = new AsyncRestTemplate(); + + List<HttpMessageConverter<?>> converters = restTemplate.getMessageConverters(); + converters.add(0, new JSONHttpMessageConverter()); + restTemplate.setMessageConverters(converters); + + HttpFuture<T> result = new HttpFuture<T>(); + try { + restTemplate + .exchange(theUri, theMethod, theRequest, theResponseType) + .addCallback(result.callback); + } + catch 
(RestClientException rcx) { + return Futures.failedFuture(rcx); + } + catch (Exception x) { + return Futures.failedFuture(x); + } + + return result; + } + + /** + * + * @param theUri + * @param theMethod + * @param theRequest + * @param theResponseType + * @param readTimeOut pass -1 if you dont need to customize the read time out interval + * @return + */ + public static <T> ResponseEntity<T> exchangeSync(String theUri, HttpMethod theMethod, HttpEntity theRequest, Class<T> theResponseType, int readTimeOut) { + + RestTemplate restTemplate = new RestTemplate(); + + if(readTimeOut!=-1){ + SimpleClientHttpRequestFactory rf = (SimpleClientHttpRequestFactory) restTemplate.getRequestFactory(); + rf.setReadTimeout(1 * readTimeOut); + } + + List<HttpMessageConverter<?>> converters = restTemplate.getMessageConverters(); + converters.add(0, new JSONHttpMessageConverter()); + restTemplate.setMessageConverters(converters); + ResponseEntity<T> result = null; + + try { + result = restTemplate.exchange(theUri, theMethod, theRequest, theResponseType); + } + catch (RestClientException rcx) { + return new ResponseEntity<T>((T) rcx.getMessage(), HttpStatus.INTERNAL_SERVER_ERROR); + } + catch (Exception x) { + return new ResponseEntity<T>((T) x.getMessage(), HttpStatus.INTERNAL_SERVER_ERROR); + } + + return result; + } + + + + public static class HttpFuture<T> extends Futures.BasicFuture<T> { + + HttpFuture() { + } + + ListenableFutureCallback<ResponseEntity<T>> callback = new ListenableFutureCallback<ResponseEntity<T>>() { + + public void onSuccess(ResponseEntity<T> theResult) { + HttpFuture.this.result(theResult.getBody()); + } + + public void onFailure(Throwable theError) { + if (theError instanceof HttpClientErrorException) { + HttpFuture.this.cause(new Exception((HttpClientErrorException)theError)); + } + else { + HttpFuture.this.cause(theError); + } + } + }; + + } +} diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/JSONHttpMessageConverter.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/JSONHttpMessageConverter.java new file mode 100644 index 0000000..e711279 --- /dev/null +++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/JSONHttpMessageConverter.java @@ -0,0 +1,100 @@ +package org.onap.sdc.dcae.catalog.commons; + +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.OutputStreamWriter; +import java.io.Reader; +import java.io.Writer; +import java.lang.reflect.Type; +import java.nio.charset.Charset; + +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpInputMessage; +import org.springframework.http.HttpOutputMessage; +import org.springframework.http.MediaType; +import org.springframework.http.converter.AbstractHttpMessageConverter; +import org.springframework.http.converter.HttpMessageNotReadableException; +import org.springframework.http.converter.HttpMessageNotWritableException; + +import org.json.JSONObject; +import org.json.JSONArray; +import org.json.JSONTokener; +import org.json.JSONException; + +/** + */ +public class JSONHttpMessageConverter extends AbstractHttpMessageConverter<Object> { + + public static final Charset DEFAULT_CHARSET = Charset.forName("UTF-8"); + + /** */ + public JSONHttpMessageConverter() { + super(new MediaType("application", "json", DEFAULT_CHARSET)); + } + /* + @Override + public boolean canRead(Class<?> theClazz, MediaType theMediaType) { + return canRead(theMediaType); + } + + @Override + public boolean canWrite(Class<?> 
+		return canWrite(theMediaType);
+	}
+	*/
+	@Override
+	protected boolean supports(Class<?> theClazz) {
+		return theClazz.equals(JSONObject.class) ||
+					 theClazz.equals(JSONArray.class);
+	}
+
+	@Override
+	protected Object readInternal(Class<?> theClazz, HttpInputMessage theInputMessage)
+			throws IOException, HttpMessageNotReadableException {
+
+		Reader json = new InputStreamReader(theInputMessage.getBody(), getCharset(theInputMessage.getHeaders()));
+
+		try {
+			if (theClazz.equals(JSONObject.class))
+				return new JSONObject(new JSONTokener(json));
+			if (theClazz.equals(JSONArray.class))
+				return new JSONArray(new JSONTokener(json));
+
+			throw new HttpMessageNotReadableException("Could not process input, cannot handle " + theClazz);
+		}
+		catch (JSONException jsonx) {
+			throw new HttpMessageNotReadableException("Could not read JSON: " + jsonx.getMessage(), jsonx);
+		}
+	}
+
+	@Override
+	protected void writeInternal(Object theObject, HttpOutputMessage theOutputMessage)
+			throws IOException, HttpMessageNotWritableException {
+
+		Writer writer = new OutputStreamWriter(theOutputMessage.getBody(), getCharset(theOutputMessage.getHeaders()));
+
+		try {
+			if (theObject instanceof JSONObject) {
+				((JSONObject)theObject).write(writer);
+			}
+			else if (theObject instanceof JSONArray) {
+				((JSONArray)theObject).write(writer);
+			}
+
+			writer.close();
+		}
+		catch(JSONException jsonx) {
+			throw new HttpMessageNotWritableException("Could not write JSON: " + jsonx.getMessage(), jsonx);
+		}
+	}
+
+	private Charset getCharset(HttpHeaders theHeaders) {
+		if (theHeaders != null &&
+				theHeaders.getContentType() != null &&
+				theHeaders.getContentType().getCharSet() != null) {
+			return theHeaders.getContentType().getCharSet();
+		}
+		return DEFAULT_CHARSET;
+	}
+
+}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/ListBuilder.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/ListBuilder.java
new file mode 100644
index 0000000..2538893
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/ListBuilder.java
@@ -0,0 +1,59 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.LinkedList;
+
+public class ListBuilder<T> {
+
+	private List<T> list;
+
+	public ListBuilder() {
+		this.list = new LinkedList<T>();
+	}
+
+	public boolean isEmpty() {
+		return this.list.isEmpty();
+	}
+
+	public ListBuilder<T> add(T theValue) {
+		this.list.add(theValue);
+		return this;
+	}
+
+	public ListBuilder<T> addAll(final Iterable<? extends T> theValues) {
+		for (final T val : theValues) {
+			this.list.add(val);
+		}
+		return this;
+	}
+
+	public ListBuilder<T> addAll(final List<? extends T> theList) {
+		this.list.addAll(theList);
+		return this;
+	}
+
+	public ListBuilder<T> addAll(final T[] theArray) {
+		for (T t: theArray) this.list.add(t);
+		return this;
+	}
+
+	public List<T> build() {
+		return this.list;
+	}
+
+	public List<T> buildOpt() {
+		return this.list.isEmpty() ? null : this.list;
+	}
+
+	public static <V> List<V> asList(V[] theArray) {
+		return Arrays.asList(theArray);
+	}
+
+	public static <V> List<V> asListOpt(V[] theArray) {
+		return (theArray != null && theArray.length > 0) ? Arrays.asList(theArray) : null;
+	}
+}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/MapBuilder.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/MapBuilder.java
new file mode 100644
index 0000000..3aa2a56
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/MapBuilder.java
@@ -0,0 +1,80 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.util.function.Function;
+import java.util.function.BiFunction;
+
+public class MapBuilder<K,V> {
+
+	private Map<K,V> map;
+
+	public MapBuilder() {
+		this.map = new HashMap<K,V>();
+	}
+
+	public boolean isEmpty() {
+		return this.map.isEmpty();
+	}
+
+	public MapBuilder<K,V> put(K theKey, V theValue) {
+		this.map.put(theKey, theValue);
+		return this;
+	}
+
+	public MapBuilder<K,V> putOpt(K theKey, V theValue) {
+		if (theValue != null) {
+			this.map.put(theKey, theValue);
+		}
+		return this;
+	}
+
+	public MapBuilder<K,V> put(final Map.Entry<? extends K, ? extends V> theEntry) {
+		this.map.put(theEntry.getKey(), theEntry.getValue());
+		return this;
+	}
+
+	public MapBuilder<K,V> putOpt(final Map.Entry<? extends K, ? extends V> theEntry) {
+		if (theEntry != null) {
+			this.map.put(theEntry.getKey(), theEntry.getValue());
+		}
+		return this;
+	}
+
+	public MapBuilder<K,V> putAll(final Iterable<? extends Map.Entry<? extends K, ? extends V>> theEntries) {
+		for (final Map.Entry<? extends K, ? extends V> e : theEntries) {
+			this.map.put(e.getKey(), e.getValue());
+		}
+		return this;
+	}
+
+	/* If theEntries contains multiple entries with the same key then the key is
+	   re-mapped through the given rekey function in order to make it unique .. */
+	public MapBuilder<K,V> forceAll(final Iterable<? extends Map.Entry<K, V>> theEntries,
+																	Function<Map.Entry<K, V> , K> rekeyFunction) {
+		for (final Map.Entry<? extends K, ? extends V> e : theEntries) {
+			K key = e.getKey();
+			if (this.map.containsKey(key))
+				key = rekeyFunction.apply((Map.Entry<K,V>)e);
+			this.map.put(key, e.getValue());
+		}
+		return this;
+	}
+
+	public MapBuilder<K,V> putAll(final Map<? extends K, ? extends V> theMap) {
+		this.map.putAll(theMap);
+		return this;
+	}
+
+	public Map<K,V> build() {
+		return this.map;
+	}
+
+	public Map<K,V> buildOpt() {
+		return this.map.isEmpty() ?
null : this.map; + } +} diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Neo.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Neo.java new file mode 100644 index 0000000..f818163 --- /dev/null +++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Neo.java @@ -0,0 +1,54 @@ +package org.onap.sdc.dcae.catalog.commons; + +import java.util.Iterator; + +import com.google.common.base.Predicate; +import com.google.common.collect.Iterators; + +import org.json.JSONObject; + + +public class Neo { + + /* + */ + public static String literalMap(JSONObject theProps, + String theNameAlias, + String theValueAlias, + String theAssignmentOp, + String theRelationOp, + Predicate theFieldFilter) { + if(theProps.length() == 0) + return ""; + StringBuilder sb = new StringBuilder(""); + for (Iterator i = Iterators.filter(theProps.keys(), + theFieldFilter); + i.hasNext();) { + String propName = (String)i.next(); + + if (theNameAlias != null) { + sb.append(theNameAlias) + .append('.'); + } + sb.append('`') + .append(propName) + .append('`') + .append(theAssignmentOp) + .append(" {") + .append(theValueAlias) + .append("}.") + .append('`') + .append(propName) + .append('`') + .append(theRelationOp); + } + return sb.substring(0, sb.length() - theRelationOp.length()); + } + + public static String literalMap(JSONObject theProps, + String theAlias) { + return literalMap(theProps, null, theAlias, ":", ",", f -> true); + } + +} + diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxies.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxies.java new file mode 100644 index 0000000..8983599 --- /dev/null +++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxies.java @@ -0,0 +1,37 @@ +package org.onap.sdc.dcae.catalog.commons; + +import java.util.Map; +import java.lang.reflect.ParameterizedType; + +import org.json.JSONObject; + +import org.onap.sdc.dcae.catalog.commons.ProxyBuilder; + + +public class Proxies { + + private Proxies() { + } + + + private static ProxyBuilder builder = new ProxyBuilder(); + + public static <T> T build(Map theData, Class<T> theType) { + return builder.build(new JSONObject(theData), theType); + } + + public static <T> T build(Map theData, Map theContextData, Class<T> theType) { + return builder.build(new JSONObject(theData), theContextData, theType); + } + + public static <T> T build(JSONObject theData, Class<T> theType) { + return builder.build(theData, theType); + } + + public static <T> Class<T> typeArgument(Class theType) { + return (Class<T>) + ((ParameterizedType)theType.getGenericSuperclass()). 
+ getActualTypeArguments()[0]; + } + +} diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxy.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxy.java new file mode 100644 index 0000000..d368886 --- /dev/null +++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxy.java @@ -0,0 +1,144 @@ +package org.onap.sdc.dcae.catalog.commons; + +import java.util.List; +import java.util.LinkedList; +import java.util.Map; +import java.util.Collections; + +import java.util.stream.Collectors; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import java.lang.reflect.Type; +import java.lang.reflect.Method; +import java.lang.reflect.Array; +import java.lang.reflect.Constructor; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.InvocationTargetException; + +import java.lang.invoke.MethodHandles; + +import com.google.common.reflect.Invokable; +import org.onap.sdc.dcae.catalog.commons.Proxy; +import org.onap.sdc.dcae.catalog.commons.ProxyBuilder; +import com.google.common.reflect.AbstractInvocationHandler; + +import org.apache.commons.beanutils.ConvertUtils; + +import org.json.JSONObject; +import org.json.JSONArray; + +public class Proxy extends AbstractInvocationHandler { + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.METHOD) + + public static @interface DataMap { + + public String map() default ""; + + public boolean proxy() default false; + + public Class elementType() default Void.class; + } + + + public static final Constructor<MethodHandles.Lookup> lookupHandleConstructor; + + static { + try { + lookupHandleConstructor = + MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, + int.class); + + if (!lookupHandleConstructor.isAccessible()) { + lookupHandleConstructor.setAccessible(true); + } + } + catch (Exception x) { + throw new RuntimeException(x); + } + } + + + private JSONObject data; + private ProxyBuilder builder; + + protected Proxy(JSONObject theData, ProxyBuilder theBuilder) { + this.data = theData; + this.builder = theBuilder; + } + + public JSONObject data() { + return this.data; + } + + public ProxyBuilder getBuilder() { + return this.builder; + } + + protected Object handleInvocation( + Object theProxy,Method theMethod,Object[] theArgs) + throws Throwable { + if (theMethod.isDefault()) { + final Class<?> declaringClass = theMethod.getDeclaringClass(); + + return lookupHandleConstructor + .newInstance(declaringClass, MethodHandles.Lookup.PRIVATE) + .unreflectSpecial(theMethod, declaringClass) + .bindTo(theProxy) + .invokeWithArguments(theArgs); + } + + String key = theMethod.getName(); + + Proxy.DataMap dataMap = (Proxy.DataMap)theMethod.getAnnotation(Proxy.DataMap.class); + if (dataMap != null) { + String dataKey = dataMap.map(); + if (dataKey != null && !"".equals(dataKey)) + key = dataKey; + } + + //this is ugly, can this be done through an extension mechanism such as plugging in functions? + if ( builder.hasExtension(key) ) + return this.builder.extension(key).apply(this, theArgs); + + //we give priority to the context (because of the 'catalog' property issue in catalog service) but + //how natural is this? 
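+//for illustration only (no such type ships with this commit): a proxied
+//client interface might look like
+//	interface Item {
+//		@Proxy.DataMap(map="itemId") String id();
+//		@Proxy.DataMap(proxy=true, elementType=Item.class) List<Item> items();
+//	}
+//so that id() reads the 'itemId' key of the backing JSONObject while items()
+//wraps each element of a JSONArray in a further proxy, per the branches below.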
+		Object val = this.builder.context(key);
+		if (val == null)
+			val = this.data.opt(key);
+
+		if (val == null)
+			return null;
+
+//as we create proxies here we should store them back in the 'data' so that we do not do it again
+//can we always 'recognize' them?
+		if (val instanceof String &&
+				String.class != theMethod.getReturnType()) {
+			//??This will yield a POJO ..
+			return ConvertUtils.convert((String)val, theMethod.getReturnType());
+		}
+		else if (val instanceof JSONObject) {
+			if (dataMap != null && dataMap.proxy()) {
+				return builder.build((JSONObject)val, theMethod.getReturnType());
+			}
+		}
+		else if (val instanceof JSONArray && dataMap != null &&
+						 dataMap.proxy() &&
+						 List.class.isAssignableFrom(theMethod.getReturnType())) {
+
+			//the declared return type may be the List interface itself, which cannot
+			//be instantiated, so fall back to a LinkedList in that case
+			List res = theMethod.getReturnType().isInterface() ?
+									new LinkedList() : (List) theMethod.getReturnType().newInstance();
+			for (int i = 0; i < ((JSONArray) val).length(); i++) {
+				res.add(builder.build(((JSONArray) val).getJSONObject(i), dataMap.elementType()));
+			}
+			return res;
+
+		}
+		return val;
+	}
+}
diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxy.pojo b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxy.pojo
new file mode 100644
index 0000000..b3b5cb9
--- /dev/null
+++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Proxy.pojo
@@ -0,0 +1,145 @@
+package org.onap.sdc.dcae.catalog.commons;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Collections;
+
+import java.util.stream.Collectors;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import java.lang.reflect.Type;
+import java.lang.reflect.Method;
+import java.lang.reflect.Array;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+
+import java.lang.invoke.MethodHandles;
+
+import com.google.common.reflect.Invokable;
+import com.google.common.reflect.AbstractInvocationHandler;
+
+import org.apache.commons.beanutils.ConvertUtils;
+
+
+/**
+ */
+public class Proxy
+				extends AbstractInvocationHandler {
+
+	@Retention(RetentionPolicy.RUNTIME)
+	@Target(ElementType.METHOD)
+
+	public static @interface DataMap {
+
+		public String map() default "";
+
+		public boolean proxy() default false;
+
+		public Class elementType() default Void.class;
+	}
+
+
+	public static Constructor<MethodHandles.Lookup> lookupHandleConstructor;
+
+	static {
+		try {
+			lookupHandleConstructor =
+				MethodHandles.Lookup.class.getDeclaredConstructor(Class.class,
+																													int.class);
+
+			if (!lookupHandleConstructor.isAccessible()) {
+				lookupHandleConstructor.setAccessible(true);
+			}
+		}
+		catch (Exception x) {
+			throw new RuntimeException(x);
+		}
+	}
+
+
+	private Map data;
+	private ProxyBuilder builder;
+
+	protected Proxy(Map theData, ProxyBuilder theBuilder) {
+		this.data = theData;
+		this.builder = theBuilder;
+	}
+
+	public Map data() {
+		return this.data;
+	}
+
+	public ProxyBuilder getBuilder() {
+		return this.builder;
+	}
+
+	protected Object handleInvocation(
+			Object theProxy,Method theMethod,Object[] theArgs)
+			throws Throwable {
+		if (theMethod.isDefault()) {
+			final Class<?> declaringClass = theMethod.getDeclaringClass();
+			/*
+			return MethodHandles.lookup()
+				.in(declaringClass)
+				.unreflectSpecial(theMethod, declaringClass)
+				.bindTo(theProxy)
+				.invokeWithArguments(theArgs);
+			*/
+			return lookupHandleConstructor
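+//(the reflective Lookup constructor is the JDK8-era trick for invoking a
+//default method on a proxy: a private Lookup scoped to the declaring
+//interface can unreflectSpecial the method without re-entering this handler)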
.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE) + .unreflectSpecial(theMethod, declaringClass) + .bindTo(theProxy) + .invokeWithArguments(theArgs); + } + + String key = theMethod.getName(); + + Proxy.DataMap dataMap = (Proxy.DataMap)theMethod.getAnnotation(Proxy.DataMap.class); + if (dataMap != null) { + String dataKey = dataMap.map(); + if (dataKey != null && !"".equals(dataKey)) + key = dataKey; + } + + //this is ugly, can this be done through an extension mechanism such as plugging in functions? + if ( builder.hasExtension(key) ) + return this.builder.extension(key).apply(this, theArgs); + + Object val = this.data.getOrDefault(key, this.builder.context(key)); + +System.out.println("!! " + key + " : " + val); + +//as we create proxies here we should store them back in the 'data' so that we do not do it again +//can we always 'recognize' them? + if (val instanceof String && + String.class != theMethod.getReturnType()) { + return ConvertUtils.convert((String)val, theMethod.getReturnType()); + } + else if (val instanceof Map) { + if (dataMap != null && dataMap.proxy()) { + return builder.build((Map)val, theMethod.getReturnType()); + } + } + else if (val instanceof List) { + if (dataMap != null && dataMap.proxy()) { + return ((List)val) + .stream() + .map(e -> this.builder.build((Map)e, dataMap.elementType())) + .collect(Collectors.toList()); + } + } +/* + else if (val.getClass().isArray()) { + if (dataMap != null && dataMap.proxy()) { + } + } +*/ + return val; + } +} diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/ProxyBuilder.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/ProxyBuilder.java new file mode 100644 index 0000000..e3a422a --- /dev/null +++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/ProxyBuilder.java @@ -0,0 +1,92 @@ +package org.onap.sdc.dcae.catalog.commons; + +import java.util.Map; + +import java.util.function.Function; +import java.util.function.BiFunction; + +import org.apache.commons.beanutils.ConvertUtils; +import org.apache.commons.beanutils.Converter; + +import org.json.JSONObject; + +import org.onap.sdc.dcae.catalog.commons.Proxy; +import org.onap.sdc.dcae.catalog.commons.ProxyBuilder; + + +public class ProxyBuilder { + + private Map<String, ?> context; + private Map<String, BiFunction<Proxy, Object[], Object>> extensions; + + public ProxyBuilder() { + } +/* + public <T> T build(Map theData, Class<T> theType) { + return build(theData, this.context, theType); + } + + public <T> T build(Map theData, Map theContextData, Class<T> theType) { + return (T)java.lang.reflect.Proxy.newProxyInstance( + ProxyBuilder.class.getClassLoader(), + new Class[] { theType }, + new Proxy(theData, this)); + } +*/ + public <T> T build(Map theData, Class<T> theType) { + return build(new JSONObject(theData), theType); + } + + public <T> T build(Map theData, Map theContextData, Class<T> theType) { + return build(new JSONObject(theData), theContextData, theType); + } + + public <T> T build(JSONObject theData, Class<T> theType) { + return build(theData, this.context, theType); + } + + public <T> T build(JSONObject theData, Map theContextData, Class<T> theType) { + return (T)java.lang.reflect.Proxy.newProxyInstance( + ProxyBuilder.class.getClassLoader(), + new Class[] { theType }, + new Proxy(theData, this)); + } + + + + + public ProxyBuilder withConverter(final Function<Object, ?> theConverter, Class theType) { + ConvertUtils.register(new Converter() { + public Object convert(Class theToType, 
Object theValue) { + return theConverter.apply(theValue); + } + }, + theType); + return this; + } + + /* + plug in an extension to the proxy default behaviour. + */ + public ProxyBuilder withExtensions(Map<String, BiFunction<Proxy, Object[], Object>> theExtensions) { + this.extensions = theExtensions; + return this; + } + + public ProxyBuilder withContext(Map<String, ?> theContext) { + this.context = theContext; + return this; + } + + protected Object context(String theName) { + return this.context == null ? null : this.context.get(theName); + } + + protected BiFunction<Proxy, Object[], Object> extension(String theName) { + return this.extensions == null ? null : this.extensions.get(theName); + } + + protected boolean hasExtension(String theName) { + return this.extensions == null ? false : this.extensions.containsKey(theName); + } +} diff --git a/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Recycler.java b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Recycler.java new file mode 100644 index 0000000..3493cb1 --- /dev/null +++ b/dcaedt_catalog/commons/src/main/java/org/onap/sdc/dcae/catalog/commons/Recycler.java @@ -0,0 +1,329 @@ +package org.onap.sdc.dcae.catalog.commons; + +import java.io.Reader; +import java.io.IOException; + +import java.util.List; +import java.util.Map; +import java.util.HashMap; +import java.util.AbstractMap; +import java.util.Arrays; +import java.util.Iterator; +import java.util.Collections; +import java.util.Spliterators; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +import org.apache.commons.jxpath.Pointer; +import org.apache.commons.jxpath.JXPathContext; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.core.type.TypeReference; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.yaml.snakeyaml.Yaml; + + +/** + * Practically a copy of the Validator's service Recycler, minus the Spring framework aspects + picking up the + * description of every node + */ +public class Recycler { + + private static final String PROPERTIES = "properties"; + private static final String VALUE = "value"; + private static final String ASSIGNMENT = "assignment"; + private static final String CAPABILITY = "capability"; + private static final String RELATIONSHIP = "relationship"; + private static final String NAME = "name"; + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + private List<Map> imports; + private List<String> metas; + + public Recycler() { + withImports(); + withMetas(null); + } + + public Recycler withImports(String... theImports) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Setting imports to {}", theImports); + ListBuilder importsBuilder = new ListBuilder(); + for (int i = 0; i < theImports.length; i++) { + importsBuilder.add(new MapBuilder() + .put("i" + i, theImports[i]) + .build()); + } + this.imports = importsBuilder.build(); + return this; + } + + private List imports() { + ListBuilder importsBuilder = new ListBuilder(); + for (Map e: this.imports) { + importsBuilder.add(new MapBuilder() + .putAll(e) + .build()); + } + return importsBuilder.build(); + } + + public Recycler withMetas(String... theMetas) { + this.metas = (theMetas == null) ? 
Collections.emptyList() : Arrays.asList(theMetas); + return this; + } + + public Object recycle(final Reader theSource) throws Exception { + return this.recycle(new ObjectMapper().readValue(theSource, (Class)HashMap.class)); + } + + public Object recycle(final Object theDump) { + + final JXPathContext jxroot = JXPathContext.newContext(theDump); + jxroot.setLenient(true); + + final Map<String, Object> nodeTemplates = + (Map<String, Object>)new MapBuilder() + .putAll( + StreamSupport + .stream( + Spliterators.spliteratorUnknownSize((Iterator<Pointer>)jxroot.iteratePointers("/nodes"), 16), false) + .map(p -> { + JXPathContext jxnode = jxroot.getRelativeContext(p); + return new AbstractMap.SimpleEntry<String,Object>( + (String)jxnode.getValue(NAME) + "_" + (String)jxnode.getValue("nid"), + new MapBuilder() + .put("type", jxnode.getValue("type/name")) + .put("description", jxnode.getValue("description")) + .putOpt("metadata", nodeMetadata(jxnode)) + .putOpt(PROPERTIES, nodeProperties(jxnode)) + .putOpt("requirements", nodeRequirements(jxnode)) + .putOpt("capabilities", nodeCapabilities(jxnode)) + .build()); + })::iterator) + .buildOpt(); + + return new MapBuilder() + .put("tosca_definitions_version", "tosca_simple_yaml_1_0_0") + .put("imports", imports()) + .put("topology_template", new MapBuilder() + .putOpt("node_templates", nodeTemplates) + .build()) + .build(); + } + + /* */ + private Object nodeProperties(JXPathContext theNodeContext) { + return + new MapBuilder() + .putAll( + StreamSupport.stream( + Spliterators.spliteratorUnknownSize((Iterator<Map>)theNodeContext.iterate(PROPERTIES), 16), false) + .map(m -> new AbstractMap.SimpleEntry(m.get(NAME), this.nodeProperty(m))) + .filter(e -> e.getValue() != null) + ::iterator) + .buildOpt(); + } + + /* */ + private Object nodeProperty(final Map theSpec) { + Object value = theSpec.get(VALUE); + if (value == null) { + value = theSpec.get("default"); + if (value == null) { + /*final*/ Map assign = (Map)theSpec.get(ASSIGNMENT); + if (assign != null) { + value = assign.get(VALUE); + } + } + } + String type = (String)theSpec.get("type"); + if (value != null && type != null) { + value = getValueByType(value, type); + } + return value; + } + + private Object getValueByType(Object value, String type) { + Object returnValue = null; + try { + if ("map".equals(type) && !(value instanceof Map)) { + returnValue = new ObjectMapper().readValue(value.toString(), new TypeReference<Map>(){}); + } + else if ("list".equals(type) && !(value instanceof List)) { + returnValue = new ObjectMapper().readValue(value.toString(), new TypeReference<List>(){}); + } + else if ("integer".equals(type) && (value instanceof String)) { + returnValue = Integer.valueOf((String)value); + } + else if ("float".equals(type) && (value instanceof String)) { + returnValue = Double.valueOf((String)value); //double because that's how the yaml parser would encode it + } + } + catch (NumberFormatException nfx) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Failed to process String representation {} of numeric data: {}", value, nfx); + } + catch (IOException iox) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Failed to process {} representation of a collection: {}", value.getClass().getName(), iox); + } + return returnValue; + } + + /* */ + private List nodeRequirements(JXPathContext theNodeContext) { + return + new ListBuilder() + .addAll( + StreamSupport.stream( + Spliterators.spliteratorUnknownSize((Iterator<Map>)theNodeContext.iterate("requirements"), 16), 
false) + .flatMap(m -> this.nodeRequirement(m, theNodeContext).stream()) + //nicer that the ListBuilder buy cannot handle the empty lists, i.e. it will generate empty requirement lists + // .collect(Collectors.toList()) + .toArray()) + .buildOpt(); + } + + /* + * @param theSpec the requirement entry that appears within the node specification + * @param theNodeContext .. Should I pass the root context instead of assuming that the nodes context has it as parent? + * @return a List as one requirement (definition) could end up being instantiated multiple times + */ + private List nodeRequirement(final Map theSpec, JXPathContext theNodeContext/*Iterator theTargets*/) { + + final ListBuilder value = new ListBuilder(); + + final Map target = (Map)theSpec.get("target"); + final Map capability = (Map)theSpec.get(CAPABILITY); + final Map relationship = (Map)theSpec.get(RELATIONSHIP); + + //this are actual assignments + for (Iterator i = theNodeContext.getParentContext().iterate("/relations[@n2='" + theNodeContext.getValue("nid") + "']/meta[@p2='" + theSpec.get(NAME) +"']"); i.hasNext(); ) { + + String targetNodeName = (String)((Map)i.next()).get("n1"); + + //make sure target exists + Map targetNode = (Map)theNodeContext.getParentContext().getValue("/nodes[@nid='" + targetNodeName + "']"); + if (null == targetNode) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Relation points to non-existing node {}", targetNodeName); + continue; //this risks of producing a partial template .. + } + + value.add(new MapBuilder().put(theSpec.get(NAME), new MapBuilder() + .putOpt("node", targetNode.get(NAME) + "_" + targetNode.get("nid")) + .putOpt(CAPABILITY, capability == null ? null : capability.get(NAME)) + .putOpt(RELATIONSHIP, relationship == null ? null : relationship.get("type")) + .build()).build()); + } + addTemporary(theSpec, theNodeContext, value, capability, relationship); + + if (value.isEmpty()) { + value.add(new MapBuilder().put(theSpec.get(NAME), new MapBuilder() + .putOpt("node", target == null ? null : target.get(NAME) + "_" + target.get("nid")) + .putOpt(CAPABILITY, capability == null ? null : capability.get(NAME)) + .putOpt(RELATIONSHIP, relationship == null ? null : relationship.get("type")) + .build()).build()); + } + + return value.build(); + } + + private void addTemporary(Map theSpec, JXPathContext theNodeContext, ListBuilder value, Map capability, Map relationship) { + //temporary + for (Iterator i = theNodeContext.getParentContext().iterate("/relations[@n1='" + theNodeContext.getValue("nid") + "']/meta[@p1='" + theSpec.get(NAME) +"']"); i.hasNext(); ) { + + String targetNodeName = (String)((Map)i.next()).get("n2"); + + Map targetNode = (Map)theNodeContext.getParentContext().getValue("/nodes[@nid='" + targetNodeName + "']"); + //make sure target exists + if (null == targetNode) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Relation points to non-existing node {}", targetNode); + continue; //this risks of producing a partial template .. + } + + value.add(new MapBuilder().put(theSpec.get(NAME), new MapBuilder() + .putOpt("node", targetNode.get(NAME) + "_" + targetNode.get("nid")) + .putOpt(CAPABILITY, capability == null ? null : capability.get(NAME)) + .putOpt(RELATIONSHIP, relationship == null ? 
null : relationship.get("type")) + .build()).build()); + } + //end temporary + } + + /* */ + private Map nodeCapabilities(JXPathContext theNodeContext) { + return + new MapBuilder() + .putAll( + StreamSupport.stream( + Spliterators.spliteratorUnknownSize((Iterator<Map>)theNodeContext.iterate("capabilities"), 16), false) + .map(m -> this.nodeCapability(m)) + .filter(c -> c != null) + ::iterator) + .buildOpt(); + } + + /** + * this handles a capability assignment which only includes properties and attributes so unless there + * are any properties/attributes assignments we might not generate anything + */ + private Map.Entry nodeCapability(final Map theSpec) { + List<Map> properties = (List<Map>) theSpec.get(PROPERTIES); + if (properties == null || properties.isEmpty()) { + return null; + } + + return new AbstractMap.SimpleEntry(theSpec.get(NAME), + new MapBuilder() + .put(PROPERTIES, + new MapBuilder().putAll(properties.stream() + .filter(p -> p.containsKey(ASSIGNMENT) || + p.containsKey(VALUE)) + .map(p -> new AbstractMap.SimpleEntry( + p.get(NAME), + p.containsKey(ASSIGNMENT) ? + ((Map) p.get(ASSIGNMENT)).get(VALUE) + : p.get(VALUE)) + ) + ::iterator) + .build()) + .build()); + } + + + /* */ + private Object nodeMetadata(JXPathContext theNodeContext) { + return + new MapBuilder() + .putAll( + this.metas + .stream() + .flatMap(m -> { + Object v = theNodeContext.getValue(m); + if (v == null) { + return Stream.empty(); + } + if (v instanceof Map) { + return ((Map) v).entrySet() + .stream() + .map(e -> new AbstractMap.SimpleEntry<String, Object> + (((Map.Entry) e).getKey().toString(), + ((Map.Entry) e).getValue().toString())); + } + return Stream.of(new AbstractMap.SimpleEntry<String,Object>(m, v.toString())); + }) + ::iterator) + .buildOpt(); + } + + + public static String toString(Object theVal) { + return new Yaml().dump(theVal); + } + + + public static void main(String[] theArgs) throws Exception { + debugLogger.log(LogLevel.DEBUG, Recycler.class.getName(), + Recycler.toString( + new Recycler().recycle(new java.io.FileReader(theArgs[0])))); + } +} diff --git a/dcaedt_catalog/db/pom.xml b/dcaedt_catalog/db/pom.xml new file mode 100644 index 0000000..8a0e1f9 --- /dev/null +++ b/dcaedt_catalog/db/pom.xml @@ -0,0 +1,149 @@ +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Catalog</artifactId> + <version>1806.0.1-SNAPSHOT</version> + </parent> + <artifactId>DCAE-DT-Catalog-DB</artifactId> + <packaging>jar</packaging> + <name>DCAE DT Catalog database</name> + + <build> + <sourceDirectory>src/main/java</sourceDirectory> + <plugins> + <plugin> + <artifactId>maven-compiler-plugin</artifactId> + <version>3.1</version> + <configuration> + <source>1.8</source> + <target>1.8</target> + <encoding>${project.build.sourceEncoding}</encoding> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-dependency-plugin</artifactId> + <version>2.10</version> + <executions> + <execution> + <id>copy-dependencies</id> + <phase>package</phase> + <goals> + <goal>copy-dependencies</goal> + </goals> + <configuration> + <outputDirectory>${project.build.directory}/deps</outputDirectory> + <overWriteReleases>false</overWriteReleases> + <overWriteSnapshots>false</overWriteSnapshots> + 
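+ <!-- copies all project dependencies into target/deps during the package phase -->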
<overWriteIfNewer>true</overWriteIfNewer> + </configuration> + </execution> + </executions> + </plugin> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>buildnumber-maven-plugin</artifactId> + <version>1.4</version> + <executions> + <execution> + <phase>validate</phase> + <goals> + <goal>create</goal> + </goals> + </execution> + </executions> + <configuration> + <doCheck>false</doCheck> + <doUpdate>false</doUpdate> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <version>2.1</version> + <configuration> + <archive> + <manifest> + <addDefaultImplementationEntries>true</addDefaultImplementationEntries> + </manifest> + <manifestEntries> + <Implementation-Build>${buildNumber}</Implementation-Build> + </manifestEntries> + </archive> + </configuration> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-assembly-plugin</artifactId> + <version>2.6</version> + <configuration> + <descriptorRefs> + <descriptorRef>jar-with-dependencies</descriptorRef> + </descriptorRefs> + <archive> + <manifest> + <mainClass>org.onap.sdc.dcae.db.neo4j.Modeled</mainClass> + </manifest> + <manifestEntries> + <Implementation-Build>${buildNumber}</Implementation-Build> + </manifestEntries> + </archive> + </configuration> + <executions> + <execution> + <id>make-assembly</id> <!-- this is used for inheritance merges --> + <phase>package</phase> <!-- bind to the packaging phase --> + <goals> + <goal>single</goal> + </goals> + </execution> + </executions> + </plugin> + + </plugins> + </build> + <dependencies> + <dependency> + <groupId>org.apache.httpcomponents</groupId> + <artifactId>httpasyncclient</artifactId> + <version>4.1</version> + </dependency> + <dependency> + <groupId>commons-io</groupId> + <artifactId>commons-io</artifactId> + <version>2.4</version> + </dependency> + <dependency> + <groupId>commons-cli</groupId> + <artifactId>commons-cli</artifactId> + <version>1.3</version> + </dependency> + <dependency> + <groupId>commons-jxpath</groupId> + <artifactId>commons-jxpath</artifactId> + <version>1.3</version> + </dependency> + <dependency> + <groupId>com.google.guava</groupId> + <artifactId>guava</artifactId> + <version>17.0</version> + </dependency> + <dependency> + <groupId>org.yaml</groupId> + <artifactId>snakeyaml</artifactId> + <version>1.17</version> + </dependency> + <dependency> + <groupId>org.json</groupId> + <artifactId>json</artifactId> + <version>20160212</version> + </dependency> + <dependency> + <groupId>com.github.wnameless</groupId> + <artifactId>json-flattener</artifactId> + <version>0.2.2</version> + </dependency> + </dependencies> +</project> diff --git a/dcaedt_catalog/db/src/main/java/org/onap/sdc/dcae/db/neo4j/Modeled.java b/dcaedt_catalog/db/src/main/java/org/onap/sdc/dcae/db/neo4j/Modeled.java new file mode 100644 index 0000000..6b2f395 --- /dev/null +++ b/dcaedt_catalog/db/src/main/java/org/onap/sdc/dcae/db/neo4j/Modeled.java @@ -0,0 +1,1980 @@ +/* + * AT&T - PROPRIETARY + * THIS FILE CONTAINS PROPRIETARY INFORMATION OF + * AT&T AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN + * ACCORDANCE WITH APPLICABLE AGREEMENTS. 
+ *
+ * Copyright (c) 2014 AT&T Knowledge Ventures
+ * Unpublished and Not for Publication
+ * All Rights Reserved
+ */
+package org.onap.sdc.dcae.db.neo4j;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.List;
+import java.util.LinkedList;
+import java.util.Collections;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.codec.binary.Base64;
+
+import org.apache.commons.jxpath.JXPathContext;
+import org.apache.commons.jxpath.JXPathException;
+
+import org.apache.http.Header;
+import org.apache.http.HttpHeaders;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.json.JSONArray;
+
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.yaml.snakeyaml.Yaml;
+
+import com.google.common.collect.Table;
+import com.google.common.collect.HashBasedTable;
+
+/* A few less obvious design choices:
+ * - representing properties across type hierarchies (same for requirements
+ * and capabilities, and will be for attributes and interfaces when we
+ * add them): we attach to each type only those properties it declares (such a
+ * declaration might be the re-definition of a property defined by a supertype).
+ * Calculating the set of properties for a type (i.e. the ones it declares plus
+ * the ones it inherits, with respect to re-definitions) is a 2-step process:
+ * 1. run a query matching all properties across the type's hierarchy, from
+ * leaf to root type (neo's job)
+ * 2. collect them in a set that accumulates them with respect to
+ * re-definition (the model catalog client library's job)
+ * A (viable) alternative would have been to calculate the entire property set
+ * at model import time and associate it with the type node. It would simplify
+ * the query and processing in the catalog API. It has the drawback of making
+ * the reverse process (exporting a yaml model from neo) tedious.
+ * As we get a better sense of where the optimizations are needed this might
+ * be a change to be made ..
+ *
+ *
+ * - representing requirements and capabilities as nodes. At first glance
+ * both can be represented as edges pointing from a Type Node or Template Node
+ * to another Type Node or Template Node. While this is true for capabilities,
+ * it is not so for requirements: a requirement could point to a capability
+ * of a Type Node, i.e. it is a hyperedge between a Type Node (or Template Node),
+ * another Type Node (the target) and a capability of the target.
+ * As such, the requirement ends up being represented as a node and the
+ * capability will need to do the same in order to be able to be pointed at
+ * (and for the sake of uniformity ..).
+ *
+ *
+ */
+public class Modeled {
+
+	private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+	private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+	private static HttpClientBuilder httpClientBuilder =
+											HttpClientBuilder.create();
+	private static String USAGE = "oil oil_stylesheet_path | bigdata | aws | awsdata input_file customer";
+
+	private static List<String> ignoreMissing = new LinkedList<String>();
+
+	static {
+		Collections.addAll(ignoreMissing,
+			"tosca.datatypes",
+			"tosca.capabilities",
+			"tosca.relationships",
+			"tosca.interfaces",
+			"tosca.nodes",
+			"tosca.artifacts",
+			"tosca.policies",
+			"tosca.groups");
+	}
+
+	public static void main(String[] theArgs) {
+
+		CommandLineParser parser = new BasicParser();
+
+		// create the Options
+		Options options = new Options();
+		options.addOption(OptionBuilder.
+			withArgName("target")
+			.withLongOpt("target")
+			.withDescription("target neo4j database uri")
+			.hasArg()
+			.isRequired()
+			.create('t'));
+
+		options.addOption(OptionBuilder.
+			withArgName("action")
+			.withLongOpt("action")
+			.withDescription("one of import, annotate, list, remove")
+			.hasArg()
+			.isRequired()
+			.create('a'));
+
+		options.addOption(
+			OptionBuilder.withArgName("input")
+				.withLongOpt("input")
+				.withDescription(
+					"for import/annotate: the tosca template file, " +
+					"for list: an optional json filter, " +
+					"for remove: the template id")
+				.hasArgs()
+				.create('i')).addOption(
+			OptionBuilder.withArgName("labels")
+				.withLongOpt("labels")
+				.withDescription(
+					"for annotate: the ':' separated list of annotation labels")
+				.hasArgs()
+				.create('l'));
+
+		options.addOption(OptionBuilder.
+ withArgName("ignore") + .withLongOpt("ignore") + .isRequired(false) + .withDescription( + "for annotate: the ':' sepatated list of namespaces who's missing constructs can be ignored") + .hasArgs() + .create()); + + + CommandLine line; + try { + line = parser.parse(options, theArgs); + } catch (ParseException exp) { + errLogger.log(LogLevel.ERROR, Modeled.class.getName(), exp.getMessage()); + HelpFormatter formatter = new HelpFormatter(); + formatter.printHelp("import", options); + return; + } + + String ignores = line.getOptionValue("ignore"); + if (ignores != null) + Collections.addAll(ignoreMissing, ignores.split(":")); + + Modeled modeled = new Modeled(); + try { + modeled.setNeoUri(new URI(line.getOptionValue("target"))); + } catch (URISyntaxException urisx) { + errLogger.log(LogLevel.ERROR, Modeled.class.getName(), "Invalid target specification: {}", urisx); + return; + } + + try { + loadStorageSpec(); + + String action = line.getOptionValue("action"); + if ("import".equals(action)) { + modeled.importTemplate(line.getOptionValue("input")); + } else if ("annotate".equals(action)) { + modeled.annotateItem(line.getOptionValue("input"), line.getOptionValue("labels")); + } else if ("list".equals(action)) { + modeled.listTemplates(line.getOptionValue("input")); + } else if ("remove".equals(action)) { + modeled.removeTemplate(line.getOptionValue("input")); + } else { + HelpFormatter formatter = new HelpFormatter(); + formatter.printHelp("import", options); + } + } catch (Exception x) { + errLogger.log(LogLevel.ERROR, Modeled.class.getName(), x.getMessage()); + } + } + + private static Tracker<String> tracker = new Tracker<String>(); + private static Map toscaStorageSpec; + + private static void loadStorageSpec() { + toscaStorageSpec = (Map) new Yaml().load( + Modeled.class.getClassLoader().getResourceAsStream("tosca-schema.yaml")); + + Map storageSpec = (Map) new Yaml().load( + Modeled.class.getClassLoader().getResourceAsStream("tosca-storage-schema.yaml")); + + JXPathContext jxPath = JXPathContext.newContext(toscaStorageSpec); + for (Iterator<Map.Entry<String, Object>> ces = + storageSpec.entrySet().iterator(); + ces.hasNext(); ) { + Map.Entry<String, Object> ce = ces.next(); + try { + Map m = (Map) jxPath.getValue(ce.getKey()); + if (m == null) { + debugLogger.log(LogLevel.DEBUG, Modeled.class.getName(), "No schema entry '{}'", ce.getKey()); + continue; + } + + m.putAll((Map) ce.getValue()); + } catch (JXPathException jxpx) { + errLogger.log(LogLevel.WARN, Modeled.class.getName(), "Failed to apply storage info {}", jxpx); + } + } + } + + + private static JSONObject EMPTY_JSON_OBJECT = new JSONObject(); + + private URI neoUri = null; + + private Modeled() { + } + + private void setNeoUri(URI theUri) { + this.neoUri = theUri; + } + + public URI getNeoUri() { + return this.neoUri; + } + + /* Experimental in nature. I was reluctant creating another node to represent + * the set of constraints as they're integral part of the property (or other + * artifact) they're related to. I was also looking for a representation + * that would easily be processable into a TOSCA abstraction in the + * Catalog API. So ... we pack all the constraints as a JSON string and store + * them as a single property of the TOSCA artifact they belog to. + * Highs: easily un-winds in an object + * Lows: can't write query selectors based on constraints values .. + //the TOSCA/yaml spec exposes constraints as a List .. where each + //entry is a Map .. why?? 
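+ //e.g. the constraints list [{greater_or_equal: 0}, {less_than: 100}] is
+ //stored as the single JSON string {"greater_or_equal":0,"less_than":100}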
+ */ + private static String yamlEncodeConstraints(List theConstraints) { + Map allConstraints = new HashMap(); + for (Object c : theConstraints) { + allConstraints.putAll((Map) c); + //this would be the place to add dedicate processing of those + //constraints with 'special' values, i.e. in_range: dual scalar, + //valid_values: list + } + return JSONObject.valueToString(allConstraints); + } + + /* TODO: attributes handling to be added, similar to properties. + */ + private void yamlNodeProperties(String theNodeId, + Map<String, Object> theProperties, + NeoTransaction theTrx) + throws IOException { + + for (Map.Entry<String, Object> propertyEntry : theProperties.entrySet()) { + String propName = propertyEntry.getKey(); + Object propObject = propertyEntry.getValue(); + + Map propValues; + if (propObject instanceof Map) { + propValues = (Map) propObject; + } else { + //valuation, not of interest here + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoNode, unknown property representation {} for {}, node {}", propObject.getClass(), propObject, theNodeId); + continue; + } + + String constraintsValue = null; + if (propValues.containsKey("constraints")) { + constraintsValue = yamlEncodeConstraints( + (List) propValues.get("constraints")); + } + + String neoPropId = neoCreateNode( + theTrx, false, + new JSONObject() + .put("name", propName) + .put("type", propValues.getOrDefault("type", "string")) + .put("required", propValues.getOrDefault("required", Boolean.TRUE)) + .putOpt("default", propValues.get("default")) + .putOpt("description", propValues.get("description")) + .putOpt("status", propValues.get("status")) + .putOpt("constraints", constraintsValue), + "TOSCA", "Property"); + + neoEdge(theTrx, false, + neoPropId, + theNodeId, + EMPTY_JSON_OBJECT, + "PROPERTY_OF"); + } + + } + + private void yamlNodeTypeCapabilities(String theNodeId, + Map<String, Object> theCapabilities, + NeoTransaction theTrx) + throws IOException { + + for (Map.Entry<String, Object> capability : theCapabilities.entrySet()) { + String capabilityName = capability.getKey(); + Object capabilityValue = capability.getValue(); + + String capabilityType = null, + capabilityDesc = null; + Map<String, Object> capabilitySpec = null; + + if (capabilityValue instanceof String) { + //short notation was used, we get the name of a capability type + capabilityType = (String) capabilityValue; + + capabilitySpec = Collections.singletonMap("type", capabilityType); + } else if (capabilityValue instanceof Map) { + //extended notation + capabilitySpec = (Map<String, Object>) capabilityValue; + + capabilityType = (String) capabilitySpec.get("type"); + //cannot be missing + if (capabilityType == null) { + //ERROR!! + errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoNode, missing capability type in {} for node {}", capabilitySpec, theNodeId); + continue; //rollback .. + } + capabilityDesc = (String) capabilitySpec.get("description"); + } + + // + String anonCapabilityTypeId = null; + if (capabilitySpec.containsKey("properties")) { + //we need an anonymous capability type (augmentation) + //or they could be added to the 'Capabillity' node but anonymous + //types make processing more uniform + anonCapabilityTypeId = + yamlAnonymousType(capabilitySpec, + capabilityType, +//not a very nice owner string as theNodeId is cryptic (we should use +//node name but do not have it here .. 
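+//(e.g. node id 42 with capability 'host' gives the owner string "42#host",
+//from which yamlAnonymousType derives the anonymous type id "42#host#<type>")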
+ theNodeId + "#" + capabilityName, + true, + false, + theTrx); + } + + JSONObject capabilityDef = new JSONObject() + .put("name", capabilityName) + .putOpt("description", capabilityDesc); + if (capabilitySpec != null) { + List occurrences = (List) capabilitySpec.get("occurrences"); + if (occurrences != null) { + capabilityDef.put("occurrences", encodeRange(occurrences)); + } + List valid_source_types = (List) capabilitySpec.get("valid_source_types"); + if (valid_source_types != null) { + capabilityDef.put("validSourceTypes", + new JSONArray(valid_source_types)); + } + } + + String capabilityId = neoCreateNode( + theTrx, false, + capabilityDef, + "TOSCA", "Capability"); + neoEdge(theTrx, false, + capabilityId, + theNodeId, + EMPTY_JSON_OBJECT, + "CAPABILITY_OF"); + + if (anonCapabilityTypeId != null) { + neoEdge(theTrx, false, + capabilityId, + anonCapabilityTypeId, + new JSONObject() + .put("name", capabilityName) + .putOpt("description", capabilityDesc), + "FEATURES"/* TARGETS */); + //no reason this one would point to a non-existing capability as we just created one + } else { + if (null == neoEdge(theTrx, false, + capabilityId, + "Type", + new JSONObject() + .put("name", capabilityType), + new JSONObject() + .put("name", capabilityName) + .putOpt("description", capabilityDesc), + "FEATURES"/* TARGETS */)) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeCapabilities, Node {}, capability {} (id: {}) seems to point to invalid capability type: {}", theNodeId, capabilityName, capabilityId, capabilityType); + ignoreMissing(capabilityType); + } + } + + } + + } + + private void yamlNodeTypeRequirements( + String theNodeTypeId, + List<Map<String, Object>> theRequirements, + NeoTransaction theTrx) + throws IOException { + + for (Map<String, Object> arequirement : theRequirements) { + //supposed to have only one entry + Map.Entry<String, Object> requirement = + arequirement.entrySet().iterator().next(); + + String requirementName = requirement.getKey(); + Object requirementValue = requirement.getValue(); + + String targetNode = null, + targetCapability = null, + targetRelationship = null; + Map<String, Object> requirementSpec = null; + + if (requirementValue instanceof String) { + //short form, points to a capability type + targetCapability = (String) requirementValue; + } else if (requirementValue instanceof Map) { + //extended notation + requirementSpec = (Map<String, Object>) requirementValue; + + targetCapability = (String) requirementSpec.get("capability"); + targetNode = (String) requirementSpec.get("node"); + //this assumes a short form for the relationship specification + //it can actually be a map (indicating the relationship type and the + //additional interface definitions). + targetRelationship = (String) requirementSpec.get("relationship"); + } + + if (targetCapability == null) { + throw new IOException(theNodeTypeId + "missing capability type"); + } + + JSONObject requirementDef = new JSONObject() + .put("name", requirementName); + if (requirementSpec != null) { + List occurrences = (List) requirementSpec.get("occurrences"); + if (occurrences != null) { + requirementDef.put("occurrences", encodeRange(occurrences)); + } + } + + String requirementId = neoCreateNode( + requirementDef, + "TOSCA", "Requirement"); + neoEdge(theTrx, false, + requirementId, + theNodeTypeId, + EMPTY_JSON_OBJECT, + "REQUIREMENT_OF"); + + //we're not verifying here that this a capability type .. 
just a type + if (null == neoEdge(theTrx, false, + requirementId, + "Type", + new JSONObject() + .put("name", targetCapability), + EMPTY_JSON_OBJECT, + "CAPABILITY")) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeRequirements, Node {}, requirement {} (id: {}) seems to point to invalid capability type: {}", theNodeTypeId, requirementName, requirementId, targetCapability); + } + + if (targetNode != null) { + //points to a node type + if (null == neoEdge(theTrx, false, + requirementId, + "Type", + new JSONObject() + .put("name", targetNode), + EMPTY_JSON_OBJECT, + "REQUIRES")) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeRequirements, Node {}, requirement {} (id: {}) seems to point to invalid capability type: {}", theNodeTypeId, requirementName, requirementId, targetCapability); + } + } + + if (targetRelationship != null) { + //points to a relationship type + if (null == neoEdge(theTrx, false, + requirementId, + "Type", + new JSONObject() + .put("name", targetRelationship), + EMPTY_JSON_OBJECT, + "RELATIONSHIP")) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeRequirements, Node {}, requirement {} (id: {}) seems to point to invalid relationship type: {}", theNodeTypeId, requirementName, requirementId, targetRelationship); + } + } + } + } + + /* + * handles the requirement assignments + */ + private void toscaRequirementsAssignment( + String theNodeId, + List<Map<String, Object>> theRequirements, + NeoTransaction theTrx) + throws IOException { + + for (Map<String, Object> arequirement : theRequirements) { + //supposed to have only one entry + Map.Entry<String, Object> requirement = + arequirement.entrySet().iterator().next(); + + String requirementName = requirement.getKey(); + Object requirementValue = requirement.getValue(); + + String targetNode = null, + targetCapability = null, + targetRelationship = null; + //TODO: targetFilter + + Map<String, Object> requirementSpec = null; + + if (requirementValue instanceof String) { + //short notation was used, we get the name of a local node + targetNode = (String) requirementValue; + } else if (requirementValue instanceof Map) { + //extended notation + requirementSpec = (Map<String, Object>) requirementValue; + + targetNode = (String) requirementSpec.get("node"); + targetCapability = (String) requirementSpec.get("capability"); + targetRelationship = (String) requirementSpec.get("relationship"); + } + + /* TODO: add targetFilter definition in here (most likely place) + */ + String requirementId = neoCreateNode( + theTrx, false, + new JSONObject() + .put("name", requirementName), + "TOSCA", "Requirement"); + + neoEdge(theTrx, false, + requirementId, + theNodeId, + EMPTY_JSON_OBJECT, + "REQUIREMENT_OF"); + + String targetNodeTemplate = null; + if (targetNode != null) { + //check if the target is a node within the template (in which case the + //requirement is really defined by that node type. i.e. 
its type's + //capabilities + targetNodeTemplate = tracker.lookupTemplate("Node", targetNode); + if (targetNodeTemplate != null) { + neoEdge(theTrx, false, + requirementId, + targetNodeTemplate, + new JSONObject() + .put("name", requirementName), + "REQUIRES" /* TARGETS */); + } else { + //if not a local node template then it must be node type + if (null == neoEdge(theTrx, false, + requirementId, + "Type", + new JSONObject() + .put("name", targetNode), + EMPTY_JSON_OBJECT, + "REQUIRES")) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlNodeTypeRequirements, Node {}, requirement {} (id: {}) seems to point to invalid node type: {}", theNodeId, requirementName, requirementId, targetNode); + } + } + } + + if (targetCapability != null) { + /* + * Can point to a capability of the targetNode (template or type, + * whatever was specified) or to a capability type; + */ + if (targetNode != null) { + String stmt = null; + if (targetNodeTemplate != null) { + //a capability of a local node template + //TODO: could be a capability type of a local node (and is up to the + //orchestrator to pick) given that the target node has at least one //capability of that type + stmt = + "MATCH (c:Capability)-[:CAPABILITY_OF]->(n:Node), (r:Requirement) " + + "WHERE id(n)=" + targetNodeTemplate + " " + + "AND c.name = \"" + targetCapability + "\" " + + "AND id(r)=" + requirementId + " " + + "MERGE (r)-[rq:REQUIRES_CAPABILITY]->(c) " + + "RETURN id(rq)"; + } else { + //a capability of the node type + stmt = + "MATCH (c:Type:Capability)-[:CAPABILITY_OF]->(t:Type), (r:Requirement) " + + "WHERE t.name = \"" + targetNode + "\" " + + "AND c.name = \"" + targetCapability + "\" " + + "AND id(r)=" + requirementId + " " + + "MERGE (r)-[rq:REQUIRES_CAPABILITY]->(c) " + + "RETURN id(rq)"; + } + if (null == neoId(theTrx + .statement( + new JSONObject() + .put("statement", stmt)) + .execute() + .result())) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "toscaRequirementsAssignment, Node {}, requirement {} (id: {}) seems to point to invalid node capability: {}", theNodeId, requirementName, requirementId, targetCapability); + } + } else { + if (null == neoEdge(theTrx, false, + requirementId, + "Type", + new JSONObject() + .put("name", targetCapability), + EMPTY_JSON_OBJECT, + "REQUIRES_CAPABILITY")) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "toscaRequirementsAssignment, Node {}, requirement {} (id: {}) seems to point to invalid capability type: {}", theNodeId, requirementName, requirementId, targetCapability); + } + } + } + + if (targetRelationship != null) { + if (null == neoEdge(theTrx, false, + requirementId, + "Type", + new JSONObject() + .put("name", targetRelationship), + EMPTY_JSON_OBJECT, + "RELATIONSHIP")) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "toscaRequirementsAssignment, Node {}, requirement {} (id: {}) seems to point to invalid relationship type: {}", theNodeId, requirementName, requirementId, targetRelationship); + } + } else { + //TODO: does the presence of properties/attributes/interfaces in the + //requirement definition trigger the defintion of an anonymous + //relationship type?? (maybe derived from the one under the + //'relationship_type' key, if present?) 
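+ //summing up: each assignment becomes a Requirement node with edges
+ //REQUIREMENT_OF to its owning node, REQUIRES/REQUIRES_CAPABILITY towards
+ //the target node or capability, and RELATIONSHIP to any named relationship type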
+ } + } + } + + /* an anonymous type is created from a node specification (type,template) + */ + private String yamlAnonymousType(Map<String, Object> theInfo, + String theType, + String theOwner, + boolean doProperties, + boolean doCapabilities, + NeoTransaction theTrx) + throws IOException { + + //is this naming scheme capable enough??NO! + String anonTypeId = theOwner + "#" + (theType == null ? "" : theType); + + String neoAnonTypeId = neoMergeNode( + theTrx, false, + new JSONObject() + .put("name", anonTypeId) + .put("id", anonTypeId), + "TOSCA", "Type"); + + if (theType != null) { + neoEdge(theTrx, false, + neoAnonTypeId, + "Type", + new JSONObject() + .put("name", theType), + EMPTY_JSON_OBJECT, + "DERIVED_FROM"); + } + + //shoudl the properties spec be passed explcitly?? + if (doProperties) { + Map<String, Object> props = (Map<String, Object>) theInfo.get("properties"); + if (props != null) { + yamlNodeProperties(neoAnonTypeId, props, theTrx); + } + } + + return neoAnonTypeId; + } + + /* + * A first pass over a type spec provisions each type individually + * and its properties. + * We process here types for all constructs: data, capability, relationship, + * node, [interface, artifact] + */ + private void toscaTypeSpec(String theConstruct, + Map<String, Map> theTypes, + NeoTransaction theTrx) + throws IOException { + //first pass, provision each type individually (and their properties) + String rule = "_" + theConstruct.toLowerCase() + "_type_definition"; + Map storageSpec = (Map) toscaStorageSpec.get(rule); + + for (Map.Entry<String, Map> toscaType : theTypes.entrySet()) { + String typeName = toscaType.getKey(); + Map<String, Map> typeValue = (Map<String, Map>) toscaType.getValue(); + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Type: {}", typeName); + + JSONObject data = pack(storageSpec, typeValue) + .put("name", typeName) + .put("id", typeName); + + String neoTypeId = neoMergeNode(theTrx, false, data, "TOSCA", "Type", theConstruct); + + tracker.trackType(theConstruct, typeName, neoTypeId); + + Map<String, Object> toscaTypeProps = (Map<String, Object>) typeValue.get("properties"); + if (toscaTypeProps != null) { + yamlNodeProperties(neoTypeId, toscaTypeProps, theTrx); + } //type props + } //types + + toscaTypePostProc(theConstruct, theTypes, theTrx); + } + + /* + * A second pass to process the derived_from relationship and + * the capabilities (now that the capabilities types have been provisioned) + */ + private void toscaTypePostProc(String theConstruct, + Map<String, Map> theTypes, + NeoTransaction theTrx) + throws IOException { + for (Map.Entry<String, Map> typeEntry : theTypes.entrySet()) { + Map typeValue = typeEntry.getValue(); + String typeName = typeEntry.getKey(); + + //supertype and description: all types + String superTypeName = (String) typeValue.get("derived_from"); + if (superTypeName != null) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}-DERIVED_FROM->{}", typeName, superTypeName); + + if (tracker.tracksType(theConstruct, superTypeName)) { + if (null == neoEdge(theTrx, false, + tracker.lookupType(theConstruct, typeName), + tracker.lookupType(theConstruct, superTypeName), + EMPTY_JSON_OBJECT, + "DERIVED_FROM")) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, missing parent type {}, id {} for type {}, id {}", superTypeName, tracker.lookupType(theConstruct, superTypeName), typeName, tracker.lookupType(theConstruct, typeName)); + } + } else { + if (null == neoEdge(theTrx, false, + 
tracker.lookupType(theConstruct, typeName), + "Type", + new JSONObject() + .put("name", superTypeName), + new JSONObject(), + "DERIVED_FROM")) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, missing parent type {} for type {}", superTypeName, typeName); + } + } + } + + //requirements/capabilities: for node types + Map<String, Object> capabilities = + (Map<String, Object>) typeValue.get("capabilities"); + if (capabilities != null) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Processing: {}", capabilities); + yamlNodeTypeCapabilities( + tracker.lookupType(theConstruct, typeName), capabilities, theTrx); + } + + List<Map<String, Object>> requirements = + (List<Map<String, Object>>) typeValue.get("requirements"); + if (requirements != null) { + yamlNodeTypeRequirements( + tracker.lookupType(theConstruct, typeName), requirements, theTrx); + } + + //interfaces: for node types or relationship types + Object interfaces = typeValue.get("interfaces"); + if (interfaces != null) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, Type {}: interfaces section declared but not handled", typeName); + if (interfaces instanceof List) { + //expect a list of interface types + } + } + + //valid targets: for relationship types + List valid_targets = (List) typeValue.get("valid_targets"); + if (valid_targets != null) { + //add as a property to the type node, can be used for validation + //whereever this type is used + //the list should contain node type names and we should check that we + //have those types + errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, Type {}: valid_targets section declared but not handled", typeName); + + } + + List artifacts = (List) typeValue.get("artifacts"); + if (artifacts != null) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlTypePostProc, Type {}: artifacts section declared but not handled", typeName); + } + + /* Artifact types can have "mime_type" and "file_ext" sections + */ + } + } + + private void toscaTemplate(String theTopologyTemplateId, + String theConstruct, + Map<String, Object> theTemplates, + NeoTransaction theTrx) + throws IOException { + + String rule = "_" + theConstruct.toLowerCase() + "_template_definition"; + Map storageSpec = (Map) toscaStorageSpec.get(rule); + if (storageSpec == null) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No rule '{}', can't make up the storage specification for {}", rule, theConstruct); + } + + for (Map.Entry<String, Object> template : theTemplates.entrySet()) { + + String templateName = template.getKey(); + Map<String, Object> templateSpec = (Map<String, Object>) template.getValue(); + + String templateType = (String) templateSpec.get("type"); + if (templateType == null) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoNode, template {}'{}', does not have a type specification .. 
skipping", theConstruct, templateName); + continue; + } + + try { + //we use create here as node names are not unique across templates + JSONObject neoTemplateNode = + pack(storageSpec, templateSpec) + .put("name", templateName); + + String templateNodeId = neoCreateNode( + theTrx, false, neoTemplateNode, "TOSCA", theConstruct); + + tracker.trackTemplate(theConstruct, templateName, templateNodeId); + + neoEdge(theTrx, false, + templateNodeId, + theTopologyTemplateId, + new JSONObject(), + theConstruct.toUpperCase() + "_OF"); + + if (null == neoEdge(theTrx, false, + templateNodeId, + "Type", + new JSONObject() + .put("name", templateType), + new JSONObject(), + "OF_TYPE")) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlSpec, Template {}, {} {}: failed to identify type {}", theTopologyTemplateId, theConstruct, templateName, templateType); + } + + //facets + + //we handle properties for all constructs (as they all have them) + Map<String, Object> templateProps = + (Map<String, Object>) templateSpec.get("properties"); + if (templateProps != null) { + for (Map.Entry<String, Object> templateProp : + templateProps.entrySet()) { + String templatePropName = templateProp.getKey(); + Object templatePropObject = templateProp.getValue(); + + final Map templatePropValues; + if (templatePropObject instanceof Map) { + templatePropValues = (Map) templatePropObject; + } else { + + //this is dealing with short form, if we ran the first 2 stages of the checker //we'd always be working on a canonical form .. + // + templatePropValues = new HashMap(); + templatePropValues.put("value", templatePropObject); + } + + //a node will contain the means for property valuation: + //straight value or a call to get_input/get_property/get_attribute + + //find the property node (in the type) this valuation belongs to + if (templatePropValues != null) { + + String propertyId = + neoId( + theTrx.statement( + new JSONObject() + .put("statement", + "MATCH (t:Type)-[:DERIVED_FROM*0..5]->(:Type)<-[:PROPERTY_OF]-(p:Property) " + + "WHERE t.name='" + templateType + "' " + + "AND p.name='" + templatePropName + "' " + + "RETURN id(p)")) + .execute() + .result() + ); + + if (propertyId == null) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "yamlSpec, Template {}, {} template {}, property {} does not match the node type spec, skipping property", templateName, theConstruct, templateName, templatePropName); + continue; + } + + //remove valuation by function: for now handle only get_input + String propInput = (String) templatePropValues.remove("get_input"); + + List constraints = (List) templatePropValues.remove("constraints"); + if (constraints != null) { + //flattening + templatePropValues.put("constraints", + yamlEncodeConstraints(constraints)); + } + + Object val = templatePropValues.remove("value"); + //check if the value is a collection or user defined data type, the cheap way + if (val instanceof List || + val instanceof Map) { + /* An interesting option here: + * 1. store the whole flatten value under the 'value' property + templatePropValues.put("value", JsonFlattener.flatten(JsonObject.valueToString(val))); + Simpler but almost impossible to write queries based on property value + * 2. store each entry in the flatten map as a separate property (we prefix it with 'value' for + * clarity). 
+ * see below
+ */
+ /*
+ JsonFlattener.flattenAsMap(JSONObject.valueToString(Collections.singletonMap("value",val)))
+ .entrySet()
+ .stream()
+ .forEach(e -> templatePropValues.put(e.getKey(), e.getValue()));
+ */
+ //simply stores a collection in its (json) string representation. Cannot be used if
+ //queries are necessary based on the value (on one of its elements).
+ templatePropValues.put("value", JSONObject.valueToString(val));
+ } else {
+ /* scalar, store as such */
+ templatePropValues.put("value", val);
+ }
+
+ String templatePropValueId =
+ neoCreateNode(
+ theTrx, false,
+ new JSONObject(templatePropValues),
+ "TOSCA", /*"Property",*/ "Assignment");
+
+ neoEdge(theTrx, false,
+ templatePropValueId,
+ templateNodeId,
+ new JSONObject(),
+ "OF_TEMPLATE");
+
+ neoEdge(theTrx, false,
+ templatePropValueId,
+ propertyId,
+ new JSONObject(),
+ "OF_" + theConstruct.toUpperCase() + "_PROPERTY");
+
+ if (propInput != null) {
+ String inputId = tracker.lookupTemplate("Input", propInput);
+ if (inputId == null) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "toscaTemplate, Template {}, node {}, property {} input {} not found", theTopologyTemplateId, templateName, templatePropName, propInput);
+ } else {
+ //only wire the GET_INPUT edge when the input was actually resolved
+ neoEdge(theTrx, false,
+ templatePropValueId,
+ inputId,
+ new JSONObject(),
+ "GET_INPUT");
+ }
+ }
+ }
+ }
+ }
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} template {} of type {}", theConstruct, templateName, templateType);
+ } catch (IOException iox) {
+ errLogger.log(LogLevel.WARN, this.getClass().getName(), "toscaTemplate, Failed to persist template: {}", iox);
+ throw iox;
+ }
+ }
+ }
+
+ /* While we persist basic type values inline (in the assignment node) we store complex values
+ * in a graph of their own.
+ * We work around the Neo4j limitation stated below:
+ * Neo4j can only store collections (map, list) of basic types.
+ *
+ * User-defined data types can create indefinitely nested structures of collections.
+ * We could store collections of basic types inline but it would make for a less uniform structure.
+ */
+ private void toscaPropertyAssignment(
+ String theAssignmentId,
+ Object theValue,
+ NeoTransaction theTrx)
+ throws IOException {
+ //look at the grammar rules to see if we inline (stringify) or not
+
+ if (theValue instanceof Map) {
+ //a map type property or a user-defined datatype
+ Map<String, Object> elements = (Map<String, Object>) theValue;
+ for (Map.Entry element : elements.entrySet()) {
+
+ String elementId = neoCreateNode(theTrx, false,
+ new JSONObject().
+ put("name", element.getKey()),
+ "TOSCA", "Data", "Element");
+
+ neoEdge(theTrx, false,
+ elementId,
+ theAssignmentId,
+ EMPTY_JSON_OBJECT,
+ "ELEMENT_OF");
+
+ toscaPropertyAssignment(elementId, element.getValue(), theTrx);
+ }
+ } else if (theValue instanceof List) {
+ //a list type property
+ for (int i = 0; i < ((List) theValue).size(); i++) {
+
+ String elementId = neoCreateNode(theTrx, false,
+ new JSONObject().
+ put("pos", i),
+ "TOSCA", "Data", "Element");
+
+ neoEdge(theTrx, false,
+ elementId,
+ theAssignmentId,
+ EMPTY_JSON_OBJECT,
+ "ELEMENT_OF");
+
+ toscaPropertyAssignment(elementId, ((List) theValue).get(i), theTrx);
+ }
+
+ //update theAssignment with a length property
+ neoNodeProperties(theTrx, false, theAssignmentId,
+ new JSONObject().
+ put("length", ((List) theValue).size())); + } else { + //update the assignment with a 'value' attribute + neoNodeProperties(theTrx, false, theAssignmentId, + new JSONObject(). + put("value", theValue)); + } + } + + /* + * We only handle properties for now so we assume these are properties + * assignments + */ + private void toscaCapabilityAssignment( + String theNodeTemplateId, + String theCapabilityName, + Map<String, Object> theValuations, + NeoTransaction theTrx) + throws IOException { + + for (Map.Entry<String, Object> valuation : theValuations.entrySet()) { + String propertyName = valuation.getKey(); + Object propertyValueSpec = valuation.getValue(); + + Map propertyValue = null; + if (propertyValueSpec instanceof Map) { + propertyValue = (Map) propertyValueSpec; + } else { + //this is dealing with short form, if we ran the first 2 stages of + //the checker we'd always be working on a canonical form .. + propertyValue = new HashMap(); + propertyValue.put("value", propertyValueSpec); + } + + //we need to link the assignment to the node template, the capability + //and the property of the capability type (a node can have multiple + //capabilities of the same type). + String[] ids = + neoIds( + theTrx.statement( + new JSONObject() + .put("statement", + "MATCH (n:Node)-[:OF_TYPE]->(:Node:Type)<-[:CAPABILITY_OF]-(c:Capability)-[:FEATURES]->(:Capability:Type)-[:DERIVED_FROM*0..5]->(:Capability:Type)<-[:PROPERTY_OF]-(p:Property) " + + "WHERE id(n) = " + theNodeTemplateId + " " + + "AND c.name = '" + theCapabilityName + "' " + + "AND p.name = '" + propertyName + "' " + + "RETURN id(p), id(c)")) + .execute() + .result()); + + if (ids == null) { + throw new IOException("toscaCapabilityAssignment: " + + "node template " + theNodeTemplateId + ", " + + "capability " + theCapabilityName + ", " + + "property " + propertyName + + " does not match the node type spec"); + } + + /* this node represents the assignment of a value to a capability property + * hence my doubts about hoe to label it ['Assignment', 'Property'] or ['Assignment','Capability'] + * I am inclined towards the second option as there is no other capability assignment in itself. + */ + String assignmentId = + neoCreateNode( + theTrx, false, + new JSONObject(propertyValue), + "TOSCA", /*Capability,*/"Assignment"); + + neoEdge(theTrx, false, + assignmentId, + theNodeTemplateId, + new JSONObject(), + "OF_TEMPLATE"); + + neoEdge(theTrx, false, + assignmentId, + ids[1], + new JSONObject(), + "OF_CAPABILITY"); + + neoEdge(theTrx, false, + assignmentId, + ids[0], + new JSONObject(), + "OF_CAPABILITY_PROPERTY"); + } + } + + /* + * + * */ + private void importTemplate(String thePath) throws IOException { + try (FileInputStream input = new FileInputStream(thePath)){ + for (Object yaml : new Yaml().loadAll(input)) { + toscaSpec((Map) yaml); + } + } + } + + private void toscaSpec(Map theSpec) throws IOException { + + // type specifications + // at this time we do not record the relation between a type and the + // template it was defined in. 
+
+ NeoTransaction trx = new NeoTransaction(this.neoUri);
+ try {
+ {
+ Map<String, Map> types = (Map<String, Map>) theSpec.get("data_types");
+ if (types != null) {
+ toscaTypeSpec("Data", types, trx);
+ }
+
+ types = (Map<String, Map>) theSpec.get("capability_types");
+ if (types != null) {
+ toscaTypeSpec("Capability", types, trx);
+ }
+
+ types = (Map<String, Map>) theSpec.get("relationship_types");
+ if (types != null) {
+ toscaTypeSpec("Relationship", types, trx);
+ }
+
+ types = (Map<String, Map>) theSpec.get("node_types");
+ if (types != null) {
+ toscaTypeSpec("Node", types, trx);
+ }
+
+ types = (Map<String, Map>) theSpec.get("policy_types");
+ if (types != null) {
+ toscaTypeSpec("Policy", types, trx);
+ }
+ }
+
+ Map<String, Map> topologyTemplate = (Map<String, Map>)
+ theSpec.get("topology_template");
+ if (topologyTemplate != null) {
+
+ Map<String, Object> metadata = (Map<String, Object>) theSpec.get("metadata");
+ if (metadata == null) {
+ throw new IOException("Missing metadata, cannot register template");
+ }
+ String templateName = (String) metadata.get("template_name");
+ String templateId = neoMergeNode(
+ trx, false,
+ new JSONObject()
+ .put("name", templateName)
+ .putOpt("description", (String) theSpec.get("description"))
+ .putOpt("version", (String) metadata.get("template_version"))
+ .putOpt("author", (String) metadata.get("template_author"))
+ .putOpt("scope", (String) metadata.get("scope")),
+ "TOSCA", "Template");
+
+ /* inputs */
+ Map<String, Map> toscaInputs = (Map) topologyTemplate.get("inputs");
+ if (toscaInputs != null) {
+ for (Map.Entry<String, Map> toscaInput : toscaInputs.entrySet()) {
+ //we use create here as input names are not unique across templates
+ //also, constraints require special encoding
+ Map toscaInputSpec = toscaInput.getValue();
+
+ List constraints = (List) toscaInputSpec.remove("constraints");
+ if (constraints != null) {
+ //flattening
+ toscaInputSpec.put("constraints",
+ yamlEncodeConstraints(constraints));
+ }
+ String neoInputNodeId =
+ neoCreateNode(
+ trx, false,
+ new JSONObject(toscaInputSpec)
+ .put("name", toscaInput.getKey())
+ .putOpt("type", toscaInputSpec.get("type")),
+ "TOSCA", "Input");
+
+ tracker.trackTemplate(
+ "Input", (String) toscaInput.getKey(), neoInputNodeId);
+
+ neoEdge(trx, false,
+ neoInputNodeId,
+ templateId,
+ new JSONObject(),
+ "INPUT_OF");
+ }
+ }
+
+ /*
+ * The main issue that I have here is with the definition given to each
+ * section (properties, capabilities, requirements ..) of a Node template:
+ * they are said to 'augment' the information provided in its Node Type but
+ * without specifying the semantics of 'augment'. Can new properties be
+ * added? Can an interface specification contain new operations?
+ */
+ Map<String, Object> toscaNodes = (Map) topologyTemplate.get("node_templates");
+ if (toscaNodes != null) {
+ toscaTemplate(templateId, "Node", toscaNodes, trx);
+
+ //now that all nodes are in we need a second pass over the node set in
+ //order to handle the capabilities, requirements ..
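// Editorial note: assignments may reference sibling node templates by name, which is
// why they can only be wired after the first pass above has registered every template
// with the tracker. A hedged sanity check (assumption, not in the original commit):
for (String nodeName : toscaNodes.keySet()) {
    if (tracker.lookupTemplate("Node", nodeName) == null) {
        errLogger.log(LogLevel.WARN, this.getClass().getName(),
                "toscaSpec, node template {} was skipped in the first pass, its assignments cannot be wired", nodeName);
    }
}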
+ + for (Map.Entry<String, Object> toscaNode : toscaNodes.entrySet()) { + + String toscaNodeName = toscaNode.getKey(); + Map<String, Object> toscaNodeValues = (Map<String, Object>) toscaNode.getValue(); + + Map<String, Map> capabilities = + (Map<String, Map>) toscaNodeValues.get("capabilities"); + if (capabilities != null) { + for (Map.Entry<String, Map> capability : capabilities.entrySet()) { + Map<String, Map> assignments = (Map<String, Map>) capability.getValue(); + Map<String, Object> propertiesAssignments = + assignments.get("properties"); + if (propertiesAssignments != null) { + toscaCapabilityAssignment( + tracker.lookupTemplate("Node", toscaNodeName), + capability.getKey(), + propertiesAssignments, + trx); + } + } + } + + List<Map<String, Object>> requirements = (List<Map<String, Object>>) + toscaNodeValues.get("requirements"); + if (requirements != null) { + toscaRequirementsAssignment( + tracker.lookupTemplate("Node", toscaNodeName), requirements, trx); + } + + //interfaces + } + } + + List toscaPolicies = (List) topologyTemplate.get("policies"); + if (toscaPolicies != null) { + for (Object toscaPolicy : toscaPolicies) { + toscaTemplate(templateId, "Policy", (Map<String, Object>) toscaPolicy, trx); + } + } + + Map<String, Map> toscaOutputs = (Map) topologyTemplate.get("outputs"); + if (toscaOutputs != null) { + for (Map.Entry<String, Map> toscaOutput : toscaOutputs.entrySet()) { + Object outputValue = toscaOutput.getValue().get("value"); + if (outputValue instanceof Map) { //shouldn't I be doing this in all cases?? + outputValue = JSONObject.valueToString((Map) outputValue); + } + + String neoOutputNodeId = neoCreateNode( + trx, false, + new JSONObject() + .put("name", (String) toscaOutput.getKey()) + .putOpt("description", (String) toscaOutput.getValue().get("description")) + .put("value", outputValue.toString()), + "TOSCA", "Output"); + + neoEdge(trx, false, + neoOutputNodeId, + templateId, + new JSONObject(), + "OUTPUT_OF"); + } + } + + //if this is a service template look for its type mapping specification + Map<String, Object> substitutionSpec = + (Map<String, Object>) theSpec.get("substitution_mappings"); + if (substitutionSpec != null) { + + String nodeType = (String) substitutionSpec.get("node_type"); + if (nodeType != null) { + neoEdge(trx, false, + templateId, + "Type", + new JSONObject() + .put("name", nodeType), + new JSONObject(), + "SUBSTITUTES"); + } else { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoProc, Template {} substitution_mapping is missing a node_type in spec: {}", templateName, substitutionSpec); + } + + //process the rest of the mapping definition + } else { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "neoProc, Template {} does not have a substitution mapping", templateName); + } + + //try to connect template to catalog item if information was provided + // + String catalogItemSelector = (String) metadata.get("asc_catalog"); + if (catalogItemSelector != null) { + if (null == neoEdge(trx, false, + templateId, + "CatalogItem", + new JSONObject(catalogItemSelector), + new JSONObject(), + "MODEL_OF")) { + throw new IOException("No such catalog item: " + catalogItemSelector); + } + } + } + trx.commit(); + } catch (IOException iox) { + try { + trx.rollback(); + } catch (IOException riox) { + errLogger.log(LogLevel.ERROR, Modeled.class.getName(), riox.getMessage()); + } + throw iox; + } + } + + private void annotateItem(String thePath, String theLabels) throws IOException { + + if (theLabels == null) { + throw new 
IOException("Labels ??"); + } + + try (FileInputStream input = new FileInputStream(thePath)){ + for (Object yaml : new Yaml().loadAll(input)) { + annotateItem((Map) yaml, theLabels); + } + } + } + + private void annotateItem(Map theSpec, String theLabels) throws IOException { + + Map<String, Object> metadata = (Map<String, Object>) theSpec.get("metadata"); + if (metadata == null) { + throw new IOException("Missing metadata, cannot register template"); + } + + String catalogItemSelector = (String) metadata.remove("asc_catalog"); + if (catalogItemSelector == null) { + throw new IOException("Missing item selector"); + } + + JSONObject annotation = new JSONObject(); + for (Map.Entry<String, Object> e : metadata.entrySet()) { + String key = e.getKey(); + if (key.startsWith("asc_")) { + annotation.put(key.substring(4, key.length()), e.getValue()); + } + } + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "annotation: " + annotation); + + NeoTransaction trx = new NeoTransaction(this.neoUri); + try { + String id = neoCreateNode(trx, false, annotation, ("Annotation:" + theLabels).split(":")); + if (id == null) { + throw new IOException("No such catalog item: " + catalogItemSelector); + } + + id = neoEdge(trx, false, + id, + "CatalogItem", + new JSONObject(catalogItemSelector), + new JSONObject(), + "ANNOTATION_OF"); + if (id == null) { + throw new IOException("No such catalog item: " + catalogItemSelector); + } + + trx.commit(); + } catch (IOException iox) { + try { + trx.rollback(); + } catch (IOException riox) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), riox.getMessage()); + } + throw iox; + } + } + + private void listTemplates(String theSelector) throws IOException { + + JSONObject selector = null; + + if (theSelector != null) { + selector = new JSONObject(theSelector); + } + + NeoTransaction trx = new NeoTransaction(this.neoUri); + + JSONObject res = trx.statement(new JSONObject() + .put("statement", + "MATCH (t:TOSCA:Template" + + (selector != null ? neoLiteralMap(selector) : "") + ") RETURN t, id(t)") + .put("parameters", + new JSONObject() + .put("props", selector != null ? 
selector : new JSONObject()))) + .commit() + .result(); + + JSONArray data = res + .getJSONArray("results") + .getJSONObject(0) + .getJSONArray("data"); + if (data.length() == 0) { + return; + } + + for (int i = 0; i < data.length(); i++) { + JSONArray row = data.getJSONObject(i) + .getJSONArray("row"); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}: {}", row.getInt(1), row.getJSONObject(0)); + } + } + + + private void removeTemplate(String theId) throws IOException { + + //find the nodes to delete and then use 'detach delete' + + NeoTransaction trx = new NeoTransaction(this.neoUri); + + try { + //Template elements are never more then three hops away and point towards the template + JSONObject res = trx.statement(new JSONObject() + .put("statement", + "MATCH (t:TOSCA:Template)<-[*0..3]-(x) " + + "WHERE id(t)=" + theId + " RETURN {labels:labels(x),id:id(x)} as tgt")) + .execute() + .result(); + + JSONArray data = res + .getJSONArray("results") + .getJSONObject(0) + .getJSONArray("data"); + if (data.length() == 0) { + return; + } + + for (int i = data.length() - 1; i >= 0; i--) { + JSONArray row = data.getJSONObject(i) + .getJSONArray("row"); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "> {}", row.getJSONObject(0)); + + //double check + + + res = trx.statement(new JSONObject() + .put("statement", + "MATCH (n) " + + "WHERE id(n)=" + row.getJSONObject(0).getInt("id") + " " + + "DETACH DELETE n")) + .execute() + .result(); + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "> {}", res); + } + + trx.commit(); + } catch (IOException iox) { + try { + trx.rollback(); + } catch (IOException riox) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Rollback failed: {}", riox); + } + throw iox; + } + } + + /* + */ + private static void ignoreMissing(String theTarget) throws IOException { + + for (String prefix : ignoreMissing) { + //make sure they are only one name element away + if ((theTarget.startsWith(prefix)) && (theTarget.substring(prefix.length()).lastIndexOf('.') == 0)) { + return; + } + } + + throw new IOException("Not configured to ignore missing " + theTarget); + } + + private static JSONArray encodeRange(List theRange) throws IOException { + JSONArray range = new JSONArray(); + for (Object value : theRange) { + if (value instanceof Number) { + range.put(((Number) value).intValue()); + } else if (value instanceof String && + "UNBOUNDED".equals(value)) { + range.put(Integer.MAX_VALUE); + } else { + throw new IOException("Unexpected value in range definition: " + value); + } + } + return range; + } + + private static String neoLiteralMap(JSONObject theProps) { + return neoLiteralMap(theProps, "props"); + } + + private static String neoLiteralMap(JSONObject theProps, String theArg) { + if (theProps.length() == 0) { + return ""; + } + StringBuilder sb = new StringBuilder(""); + for (Iterator i = theProps.keys(); i.hasNext(); ) { + String key = (String) i.next(); + sb.append("`") + .append(key) + .append("`: {") + .append(theArg) + .append("}.`") + .append(key) + .append("`,"); + } + return "{ " + sb.substring(0, sb.length() - 1) + " }"; + } + + private static String neoLabelsString(int theStartPos, String... theLabels) { + StringBuffer lbls = new StringBuffer(""); + for (int i = theStartPos; i < theLabels.length; i++) { + lbls.append(":") + .append(theLabels[i]); + } + return lbls.toString(); + } + + private String neoCreateNode( + JSONObject theProperties, + String... 
theLabels) throws IOException { + return neoNode("CREATE", theProperties, theLabels); + } + + /* executes the (up to 2) statements required to construct a node + in a dedicated transaction */ + private String neoNode( + String theVerb, + JSONObject theProperties, + String... theLabels) throws IOException { + NeoTransaction trx = new NeoTransaction(this.neoUri); + try { + return neoNode(trx, true, + theVerb, theProperties, theLabels); + } catch (IOException iox) { + try { + trx.rollback(); + } catch (IOException ioxx) { + errLogger.log(LogLevel.ERROR, Modeled.class.getName(), ioxx.getMessage()); + } + throw iox; + } + } + + private String neoCreateNode( + NeoTransaction theTransaction, + boolean doCommit, + JSONObject theProperties, + String... theLabels) throws IOException { + return neoNode(theTransaction, doCommit, "CREATE", theProperties, theLabels); + } + + private String neoMergeNode( + NeoTransaction theTransaction, + boolean doCommit, + JSONObject theProperties, + String... theLabels) throws IOException { + return neoNode(theTransaction, doCommit, "MERGE", theProperties, theLabels); + } + + /* execute the statements required to construct a node as part of the + given transaction + + */ + private String neoNode( + NeoTransaction theTransaction, + boolean doCommit, + String theVerb, + JSONObject theProperties, + String... theLabels) throws IOException { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoNode {}", new Object[]{theProperties, theLabels}); + + JSONObject node; + String nodeId; + + node = theTransaction + .statement( + new JSONObject() + .put("statement", + theVerb + " (n:" + theLabels[0] + neoLiteralMap(theProperties) + " ) RETURN id(n)") + .put("parameters", + new JSONObject() + .put("props", theProperties))) + .execute() + .result(); + + + nodeId = neoId(node); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoNode, node: {}", nodeId); + + if (theLabels.length > 1) { + theTransaction.statement( + new JSONObject() + .put("statement", + "START n=node(" + nodeId + ") SET n " + neoLabelsString(1, theLabels))); + } + theTransaction.execute(doCommit); + + return nodeId; + } + + private void neoNodeProperties( + NeoTransaction theTransaction, + boolean doCommit, + String theId, + JSONObject theProperties) throws IOException { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoNodeProperties {}", new Object[]{theId, theProperties}); + theTransaction + .statement( + new JSONObject() + .put("statement", + "START n=node(" + theId + ") SET n+= " + + neoLiteralMap(theProperties) + " RETURN id(n)") + .put("parameters", + new JSONObject() + .put("props", theProperties))) + .execute(doCommit); + } + + private String neoEdge( + NeoTransaction theTransaction, + boolean doCommit, + String theFrom, String theTo, + JSONObject theProperties, + String... theLabels) throws IOException { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoEdge: {}", new Object[]{theFrom, theTo, theProperties, theLabels}); + + return neoEdge( + theTransaction, doCommit, + new JSONObject() + .put("statement", + "START a=node(" + theFrom + "),b=node(" + theTo + ") " + + "MERGE (a)-[r:" + theLabels[0] + neoLiteralMap(theProperties) + "]->(b) " + + "RETURN id(r)") + .put("parameters", + new JSONObject() + .put("props", theProperties))); + } + + private String neoEdge( + NeoTransaction theTransaction, boolean doCommit, + String theFromId, + String theToLabel, JSONObject theToProps, + JSONObject theProperties, + String... 
theLabels) throws IOException { + + return neoEdge(theTransaction, doCommit, + new JSONObject() + .put("statement", + //"START a=node(" + theFromId + ") " + + "MATCH (a),(b:" + theToLabel + neoLiteralMap(theToProps, "toProps") + ") " + + "WHERE id(a)=" + theFromId + " " + + "MERGE (a)-[r:" + theLabels[0] + neoLiteralMap(theProperties) + "]->(b) " + + "RETURN id(r)") + .put("parameters", + new JSONObject() + .put("toProps", theToProps) + .put("props", theProperties))); + } + + private String neoEdge(NeoTransaction theTransaction, + boolean doCommit, + JSONObject theEdgeStatement) + throws IOException { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "neoEdge {}", new Object[]{theEdgeStatement}); + + return neoId( + theTransaction + .statement(theEdgeStatement) + .execute(doCommit) + .result() + ); + } + + private static String neoId(JSONObject theResult) throws IOException { + try { + JSONArray data = theResult + .getJSONArray("results") + .getJSONObject(0) + .getJSONArray("data"); + if (data.length() == 0) { + return null; + } + + return String.valueOf( + data.getJSONObject(0) + .getJSONArray("row") + .getInt(0)); + } catch (JSONException jsonx) { + errLogger.log(LogLevel.WARN, Modeled.class.getName(), "neoId, No 'id' in result: {} {}", theResult, jsonx); + throw new IOException("no 'id' in result", jsonx); + } + } + + private static String[] neoIds(JSONObject theResult) throws IOException { + try { + JSONArray data = theResult + .getJSONArray("results") + .getJSONObject(0) + .getJSONArray("data"); + if (data.length() == 0) { + return new String[]{}; + } + + JSONArray array = data.getJSONObject(0) + .getJSONArray("row"); + + String[] res = new String[array.length()]; + for (int i = 0; i < array.length(); i++) { + res[i] = String.valueOf(array.getInt(i)); + } + return res; + } catch (JSONException jsonx) { + errLogger.log(LogLevel.WARN, Modeled.class.getName(), "neoId, No 'id' in result: {} {}", theResult, jsonx); + throw new IOException("no 'id' in result", jsonx); + } + } + + private static class NeoTransaction { + + private HttpClient client = null; + private String uri = null; + private String auth = null; + private JSONObject result = null; + private JSONArray stmts = new JSONArray(); + + NeoTransaction(URI theTarget) { + + client = httpClientBuilder.build(); + this.uri = theTarget.getScheme() + "://" + theTarget.getHost() + ":" + theTarget.getPort() + "/db/data/transaction"; + + String userInfo = theTarget.getUserInfo(); + if (userInfo != null) { + this.auth = "Basic " + new String( + Base64.encodeBase64( + userInfo.getBytes(Charset.forName("ISO-8859-1")))); + } + } + + /* adds a statement to the next execution cycle */ + NeoTransaction statement(JSONObject theStatement) { + if (this.client == null) { + throw new IllegalStateException("Transaction was completed"); + } + this.stmts.put(theStatement); + return this; + } + + /* executes all pending statements but does not commit the transaction */ + /* executing a transaction with no statements refreshes the transaction timer in order to keep the transaction alive */ + NeoTransaction execute() throws IOException { + if (this.client == null) { + throw new IllegalStateException("Transaction was completed"); + } + post(this.uri); + return this; + } + + /* executes all pending statements and commits the transaction */ + NeoTransaction commit() throws IOException { + if (this.client == null) { + throw new IllegalStateException("Transaction was completed"); + } + post(this.uri + "/commit"); + //mark the transaction as terminated 
+ this.client = null; + return this; + } + + /* just to simplify some code written on top of NeoTransaction */ + NeoTransaction execute(boolean doCommit) throws IOException { + return doCommit ? commit() : execute(); + } + + private void post(String theUri) throws IOException { + HttpPost post = new HttpPost(theUri); + JSONObject payload = new JSONObject() + .put("statements", this.stmts); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "post> " + payload); + post.setEntity(new StringEntity(payload.toString(), + ContentType.APPLICATION_JSON)); + run(post); + } + + /* rollbacks the transaction changes */ + NeoTransaction rollback() throws IOException { + if (this.client == null) { + throw new IllegalStateException("Transaction was completed"); + } + if (this.uri == null) { + throw new IllegalStateException("Transaction not started"); + } + run(new HttpDelete(this.uri)); + return this; + } + + /* retrieve the (raw) results of the last execute/commit cycle */ + JSONObject result() { + return this.result; + } + + private void run(HttpUriRequest theRequest) throws IOException { + theRequest.setHeader(HttpHeaders.ACCEPT, "application/json; charset=UTF-8"); + if (this.auth != null) { + theRequest.setHeader(HttpHeaders.AUTHORIZATION, this.auth); + } + + HttpResponse response = this.client.execute(theRequest); + int statusCode = response.getStatusLine().getStatusCode(); + if (statusCode >= 300) { + try { + this.result = new JSONObject(IOUtils.toString(response.getEntity().getContent(), "UTF-8")); + } catch (Exception x) { + errLogger.log(LogLevel.ERROR, Modeled.class.getName(), x.getMessage()); + } + throw new IOException("Neo statement(s) '" + this.stmts + "' failed: " + response.getStatusLine()); + } + + try { + this.result = new JSONObject( + IOUtils.toString(response.getEntity().getContent(), "UTF-8")); + } catch (Exception x) { + throw new IOException("no json in response", x); + } + + JSONArray errors = this.result.getJSONArray("errors"); + if (errors.length() > 0) { + throw new IOException("Neo statement(s) '" + this.stmts + "' have errors: " + errors); + } + //we only get a header if this was not a one statement transaction + Header hdr = response.getFirstHeader("Location"); + if (hdr != null) { + if (!hdr.getValue().startsWith(this.uri)) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "new transaction location?? : {} vs. {}", this.uri, hdr.getValue()); + } + this.uri = hdr.getValue(); + } + this.stmts = new JSONArray(); + } + } + + private static JSONObject pack(Map theRule, Map theDef) { + JSONObject pack = new JSONObject(); + + if (theRule == null) { + return pack; + } + + //these are the facets of the construct definition + Map facets = (Map) theRule.get("mapping"); + if (facets == null) { + return pack; + } + + facets.entrySet().stream() + .forEach( + theEntry -> + { + Map.Entry entry = (Map.Entry) theEntry; + Map facetDef = (Map) entry.getValue(); + + String storage = (String) facetDef.getOrDefault("storage", ""); + String type = (String) facetDef.get("type"); + + if ("none".equals(storage)) { + return; + } + if ("map".equals(type)) { + //maps are used for cross-references between constructs or for + //constructs facets + return; + } + Object val = theDef.get(entry.getKey()); + if ("seq".equals(type)) { + //sequences can be stored inlined, if so instructed .. 
+ if ("inline".equals(storage)) { + val = JSONObject.valueToString(val); + } else { + return; + } + } + if ("no".equals(facetDef.getOrDefault("required", "no"))) { + pack.putOpt((String) entry.getKey(), theDef.get(entry.getKey())); + } else { + pack.putOnce((String) entry.getKey(), theDef.get(entry.getKey())); + } + }); + return pack; + } + + /* a sort of catalog of neo identifiers generated for the different + * constructs (or their types) we store + */ + private static class Tracker<T> { + + private Table<String, String, T> + typeTracker = HashBasedTable.create(), + templateTracker = HashBasedTable.create(); + + void trackType(String theConstruct, String theName, T theInfo) { + typeTracker.put(theConstruct, theName, theInfo); + } + + T lookupType(String theConstruct, String theName) { + return typeTracker.get(theConstruct, theName); + } + + boolean tracksType(String theConstruct, String theName) { + return typeTracker.contains(theConstruct, theName); + } + + void trackTemplate(String theConstruct, String theName, T theInfo) { + templateTracker.put(theConstruct, theName, theInfo); + } + + T lookupTemplate(String theConstruct, String theName) { + return templateTracker.get(theConstruct, theName); + } + + } +} diff --git a/dcaedt_catalog/db/src/main/resources/tosca-schema.yaml b/dcaedt_catalog/db/src/main/resources/tosca-schema.yaml new file mode 100644 index 0000000..5944e22 --- /dev/null +++ b/dcaedt_catalog/db/src/main/resources/tosca-schema.yaml @@ -0,0 +1,1231 @@ +_status_values: &status_values + enum: + - supported + - unsupported + - experimental + - deprecated + +#I do not know that the lists and maps qualify as 'primitive' .. +_primitive_types: &primitive_types + enum: [string,integer,float,boolean,timestamp,list,map,version,range,scalar-unit.size,scalar_unit.frequency,scalar_unit.time] + +#needs custom validation as we have to make sure there are 2 elements and allow for the +#UNBOUNDED keyword as second element +_range_definition: &range_definition + type: seq + name: range_definition + sequence: + - type: scalar + +#see A.5.2 +#this is where the need of verifying the size of a collection (sequence/map) came from +#this is specified as a sequence where each entry is a map with one entry?? +_constraints_sequence: &constraints_sequence + name: constraints_sequence +# short: "0" + type: seq + sequence: + - type: map +# length: 1 + mapping: + equal: + desc: "Constrains a property or parameter to a value equal to the value declared." + type: any + required: no + greater_than: + desc: "Constrains a property or parameter to a value greater than the value declared" + type: scalar + required: no + greater_or_equal: + desc: "Constrains a property or parameter to a value greater than or equal to the value declared." + type: scalar + required: no + less_than: + desc: "Constrains a property or parameter to a value less than the value declared" + type: scalar + required: no + less_or_equal: + desc: "Constrains a property or parameter to a value less than or equal to the value declared." + type: scalar + required: no + in_range: + desc: "Constrains a property or parameter to a value in range of (inclusive) the two values declared. +" + type: seq +# length: 2 + sequence: + - type: scalar + required: no + valid_values: + desc: "Constrains a property or parameter to a value that is in the list of declared values" + type: seq + sequence: + - type: scalar + required: no + length: + desc: "Constrains the property or parameter to a value of a given length." 
+ type: int + required: no + min_length: + desc: "Constrains the property or parameter to a value to a minimum length" + type: scalar + required: no + max_length: + desc: "Constrains the property or parameter to a value to a maximum length" + type: scalar + required: no + pattern: + desc: "Constrains the property or parameter to a value that is allowed by the provided regular expression." + type: str + required: no + +# section A.5.3 property_filter_definition +# it is a constraints sequence that gets attached to a property .. +_property_filter_definition: &property_filter_definition + name: property_filter_definition + type: map + mapping: + =: + *constraints_sequence + +#section A.5.4 node_filter_definition +_node_filter_definition: &node_filter_definition + type: map + name: node_filter_definition + mapping: + properties: + desc: "property names to constraints to be applied to those properties" + required: no + type: seq + sequence: + - *property_filter_definition +# - type: map +# mapping: +# =: +# *constraints_sequence + capabilities: + desc: "" + required: no + type: seq + sequence: + - type: map + name: node_filter_capabilities_sequence + desc: "the key is a capability name or type" + mapping: + =: + name: node_filter_capabilities_entry + type: map + mapping: + properties: + desc: "the capability properties and their constraints" + name: node_filter_capabilities_properties + type: seq + sequence: + - type: map + name: node_filter_capabilities_property + mapping: + =: *constraints_sequence + +#used in property and attribute definitions +_entry_schema_definition: &entry_schema_definition + desc: "The optional key that is used to declare the name of the Datatype definition for entries of set types such as the TOSCA list or map" + name: entry_schema_definition + required: no + type: map + short: type + mapping: + "type": + desc: "collection element type" + required: yes + type: str + description: + required: no + type: str + constraints: + *constraints_sequence + +# see section A.5.5 +_artifact_definition: &artifact_definition + type: map + name: artifact_definition + short: implementation # assumes type can be inferred .. + mapping: + "type": + desc: "The required artifact type for the artifact definition" + required: yes + type: str + description: + desc: "The optional description for the artifact definition" + required: no + type: str + implementation: + desc: "The optional URI string (relative or absolute) which can be used to locate the artifacts file. +" + required: no + type: str + repository: + desc: "The optional name of the repository definition which contains the location of the external repository that contains the artifact" + required: no + type: str + deploy_path: + desc: "The file path the associated file would be deployed into within the target nodes container." + required: no + type: str + +# see section A.5.6 +_repository_definition: &repository_definition + type: map + name: repository_definition + short: url + mapping: + description: + desc: "The optional description for the repository. 
+" + required: no + type: str + url: + desc: "The required URL or network address used to access the repository" + required: yes + type: str + credential: + desc: "The optional Credential used to authorize access to the repository" + required: no + type: str + +#see section A.5.7 +_property_definition: &property_definition + type: map + name: property_definition + mapping: + "type": + type: str + required: yes +#not as easy, it can be an user defined data type +# <<: *primitive_types + description: + type: str + required: no + constraints: + desc: "The optional list of sequenced constraint clauses for the Data Type." + required: no + <<: *constraints_sequence + default: + type: any + required: no + "required": + type: bool + required: no + status: + type: str + required: no + <<: *status_values + entry_schema: + <<: *entry_schema_definition +# desc: "used to declare the name of the Datatype definition for entries of set types such as the TOSCA list or map." +# type: str +# required: no + +#see section A.5.8 +#_property_assignment_definition: &property_assignment_definition + +#see A.5.9 +_attribute_definition: &attribute_definition + type: map + name: attribute_definition + mapping: + "type": + type: str + required: yes +# <<: *primitive_types + description: + type: str + required: no + default: + type: any + required: no + status: + desc: "The optional status of the attribute relative to the specification or implementation" + type: str + required: no + <<: *status_values + entry_schema: + <<: *entry_schema_definition + +#see section A.5.10 +#here again, we must support the short form which is the most common +_attribute_assignment_definition: &attribute_assignment_definition + type: map + name: attribute_assignment_definition + mapping: + description: + desc: "The optional description of the attribute." + required: no + type: str + value: +#actually 'value | value_expression' + desc: "represent the type-compatible value to assign to the named attribute. Attribute values may be provided as the result from the evaluation of an expression or a function" + required: yes + type: any + + +# see spec section A.5.11 + +# see spec section A.5.11.1: variant to be used in node or relationship type definitions +_type_operation_definition: &type_operation_definition + type: map + name: type_operation_definition + short: implementation + mapping: + description: + desc: "The optional description string for the associated named operation." + required: no + type: str + implementation: + desc: "The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR file)" + required: no + type: str + inputs: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a input value" + name: property_assignment + type: any + +# from A.5.11.2 +_template_operation_definition: &template_operation_definition + type: map + name: template_operation_definition + short: implementation + mapping: + description: + desc: "The optional description string for the associated named operation." + required: no + type: str + implementation: + desc: "The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR file)" + name: template_operation_implementation_definition + required: no + short: primary + type: map + mapping: + primary: + desc: "The optional implementation artifact name (e.g., the primary script file name within a TOSCA CSAR file). 
" + required: no + type: str + dependencies: + desc: "The optional list of one or more dependent or secondary implementation artifact name which are referenced by the primary implementation artifact (e.g., a library the script installs or a secondary script)" + required: no + type: seq + sequence: + - type: str + inputs: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a input value" + name: property_assignment + type: any + + +# see section A.5.12, specifically A.5.12.2.1 : definition to be used in node or relationship type definition +_type_interface_definition: &type_interface_definition + type: map + name: type_interface_definition + mapping: + "type": + desc: "represents the required name of the Interface Type for the interface definition +" + required: yes + type: str + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + *property_definition + =: + *type_operation_definition + +# see section A.5.12.2.2, extended notation to be used in node or relationship template definitions +_template_interface_definition: &template_interface_definition + type: map + name: template_interface_definition + mapping: + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + =: + *template_operation_definition + + +# A.6 section: type specific definitions + +# see section A.6.1 +_capability_definition: &capability_definition + type: map + name: capability_definition + short: type + mapping: + "type": + desc: "The required name of the Capability Type the capability definition is based upon" + required: yes + type: str + description: + desc: "The optional description of the Capability definition" + required: no + type: str + properties: + desc: "" + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of property definitions for the Capability definition" + required: no + type: map + mapping: + =: + *attribute_definition + valid_source_types: + desc: "" + required: no + type: seq + sequence: + - type: str + occurrences: + desc: "The optional minimum and maximum occurrences for the capability." + required: no + <<: *range_definition + +# see section A.6.2 +# +_requirement_definition: &requirement_definition + type: map + name: requirement_definition + short: capability #as per A.6.2.2.1 + mapping: + capability: + desc: "The required reserved keyname used that can be used to provide the name of a valid Capability Type that can fulfil the requirement" + required: yes + type: str + node: + desc: "The optional reserved keyname used to provide the name of a valid Node Type that contains the capability definition that can be used to fulfil the requirement. " + required: no + type: str + relationship: +# and from section A.6.2.1, this one is an oddball + desc: "The optional reserved keyname used to provide the name of a valid Relationship Type to construct when fulfilling the requirement." + required: no + name: requirement_relationship_definition + short: type + type: map + mapping: + type: + desc: "The optional reserved keyname used to provide the name of the Relationship Type for the requirement definitions relationship keyname. 
+" + required: yes + type: str + interfaces: + #not clear which interface definition is to be used here + desc: "allows augmentation (additional properties and operations) of the interfaces defined by the relationship type indicated above" + required: no + type: map + mapping: + =: + *type_interface_definition + occurrences: + desc: "The optional minimum and maximum occurrences for the requirement." + required: no + <<: *range_definition + +# see section A.6.3 +_artifact_type_definition: &artifact_type_definition + type: map + name: artifact_type_definition + mapping: + derived_from: + desc: "An optional parent Artifact Type name the Artifact Type derives from" + required: no + type: str + description: + desc: "An optional description for the Artifact Type." + required: no + type: str + mime_type: + desc: "The required mime type property for the Artifact Type." + required: no + type: str + file_ext: + desc: "The required file extension property for the Artifact Type" + required: no + type: seq + sequence: + - type: str + properties: + desc: "An optional list of property definitions for the Artifact Type" + required: no + type: map + mapping: + =: + *property_definition + +#see spec section #A.6.4 +_interface_type_definition: &interface_type_definition + type: map + name: interface_type_definition + mapping: + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + type: str + desc: "property_name to property_value(_expression) mapping" + =: + *type_operation_definition + +# A.6.5 +_data_type_definition: &data_type_definition + type: map + name: data_type_definition + mapping: + derived_from: + desc: "The optional key used when a datatype is derived from an existing TOSCA Data Type. +" + required: no + type: str + description: + desc: "The optional description for the Data Type. +" + required: no + type: str + constraints: + desc: "The optional list of sequenced constraint clauses for the Data Type." + <<: *constraints_sequence + properties: + desc: "The optional list property definitions that comprise the schema for a complex Data Type in TOSCA" + type: map + mapping: + =: + *property_definition + +# see section A.6.6 +_capability_type_definition: &capability_type_definition + type: map + name: capability_type_definition + mapping: + derived_from: + desc: "An optional parent capability type name this new Capability Type derives from." + required: no + type: str + description: + desc: "An optional description for the Capability Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Capability Type." 
+ required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Capability Type" + required: no + type: map + mapping: + =: + *attribute_definition + valid_source_types: + desc: "An optional list of one or more valid names of Node Types that are supported as valid sources of any relationship established to the declared Capability Type" + required: no + type: seq + sequence: + - type: str + +# section A.6.7 requirement definition: TOSCA YAML profile relies on capability types to +# define requirements + +# see section A.6.9 +_relationship_type_definition: &relationship_type_definition + type: map + name: relationship_type_definition + mapping: + derived_from: + desc: "An optional parent Relationship Type name the Relationship Type derives from" + required: no + type: str + description: + desc: "An optional description for the Relationship Type." + required: no + type: str + properties: + desc: "An optional list of property definitions for the Relationship Type" + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Relationship Type" + required: no + type: map + mapping: + =: + *attribute_definition + interfaces: + desc: "An optional list of interface definitions interfaces supported by the Relationship Type" + required: no + type: map + mapping: + =: + *type_interface_definition + valid_target_types: + desc: "An optional list of one or more names of Capability Types that are valid targets for this relationship. " + required: no + type: seq + sequence: + - type: str + +#see section 3.6.10 +_group_type_definition: &group_type_definition + type: map + name: group_type_definition + mapping: + derived_from: + desc: "An optional parent Group Type name this new Group Type derives from" + required: no + type: str + version: + desc: "An optional version for the Group Type definition" + required: no + type: str + description: + desc: "An optional description for the Group Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Group Type." + required: no + type: map + mapping: + =: + *property_definition + targets: + desc: "An optional list of one or more names of Node Types that are valid +(allowed) as members of the Group Type." + required: no + type: seq + sequence: + - type: str + interfaces: + desc: "An optional list of interface definitions supported by the Group Type" + required: no + type: map + mapping: + =: + *type_interface_definition + +#see section 3.6.11 +_policy_type_definition: &policy_type_definition + type: map + name: policy_type_definition + mapping: + derived_from: + desc: "An optional parent Policy Type name this new Policy Type derives from" + required: no + type: str + version: + desc: "An optional version for the Policy Type definition" + required: no + type: str + description: + desc: "An optional description for the Policy Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Policy Type." 
+ required: no + type: map + mapping: + =: + *property_definition + targets: + desc: "An optional list of valid Node Types or Group Types the Policy Type +can be applied to" + required: no + type: seq + sequence: + - type: str + +# see section A.6.8 +_node_type_definition: &node_type_definition + type: map + name: node_type_definition + mapping: + derived_from: + desc: "An optional parent Node Type name this new Node Type derives from" + required: no + type: str + description: + desc: "An optional description for the Node Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Node Type." + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Node Type. +" + required: no + type: map + mapping: + =: + *attribute_definition + requirements: + desc: "An optional sequenced list of requirement definitions for the Node Type. +" + required: no + type: seq + sequence: + - type: map + mapping: + =: + *requirement_definition + capabilities: + desc: "An optional list of capability definitions for the Node Type" + required: no + type: map + mapping: + =: + *capability_definition + interfaces: + desc: "" + required: no + type: map + mapping: + =: + *type_interface_definition + artifacts: + desc: "An optional list of named artifact definitions for the Node Type" + required: no + type: map + mapping: + =: + *artifact_definition + +# A.7 Template specific definitions + +# see section A.7.1 +_capability_assignment_definition: &capability_assignment_definition + type: map + name: capability_assignment_definition + mapping: + properties: + # list of property assignments + desc: "An optional list of property definitions for the Capability definition" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + attributes: + # list of attribute assignments + desc: "An optional list of attribute definitions for the Capability definition" + required: no + type: map + mapping: + =: + desc: "" + name: attribute_assignment + type: any + +# see section A.7.2 +_requirement_assignment_definition: &requirement_assignment_definition + type: map + name: requirement_assignment_definition + short: node + mapping: + capability: + desc: " used to provide the name of either a: Capability definition within a target node template that can fulfill the requirement or Capability Type that the provider will use to select a type-compatible target node template to fulfill the requirement at runtime." + required: no + type: str + node: +#why is this a reference to a node type and not to a node template?? + desc: "used to identify the target node of a relationship: Node Template name that can fulfil the target node requirement or Node Type name that the provider will use to select a type-compatible node template to fulfil the requirement at runtime" + required: no + type: str + relationship: + desc: "" + required: no +#fins a better name name: relationship_definition + type: map + short: type + mapping: + "type": + desc: "The optional reserved keyname used to provide the name of the Relationship Type for the requirement assignments relationship keyname" + required: no + type: str + properties: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + interfaces: + desc: "from A.5.12.2.2, right?" 
+ required: no + type: map + mapping: + =: + *template_interface_definition + node_filter: + desc: "The optional filter definition that TOSCA orchestrators or providers would use to select a type-compatible target node that can fulfill the associated abstract requirement at runtime." + required: no + <<: *node_filter_definition + +# see section A.7.3 +_node_template_definition: &node_template_definition + type: map + name: node_template_definition + mapping: + "type": + desc: "The required name of the Node Type the Node Template is based upon" + required: yes + type: str + description: + desc: "An optional description for the Node Template" + required: no + type: str + directives: + desc: "An optional list of directive values to provide processing instructions to orchestrators and tooling." + required: no + type: seq + sequence: + - type: str + properties: +#custom check needs to be added: the value or expression providing the property value +#needs to be compatible with the property definition + desc: "An optional list of property value assignments for the Node Template." + required: no + type: map + mapping: + =: + type: any + name: property_assignment + desc: "a property value or an expression providing a property value" + attributes: + desc: "An optional list of attribute value assignments for the Node Template" + required: no + type: map + mapping: + =: + *attribute_assignment_definition + requirements: + desc: "An optional sequenced list of requirement assignments for the Node Template." + required: no + type: seq + sequence: + - type: map + mapping: + =: + *requirement_assignment_definition + capabilities: + desc: "An optional list of capability assignments for the Node Template." + required: no + type: map + mapping: + =: + *capability_assignment_definition + interfaces: + desc: "An optional list of named interface definitions for the Node Template" + required: no + type: map + mapping: + =: + *template_interface_definition + artifacts: + desc: "An optional list of named artifact definitions for the Node Template. +" + required: no + type: map + mapping: + =: + *artifact_definition + node_filter: + desc: "The optional filter definition that TOSCA orchestrators would use to select the correct target node. This keyname is only valid if the directive has the value of 'selectable' set." + required: no + <<: *node_filter_definition + copy: + desc: "The optional (symbolic) name of another node template to copy into (all keynames and values) and use as a basis for this node template." + required: no + type: str + +# see section A.7.4 +_relationship_template_definition: &relationship_template_definition + type: map + name: relationship_template_definition + mapping: + "type": + desc: "The required name of the Relationship Type the Relationship Template is based upon" + required: yes + type: str + alias: + desc: "The optional name of a different Relationship Template definition whose values are (effectively) copied into the definition for this Relationship Template (prior to any other overrides)." + required: no + type: str + description: + desc: "An optional description for the Relationship Template" + required: no + type: str + properties: + desc: "An optional list of property assignments for the Relationship Template." 
+      required: no
+      name: properties_assignment_validation
+      type: map
+      mapping:
+        =:
+          type: any
+#scalar
+          desc: "an expression providing a property value"
+    attributes:
+      desc: "An optional list of attribute value assignments for the Relationship Template"
+      required: no
+      name: attributes_assignment_validation
+      type: map
+      mapping:
+        =:
+          type: scalar
+          desc: "an expression providing an attribute value"
+    interfaces:
+      desc: "An optional list of named interface definitions for the Relationship Template ('augmentation' is allowed here)"
+      required: no
+      type: map
+      mapping:
+        =:
+          *template_interface_definition
+    copy:
+      desc: "The optional (symbolic) name of another relationship template to copy into (all keynames and values) and use as a basis for this relationship template."
+      required: no
+      type: str
+
+
+# see section 3.7.5
+_group_definition: &group_definition
+  type: map
+  name: group_definition
+  mapping:
+    "type":
+      desc: "The required name of the group type the group definition is based upon"
+      required: yes
+      type: str
+    description:
+      desc: "The optional description for the group definition"
+      required: no
+    properties:
+      desc: "represents the optional list of property assignments for the group definition that provide values for properties defined in its declared Group Type"
+      required: no
+      type: map
+      mapping:
+        =:
+          type: any
+          name: property_assignment
+    targets:
+      desc: "contains the required list of one or more node template names (within the same topology template) that are members of this logical group"
+      required: yes
+      type: seq
+      sequence:
+        - type: str
+    interfaces:
+      desc: "represents the optional list of interface definitions for the group definition that augment those provided by its declared Group Type"
+      required: no
+      type: map
+      mapping:
+        =:
+          *template_interface_definition
+
+# see section 3.7.6
+_policy_template_definition: &policy_template_definition
+  type: map
+  name: policy_definition
+  mapping:
+    "type":
+      desc: "The required name of the policy type the policy definition is based upon"
+      required: yes
+      type: str
+    description:
+      desc: "The optional description for the policy definition"
+      required: no
+    properties:
+      desc: "represents the optional list of property assignments for the policy definition that provide values for properties defined in its declared Policy Type"
+      required: no
+      type: map
+      mapping:
+        =:
+          type: any
+          name: property_assignment
+    targets:
+      desc: "represents the optional list of names of node templates or groups that the policy is to be applied to"
+      required: no
+      type: seq
+      sequence:
+        - type: str
+
+# see section 3.8 Topology Template definition: defines the topology template of a cloud application,
+# described as a reusable grammar as it can be a part of a service template definition
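+# Illustrative only (commented out, not part of the schema): a minimal
+# fragment that the topology template definition below is intended to accept.
+# The node name 'my_server' and the input 'cpus' are invented for the example.
+#
+#   topology_template:
+#     description: "a small example topology"
+#     inputs:
+#       cpus:
+#         type: integer
+#     node_templates:
+#       my_server:
+#         type: tosca.nodes.Compute
+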
+_topology_template_definition: &topology_template_definition
+  type: map
+  name: topology_template_definition
+  mapping:
+    description:
+      desc: "a description of the topology template"
+      required: no
+      type: str
+    inputs:
+      desc: "definition of input parameters for the topology template"
+      name: inputs
+      required: no
+      type: map
+      mapping:
+        =:
+          *property_definition
+    node_templates:
+      desc: "definition of the node templates of the topology"
+      name: node_templates
+      required: no
+      type: map
+      mapping:
+        =:
+          *node_template_definition
+    relationship_templates:
+      desc: "definition of the relationship templates of the topology"
+      required: no
+      name: relationship_templates
+      type: map
+      mapping:
+        =:
+          *relationship_template_definition
+    outputs:
+      desc: "definition of output parameters for the topology template"
+      name: outputs
+      required: no
+      type: map
+      mapping:
+        =:
+          *attribute_assignment_definition
+    groups:
+      desc: "An optional list of Group definitions whose members are node templates defined within this same Topology Template"
+      name: groups
+      required: no
+      type: map
+      mapping:
+        =:
+          *group_definition
+    policies:
+      # see 8.2.3, initially the list is not described as sequenced but then the grammar shows it as such !?
+      desc: "An optional sequenced list of Policy definitions for the Topology Template."
+      name: policies
+      required: no
+      type: seq
+      sequence:
+        - type: map
+          mapping:
+            =:
+              *policy_template_definition
+    substitution_mappings:
+# one possible short-coming that is visible here is that the definition of the capability
+# and requirements mappings are given in the spec only with the short/inline version of a
+# YAML list/sequence, which cannot be enforced here ..
+      desc: "a substitution mapping that exposes this topology template as an implementation of a Node Type"
+      name: substitution_mappings
+      required: no
+      type: map
+      mapping:
+        node_type:
+          desc: "node type name"
+          required: yes
+          type: str
+        capabilities:
+          desc: "map_of_capability_mappings_to_expose"
+          type: map
+          mapping:
+            =:
+              type: seq
+              sequence:
+                - type: str
+        requirements:
+          desc: "map_of_requirement_mapping_to_expose"
+          type: map
+          mapping:
+            =:
+              type: seq
+              sequence:
+                - type: str
+
+
+# see A.9 Service Template definition: A TOSCA Service Template (YAML) document contains
+# element definitions of building blocks for a cloud application, or complete models of cloud applications.
+
+type: map
+name: service_template_definition
+mapping:
+  tosca_definitions_version:
+    desc: "Required TOSCA Definitions version string"
+    required: yes
+    type: str
+
+  tosca_default_namespace:
+    desc: "Optional.
default namespace (for type schema)" + required: no + type: str + + metadata: + desc: "Optional metadata keyname: value pairs" + name: metadata + required: no + type: map + mapping: + template_name: + desc: "Optional name of this service template" + required: no + type: str + template_author: + desc: "Optional author of this service template" + required: no + type: str + template_version: + desc: "Optional version of this service template" + required: no + type: str +#to add, the spec says: "Optional list of domain or profile specific metadata keynames" + + description: + desc: "Optional description of the definitions inside the file" + required: no + type: str + + imports: + desc: "list of import statements for importing other definitions files" + name: imports + required: no + type: seq + sequence: + - type: str + + dsl_definitions: + desc: "list of YAML alias anchors (or macros)" + name: dsl_definitions + required: no + type: map + mapping: + =: + desc: "some piece of valid yaml that makes the anchor/alias definition" + type: any + required: no + + repositories: + desc: "list of external repository definitions which host TOSCA artifacts" + name: repositories + required: no + type: map + mapping: + =: + *repository_definition + + data_types: + desc: "list of TOSCA datatype definitions" + name: data_types + required: no + type: map + mapping: + =: + *data_type_definition + + node_types: + desc: "list of node type definitions" + name: node_types + required: no + type: map + mapping: + =: + *node_type_definition + + capability_types: + desc: "list of capability type definitions" + name: capability_types + required: no + type: map + mapping: + =: + *capability_type_definition + + relationship_types: + desc: "list of relationship type definitions" + name: relationship_types + required: no + type: map + mapping: + =: + *relationship_type_definition + + artifact_types: + desc: "list of artifact type definitions" + name: artifact_types + required: no + type: map + mapping: + =: + *artifact_type_definition + + interface_types: + desc: "list of interface type definitions" + name: interface_types + required: no + type: map + mapping: + =: + *interface_type_definition + + group_types: + desc: "list of group type definitions" + name: group_types + required: no + type: map + mapping: + =: + *group_type_definition + + policy_types: + desc: "list of policy type definitions" + name: policy_types + required: no + type: map + mapping: + =: + *policy_type_definition + + topology_template: + desc: "topology template definition of the cloud application or service" + required: no + <<: *topology_template_definition diff --git a/dcaedt_catalog/db/src/main/resources/tosca-storage-schema.yaml b/dcaedt_catalog/db/src/main/resources/tosca-storage-schema.yaml new file mode 100644 index 0000000..5ca7061 --- /dev/null +++ b/dcaedt_catalog/db/src/main/resources/tosca-storage-schema.yaml @@ -0,0 +1,37 @@ +#_policy_type_storage_definition: +# <<: *policy_type_definition +# mapping: +# <<: *policy_type_mapping +# targets: +# <<: *policy_type_targets +# storage: inline + +/_data_type_definition/mapping/derived_from: + storage: none + +/_node_type_definition/mapping/derived_from: + storage: none + +/_capability_type_definition/mapping/derived_from: + storage: none +/_capability_type_definition/mapping/valid_source_types: + storage: inline + +/_relationship_type_definition/mapping/derived_from: + storage: none +/_relationship_type_definition/mapping/valid_target_types: + storage: inline + 
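+# Note: the entries in this file overlay paths from the validation schema with
+# storage directives; 'storage: none' appears to mean the facet is not
+# persisted as a separate element, while 'storage: inline' keeps it embedded
+# in the owning node. A hypothetical overlay for group types (not part of this
+# commit) would follow the same pattern:
+#/_group_type_definition/mapping/derived_from:
+#    storage: none
+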
+/_policy_type_definition/mapping/derived_from: + storage: none +/_policy_type_definition/mapping/targets: + storage: inline + +/_node_template_definition/mapping/type: + storage: none + +/_policy_template_definition/mapping/targets: + storage: inline + +/_policy_template_definition/mapping/type: + storage: none diff --git a/dcaedt_catalog/pom.xml b/dcaedt_catalog/pom.xml new file mode 100644 index 0000000..701d665 --- /dev/null +++ b/dcaedt_catalog/pom.xml @@ -0,0 +1,29 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 + http://maven.apache.org/maven-v4_0_0.xsd"> + <modelVersion>4.0.0</modelVersion> + <artifactId>DCAE-DT-Catalog</artifactId> + <packaging>pom</packaging> + <name>DCAE DT Catalog</name> + <parent> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>dcae_dt_be_main</artifactId> + <version>1806.0.1-SNAPSHOT</version> +</parent> + <modules> + <module>asdc</module> + <module>commons</module> + <module>db</module> + <module>api</module> + <module>service</module> + </modules> + + <scm> + <connection>scm:git:https://github.research.att.com/ASC/ASC-Catalog/</connection> + <developerConnection>scm:git:https://github.research.att.com/ASC/ASC-Catalog/</developerConnection> + <tag>HEAD</tag> + <url>https://github.research.att.com/ASC/ASC-Catalog/</url> + </scm> +</project> diff --git a/dcaedt_catalog/service/README.md b/dcaedt_catalog/service/README.md new file mode 100644 index 0000000..8607f95 --- /dev/null +++ b/dcaedt_catalog/service/README.md @@ -0,0 +1,4 @@ +ASC-Catalog +=========== + +This component implements the Catalog API for the ASC Platform. diff --git a/dcaedt_catalog/service/pom.xml b/dcaedt_catalog/service/pom.xml new file mode 100644 index 0000000..c41980d --- /dev/null +++ b/dcaedt_catalog/service/pom.xml @@ -0,0 +1,80 @@ +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + + <parent> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Catalog</artifactId> + <version>1806.0.1-SNAPSHOT</version> + </parent> + <artifactId>DCAE-DT-Catalog-Service</artifactId> + <packaging>jar</packaging> + <name>DCAE-DT Catalog Service</name> + + <build> + <sourceDirectory>src/main/java</sourceDirectory> + <plugins> + <plugin> + <artifactId>maven-compiler-plugin</artifactId> + <version>3.1</version> + <configuration> + <source>1.8</source> + <target>1.8</target> + <encoding>${project.build.sourceEncoding}</encoding> + </configuration> + </plugin> + <plugin> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-maven-plugin</artifactId> + <version>1.3.3.RELEASE</version> + <configuration> + <mainClass>org.onap.sdc.dcae.catalog.engine.CatalogEngine</mainClass> + </configuration> + <executions> + <execution> + <goals> + <goal>repackage</goal> + </goals> + </execution> + </executions> + </plugin> + </plugins> + </build> + <dependencies> + <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + <version>3.8.1</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.springframework</groupId> + <artifactId>spring-core</artifactId> + <version>4.3.5.RELEASE</version> + </dependency> + <dependency> + <groupId>org.springframework</groupId> + <artifactId>spring-web</artifactId> + 
<version>4.3.5.RELEASE</version>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-starter-web</artifactId>
+			<version>1.4.1.RELEASE</version>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework</groupId>
+			<artifactId>spring-webmvc</artifactId>
+			<version>4.3.5.RELEASE</version>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-autoconfigure</artifactId>
+			<version>1.4.1.RELEASE</version>
+		</dependency>
+		<dependency>
+			<groupId>org.onap.sdc.dcae</groupId>
+			<artifactId>DCAE-DT-Catalog-API</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogController.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogController.java
new file mode 100644
index 0000000..7b9e1a2
--- /dev/null
+++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogController.java
@@ -0,0 +1,594 @@
+/*
+ * AT&T - PROPRIETARY
+ * THIS FILE CONTAINS PROPRIETARY INFORMATION OF
+ * AT&T AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN
+ * ACCORDANCE WITH APPLICABLE AGREEMENTS.
+ *
+ * Copyright (c) 2015 AT&T Knowledge Ventures
+ * Unpublished and Not for Publication
+ * All Rights Reserved
+ */
+package org.onap.sdc.dcae.catalog.engine;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+import static org.onap.sdc.dcae.catalog.Catalog.*;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+import org.onap.sdc.dcae.catalog.Catalog;
+import org.onap.sdc.dcae.catalog.asdc.ASDCCatalog;
+import org.onap.sdc.dcae.catalog.commons.Future;
+import org.onap.sdc.dcae.catalog.commons.FutureHandler;
+import org.onap.sdc.dcae.composition.util.DcaeBeConstants;
+import org.onap.sdc.dcae.composition.util.SystemProperties;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.RestController;
+
+import org.springframework.web.context.request.async.DeferredResult;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+import org.springframework.web.bind.annotation.CrossOrigin;
+
+/**
+ * All requests body:
+ * {
+ *	"id": optional request uuid,
+ *	"timestamp": optional request timestamp,
+ *	"catalog": optional catalog uri,
+ *	"timeout": optional timeout - default 0, i.e. no time limit
+ * }
+ *
+ * All responses body:
+ * { "data": {},
+ *   "error": {}
+ * }
+ *
+ * If a non-2xx response is returned, an error occurred at the catalog engine processing level.
+ * If an error has occurred during data retrieval, the response 'error' object is not empty.
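+ *
+ * Illustrative envelope for a failed call (the values are made up; the
+ * 'message'/'exception' keys are those populated by CatalogError):
+ * { "data": {},
+ *   "error": { "message": "Catalog API failed", "exception": "java.lang.Exception: ..." }
+ * }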
+ *
+ * Available URIs
+ *   /catalog
+ *     /elements : roots of the catalog; request body is optional but can specify a label under 'startingLabel'
+ *         response contains items under 'data/elements'
+ *     /{itemId}/elements : catalog descendants of the given item, possibly a mix of folders and items
+ *         response contains items under 'data/elements'
+ *     /lookup.by.name : lookup catalog entries by name.
+ *         The request body must contain a 'selector' entry with a 'name' criterion
+ *         response contains items under 'data/elements'
+ *         Example: '{"id":"5d0c1cf4-11aa-11e6-a148-3e1d05defe78","selector":{"name":"Firewall"}}'
+ *     /lookup.by.annotation
+ *         The request body must contain an 'annotation' entry and it can have a 'selector' entry
+ *         with multiple annotation property criteria
+ *         response contains items under 'data/elements'
+ *     /lookup.by.model.property.value :
+ *         The request must contain a "selector" entry as a JSONObject containing the selection criteria
+ *         (property name with values) and desired output properties (null values). Example:
+ *             "selector":{"att-part-number":"L-CSR-50M-APP-3Y",
+ *                         "management-option":"ATT",
+ *                         "vnf-type":null,
+ *                         "vendor-model":null}
+ *         response contains items under 'data/elements'
+ *     /referents : provides generic recommendations
+ *         response contains items under 'data/elements'
+ *     /{itemId}/referents : provides recommendations for the given item
+ *         response contains items under 'data/elements'
+ *     /{itemId}/model : retrieves the TOSCA model for the item with the given id
+ *         response under 'data/model'
+ *
+ */
+
+@RestController
+//@RequestMapping(value="/catalog",method=RequestMethod.POST)
+@CrossOrigin(origins="*")
+//@ConfigurationProperties(prefix="catalogController")
+public class CatalogController {
+
+	private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+	private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+	@Autowired
+	private SystemProperties systemProperties;
+
+	private boolean enableCORS = false;
+	private URI defaultCatalog;
+	private static Map<URI, Catalog> catalogs = new HashMap<URI, Catalog>();
+
+	public void setDefaultCatalog(URI theUri) {
+		debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "set default catalog at {}", theUri);
+		this.defaultCatalog = theUri;
+	}
+
+	public void setEnableCORS(boolean doEnable) {
+		this.enableCORS = doEnable;
+	}
+
+//	@RequestMapping(value="/elements",method={RequestMethod.POST, RequestMethod.GET}, produces = "application/json")
+//	public DeferredResult<CatalogResponse> items(@RequestBody(required=false) ItemsRequest theRequest) {
+//
+//		final ItemsRequest request = (theRequest == null) ?
ItemsRequest.EMPTY_REQUEST : theRequest; +// +// Catalog catalog = getCatalog(request.getCatalog()); +// DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout()); +// +// catalog.rootsByLabel(request.getStartingLabel()) +// //catalog.roots() +// .setHandler( +// new CatalogHandler<Folders>(request, result) { +// public CatalogResponse handleData(Folders theFolders) { +// JSONArray ja = new JSONArray(); +// if (theFolders != null) { +// for (Folder folder : theFolders) { +// ja.put(patchData(catalog, folder.data())); +// } +// } +// CatalogResponse response = new CatalogResponse(this.request); +// response.data() +// .put("elements", ja); +// return response; +// } +// }); +// return result; +// } +// +// @RequestMapping(value="/{theItemId}/elements",method={RequestMethod.POST,RequestMethod.GET}, produces = "application/json") +// public DeferredResult<CatalogResponse> items(@RequestBody(required=false) ItemsRequest theRequest, @PathVariable String theItemId) { +// +// final ItemsRequest request = (theRequest == null) ? ItemsRequest.EMPTY_REQUEST : theRequest; +// +// Catalog catalog = getCatalog(request.getCatalog()); +// DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout()); +// +// catalog +//// .fetchFolderByItemId(theItemId) +// .folder(theItemId) +// .withParts() +// .withPartAnnotations() +// .withItems() +// .withItemAnnotations() +// .withItemModels() +// .execute() +// .setHandler( +// new CatalogHandler<Folder>(request, result) { +// public CatalogResponse handleData(Folder theFolder) { +// CatalogResponse response = new CatalogResponse(this.request); +// if (theFolder == null) { +// return response; +// } +// +// try { +// Elements folders = theFolder.elements("parts",Folders.class); +// if (folders != null) { +// for (Object folder: folders) { +// patchData(catalog, ((Element)folder).data()); +// //lots of ephemere proxies created here .. +// Elements annotations = +// ((Element)folder).elements("annotations", Annotations.class); +// if (annotations != null) { +// for (Object a: annotations) { +// patchData(catalog, ((Annotation)a).data()); +// } +// } +// } +// } +// Elements items = theFolder.elements("items",Items.class); +// if (items != null) { +// for (Object i: items) { +// patchData(catalog, ((Element)i).data()); +// //lots of ephemere proxies created here .. 
+// Elements annotations = +// ((Element)i).elements("annotations", Annotations.class); +// if (annotations != null) { +// for (Object a: annotations){ +// patchData(catalog, ((Annotation)a).data()); +// } +// } +// } +// } +// } +// catch(Exception x) { +//x.printStackTrace(); +// return new CatalogError(this.request, "", x); +// } +// +// response.data() +// .put("element", theFolder.data()); +// return response; +// } +// }); +// +// return result; +// } +// +// @RequestMapping(value="/lookup.by.name",method=RequestMethod.POST, produces = "application/json") +// public DeferredResult<CatalogResponse> elementsByName(@RequestBody ElementsLookup theRequest) { +// +// Catalog catalog = getCatalog(theRequest.getCatalog()); +// DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(theRequest.getTimeout()); +// +// catalog +// .lookup(new JSONObject(theRequest.getSelector())) +// .setHandler( +// new CatalogHandler<Mixels>(theRequest, result) { +// public CatalogResponse handleData(Mixels theElems) { +// JSONArray ja = new JSONArray(); +// if (theElems != null) { +// for (Object elem : theElems) { +// ja.put(patchData(catalog, ((Element)elem).data())); +// } +// } +// CatalogResponse response = new CatalogResponse(theRequest); +// response.data() +// .put("elements", ja); +// return response; +// } +// }); +// +// return result; +// } +// +// @RequestMapping(value="/lookup.by.annotation",method=RequestMethod.POST, produces = "application/json") +// public DeferredResult<CatalogResponse> elementsByAnnotation(@RequestBody ElementsLookup theRequest) { +// +// Catalog catalog = getCatalog(theRequest.getCatalog()); +// DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(theRequest.getTimeout()); +// +// catalog +// .lookup(theRequest.getAnnotation(), +// new JSONObject(theRequest.getSelector())) +// .setHandler( +// new CatalogHandler<Mixels>(theRequest, result) { +// public CatalogResponse handleData(Mixels theElems) { +// JSONArray ja = new JSONArray(); +// if (theElems != null) { +// for (Object elem : theElems) { +// ja.put(patchData(catalog, ((Element)elem).data())); +// } +// } +// CatalogResponse response = new CatalogResponse(this.request); +// response.data() +// .put("elements", ja); +// return response; +// } +// }); +// +// return result; +// } + + /** + * NeoCatalog specific + *//* + @RequestMapping(value="/lookup.by.model.property.value",method=RequestMethod.POST, produces = "application/json") + public DeferredResult<CatalogResponse> elementsByModelPropertyValue(@RequestBody ElementsLookup theRequest) { + + DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(theRequest.getTimeout()); + + NeoCatalog catalog = asNeo(getCatalog(theRequest.getCatalog())); + if (catalog == null) { + result.setErrorResult( + new CatalogError( + theRequest,"The selected catalog is not capable of handling this request (lookup.by.model.property.value)")); + return result; + } + + catalog + .lookupItemsByToscaNodePropertyValue(theRequest.getJSONSelector()) + .setHandler( + new CatalogHandler<Items>(theRequest, result) { + public CatalogResponse handleData(Items theItems) { + JSONArray ja = new JSONArray(); + if (theItems != null) { + for (Item item : theItems) { + ja.put(patchData(catalog, item.data())); + } + } + CatalogResponse response = new CatalogResponse(this.request); + response.data() + .put("elements", ja); + return response; + } + }); + + return result; + } +*/ + /** + * This follows the current convention that each item will 
have a single model + 2 stage + */ +// @RequestMapping(value="/{theItemId}/model",method={RequestMethod.POST,RequestMethod.GET}, produces = "application/json") +// //public DeferredResult<CatalogResponse> model(@RequestBody ElementRequest theRequest) { +// public DeferredResult<CatalogResponse> model(@RequestBody(required=false) ElementRequest theRequest, @PathVariable String theItemId) { +// final ElementRequest request = (theRequest == null) ? ElementRequest.EMPTY_REQUEST : theRequest; +// +// Catalog catalog = getCatalog(request.getCatalog()); +// DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout()); +// +// catalog +//// .fetchItemByItemId(/*theRequest.getProductId()*/theItemId) +// .item(theItemId) +// .withModels() +// .execute() +// .setHandler( +// new CatalogHandler<Item>(request, result) { +// public CatalogResponse handleData(Item theItem) { +// if (theItem == null) { +// return new CatalogError(this.request, "No such item"); +// } +// Templates models = null; +// try { +// models = (Templates)theItem.elements("models", Templates.class); +// } +// catch (Exception x) { +// return new CatalogError(this.request, "Failed to decode templates from result", x); +// } +// +// if (models == null || models.size() == 0) { +// return new CatalogError(this.request, "Item has no models"); +// } +// if (models.size() > 1) { +// return new CatalogError(this.request, "Item has more than one model !?"); +// } +// try{ +// catalog.template(models.get(0).id()) +// .withInputs() +// .withOutputs() +// .withNodes() +// .withNodeProperties() +// .withNodePropertiesAssignments() +// .withNodeRequirements() +// .withNodeCapabilities() +// .withNodeCapabilityProperties() +// .withNodeCapabilityPropertyAssignments() +// .withPolicies() +// .withPolicyProperties() +// .withPolicyPropertiesAssignments() +// .execute() +// .setHandler( +// new CatalogHandler<Template>(this.request, this.result) { +// public CatalogResponse handleData(Template theTemplate) { +// CatalogResponse response = new CatalogResponse(this.request); +// if (theTemplate != null) { +// response.data() +// .put("model", patchData(catalog, theTemplate.data())); +// } +// return response; +// } +// }); +// } +// catch (Exception x) { +// x.printStackTrace(); +// } +// return null; +// } +// }); +// +// return result; +// } + +// @RequestMapping(value="/{theItemId}/type/{theTypeName}",method={RequestMethod.POST,RequestMethod.GET}, produces = "application/json") +// public DeferredResult<CatalogResponse> model(@RequestBody(required=false) ElementRequest theRequest, @PathVariable String theItemId, @PathVariable String theTypeName) { +// final ElementRequest request = (theRequest == null) ? 
ElementRequest.EMPTY_REQUEST : theRequest; +// +// Catalog catalog = getCatalog(request.getCatalog()); +// DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout()); +// +// catalog.type(theItemId, theTypeName) +// .withHierarchy() +// .withCapabilities() +// .withRequirements() +// .execute() +// .setHandler( +// new CatalogHandler<Type>(request, result) { +// public CatalogResponse handleData(Type theType) { +// CatalogResponse response = new CatalogResponse(this.request); +// if (theType != null) { +// response.data() +// .put("type", patchData(catalog, theType.data())); +// } +// return response; +// } +// }); +// +// return result; +// } + +/* + @RequestMapping(value="/referents",method=RequestMethod.POST, produces = "application/json") + public DeferredResult<CatalogResponse> referents(@RequestBody(required=false) ElementRequest theRequest) { + final ElementRequest request = (theRequest == null) ? ElementRequest.EMPTY_REQUEST : theRequest; + DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout()); + + NeoCatalog catalog = asNeo(getCatalog(theRequest.getCatalog())); + if (catalog == null) { + result.setErrorResult( + new CatalogError( + theRequest,"The selected catalog is not capable of handling this request (referents)")); + return result; + } + + catalog + .defaultRecommendations() + .setHandler( + new CatalogHandler<Mixels>(request, result) { + public CatalogResponse handleData(Mixels theElems) { + JSONArray ja = new JSONArray(); + if (theElems != null) { + for (Element elem : theElems) { + ja.put(patchData(catalog, elem.data())); + } + } + CatalogResponse response = new CatalogResponse(this.request); + response.data() + .put("elements", ja); + return response; + } + }); + + return result; + } +*/ + +/* @RequestMapping(value="/{theItemId}/referents",method=RequestMethod.POST, produces = "application/json") + public DeferredResult<CatalogResponse> referents(@RequestBody(required=false) ElementRequest theRequest, @PathVariable String theItemId) { + final ElementRequest request = (theRequest == null) ? 
ElementRequest.EMPTY_REQUEST : theRequest; + DeferredResult<CatalogResponse> result = new DeferredResult<CatalogResponse>(request.getTimeout()); + + NeoCatalog catalog = asNeo(getCatalog(theRequest.getCatalog())); + if (catalog == null) { + result.setErrorResult( + new CatalogError( + theRequest,"The selected catalog is not capable of handling this request (item referents)")); + return result; + } + + catalog + .recommendationsForItemId(theItemId) + .setHandler( + new CatalogHandler<Mixels>(request, result) { + public CatalogResponse handleData(Mixels theElems) { + JSONArray ja = new JSONArray(); + if (theElems != null) { + for (Element elem : theElems) { + ja.put(patchData(catalog, elem.data())); + } + } + CatalogResponse response = new CatalogResponse(this.request); + response.data() + .put("elements", ja); + return response; + } + }); + + return result; + } +*/ + @PostConstruct + public void initCatalog() { + // Dump some info and construct our configuration objects + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "initCatalog"); + + this.defaultCatalog = URI.create(systemProperties.getProperties().getProperty(DcaeBeConstants.Config.ASDC_CATALOG_URL)); + // Initialize default catalog connection + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "default catalog at {}", this.defaultCatalog); + getCatalog(null); + + // Done + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "CatalogEngine started"); + } + + @PreDestroy + public void cleanupCatalog() { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "destroyCatalog"); + } + + public Catalog getCatalog(URI theCatalogUri) { + //TODO: Thread safety! Check catalog is alive! + if (theCatalogUri == null) + theCatalogUri = this.defaultCatalog; + + Catalog cat = catalogs.get(theCatalogUri); + if (cat == null && theCatalogUri != null) { + String scheme = theCatalogUri.getScheme(); + URI catalogUri = null; + try { + catalogUri = new URI(theCatalogUri.getSchemeSpecificPart() + "#" + theCatalogUri.getFragment()); + } + catch (URISyntaxException urisx) { + throw new IllegalArgumentException("Invalid catalog reference '" + theCatalogUri.getSchemeSpecificPart() + "'"); + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Build catalog for {}", catalogUri); + + if ("asdc".equals(scheme)) { + cat = new ASDCCatalog(catalogUri); + } + else { + return null; + } + + catalogs.put(theCatalogUri, cat); + } + return cat; + } + +/* private NeoCatalog asNeo(Catalog theCatalog) { + try { + return (NeoCatalog)theCatalog; + } + catch (ClassCastException ccx) { + return null; + } + }*/ + + public JSONObject patchData(Catalog theCat, JSONObject theData) { + theData.put("catalog", theCat.getUri()); + theData.put("catalogId", theData.optLong("id")); + theData.put("id", theData.optLong("itemId")); + return theData; + } + + public abstract class CatalogHandler<T> implements FutureHandler<T> { + + protected DeferredResult result; + protected CatalogRequest request; + + public CatalogHandler(CatalogRequest theRequest, DeferredResult theResult) { + this.request = theRequest; + this.result = theResult; + } + + public abstract CatalogResponse handleData(T theData); + + //@Override + public void handle(Future<T> theEvent) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "handle"); + + if (this.result.isSetOrExpired()) { + debugLogger.log(LogLevel.WARN, this.getClass().getName(), "handle, Data is late"); + return; + } + + if (theEvent.failed()) { + this.result.setErrorResult(new CatalogError(this.request, 
"Catalog API failed", theEvent.cause())); + } + else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "handle, got: {}", theEvent.result()); + CatalogResponse response = handleData(theEvent.result()); + //a null result allows the handler to pass the processing onto some other async processing stage + if (response != null) { + if (!this.result.setResult(response)) { + this.result.setErrorResult(new CatalogError(this.request, "Catalog API call succesful but late")); + } + } + } + } + } +} diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogEngine.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogEngine.java new file mode 100644 index 0000000..042798f --- /dev/null +++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogEngine.java @@ -0,0 +1,26 @@ +package org.onap.sdc.dcae.catalog.engine; + +import org.onap.sdc.dcae.catalog.engine.CatalogEngine; +import org.springframework.boot.SpringApplication; +import org.springframework.context.ApplicationContext; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.ImportResource; +import org.springframework.boot.autoconfigure.SpringBootApplication; + + +import java.util.Arrays; + + + +@SpringBootApplication + +public class CatalogEngine { + + public static void main(String[] args) { + + SpringApplication.run(CatalogEngine.class, args); + } + +} diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogError.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogError.java new file mode 100644 index 0000000..0c7c418 --- /dev/null +++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogError.java @@ -0,0 +1,20 @@ +package org.onap.sdc.dcae.catalog.engine; + +import org.onap.sdc.dcae.catalog.engine.CatalogRequest; +import org.onap.sdc.dcae.catalog.engine.CatalogResponse; + +/** + */ +public class CatalogError extends CatalogResponse { + + public CatalogError(CatalogRequest theRequest, String theMessage) { + super(theRequest); + error().put("message", theMessage); + } + + public CatalogError(CatalogRequest theRequest, String theMessage, Throwable theError) { + super(theRequest); + error().put("message", theMessage) + .put("exception", theError.toString()); + } +} diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogMessage.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogMessage.java new file mode 100644 index 0000000..aee475b --- /dev/null +++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogMessage.java @@ -0,0 +1,27 @@ +package org.onap.sdc.dcae.catalog.engine; + +import java.util.UUID; + +public class CatalogMessage { + + private UUID id; + private long timestamp = 0; + + + public void setId(UUID theId) { + this.id = theId; + } + + public UUID getId() { + return this.id; + } + + public void setTimestamp(long theTimestamp) { + this.timestamp = theTimestamp; + } + + public long getTimestamp() { + return this.timestamp; + } + +} diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogRequest.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogRequest.java new file mode 100644 index 0000000..ad6caca --- 
/dev/null +++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogRequest.java @@ -0,0 +1,27 @@ +package org.onap.sdc.dcae.catalog.engine; + +import java.net.URI; + +import org.onap.sdc.dcae.catalog.engine.CatalogMessage; + +public class CatalogRequest extends CatalogMessage { + + private URI catalog; + private long timeout = 0; + + public void setCatalog(URI theCatalogUri) { + this.catalog = theCatalogUri; + } + + public URI getCatalog() { + return this.catalog; + } + + public void setTimeout(long theTimeout) { + this.timeout = theTimeout; + } + + public long getTimeout() { + return this.timeout; + } +} diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogResponse.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogResponse.java new file mode 100644 index 0000000..2f9913f --- /dev/null +++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/CatalogResponse.java @@ -0,0 +1,39 @@ +package org.onap.sdc.dcae.catalog.engine; + + +import com.fasterxml.jackson.annotation.JsonRawValue; + +import org.json.JSONObject; +import org.onap.sdc.dcae.catalog.engine.CatalogMessage; +import org.onap.sdc.dcae.catalog.engine.CatalogRequest; + +/** + */ +public class CatalogResponse extends CatalogMessage { + + private JSONObject data = new JSONObject(), + error = new JSONObject(); + + public CatalogResponse(CatalogRequest theRequest) { + setId(theRequest.getId()); + setTimestamp(theRequest.getTimestamp()); + } + + public JSONObject data() { + return this.data; + } + + @JsonRawValue + public String getData() { + return this.data.toString(); + } + + public JSONObject error() { + return this.error; + } + + @JsonRawValue + public String getError() { + return this.error.toString(); + } +} diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ElementRequest.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ElementRequest.java new file mode 100644 index 0000000..87d532f --- /dev/null +++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ElementRequest.java @@ -0,0 +1,6 @@ +package org.onap.sdc.dcae.catalog.engine; + +public class ElementRequest extends CatalogRequest { + + public static final ElementRequest EMPTY_REQUEST = new ElementRequest(); +} diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ElementsLookup.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ElementsLookup.java new file mode 100644 index 0000000..756be89 --- /dev/null +++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ElementsLookup.java @@ -0,0 +1,49 @@ +package org.onap.sdc.dcae.catalog.engine; + +import java.util.Map; +import java.util.Collections; + +import org.json.JSONObject; +import org.onap.sdc.dcae.catalog.engine.CatalogRequest; + +import com.fasterxml.jackson.annotation.JsonIgnore; + +/** + */ +public class ElementsLookup extends CatalogRequest { + + private String annotation; + private Map<String,Object> selector; + + public void setAnnotation(String theAnnon) { + this.annotation = theAnnon; + } + + public String getAnnotation() { + return this.annotation; + } + + public Map<String,Object> getSelector() { + return this.selector == null ? 
Collections.EMPTY_MAP : this.selector; + } + + public void setSelector(Map<String,Object> theSelector) { + this.selector = theSelector; + } + + public Object getSelectorEntry(String theName) { + return getSelector().get(theName); + } + + /** + * Because the JSONObject(Map) constructor would not copy entries wth null values. + */ + @JsonIgnore + public JSONObject getJSONSelector() { + JSONObject jsonSelector = new JSONObject(); + for (Map.Entry<String, Object> entry: this.selector.entrySet()) { + jsonSelector.put(entry.getKey(), entry.getValue() != null ? entry.getValue() : JSONObject.NULL); + } + return jsonSelector; + } +} diff --git a/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ItemsRequest.java b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ItemsRequest.java new file mode 100644 index 0000000..9215282 --- /dev/null +++ b/dcaedt_catalog/service/src/main/java/org/onap/sdc/dcae/catalog/engine/ItemsRequest.java @@ -0,0 +1,16 @@ +package org.onap.sdc.dcae.catalog.engine; + +public class ItemsRequest extends CatalogRequest { + + public static final ItemsRequest EMPTY_REQUEST = new ItemsRequest("Superportfolio"); + + private String startingLabel; + + private ItemsRequest(String theLabel) { + this.startingLabel = theLabel; + } + + public String getStartingLabel() { + return this.startingLabel == null ? "Superportfolio" : this.startingLabel; + } +} diff --git a/dcaedt_catalog/service/src/main/resources/log4j.properties b/dcaedt_catalog/service/src/main/resources/log4j.properties new file mode 100644 index 0000000..e732166 --- /dev/null +++ b/dcaedt_catalog/service/src/main/resources/log4j.properties @@ -0,0 +1,15 @@ +# +# +# +# + + +log4j.rootLogger=DEBUG, stdout + +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5p %l- %m%n + +log4j.logger.org.vertx=INFO +log4j.logger.com.hazelcast=DEBUG +log4j.logger.io.netty=WARN diff --git a/dcaedt_tools/pom.xml b/dcaedt_tools/pom.xml new file mode 100644 index 0000000..50ed8ef --- /dev/null +++ b/dcaedt_tools/pom.xml @@ -0,0 +1,136 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <groupId>org.onap.sdc.dcae.tools</groupId> + <artifactId>dcaedt_tools</artifactId> + <name>DCAE-D Tools</name> + <parent> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>dcae_dt_be_main</artifactId> + <version>1806.0.1-SNAPSHOT</version> + </parent> + <build> + <plugins> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>exec-maven-plugin</artifactId> + <version>1.2.1</version> + <executions> + <execution> + <goals> + <goal>java</goal> + </goals> + </execution> + </executions> + <configuration> + <mainClass>tools.Main</mainClass> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <version>3.0.2</version> + <configuration> + <archive> + <manifest> + <addClasspath>true</addClasspath> + <classpathPrefix>lib/</classpathPrefix> + <mainClass>tools.Main</mainClass> + </manifest> + </archive> + </configuration> + </plugin> + <plugin> + <artifactId>maven-assembly-plugin</artifactId> + <version>3.0.0</version> + <configuration> + <descriptorRefs> + 
<descriptorRef>jar-with-dependencies</descriptorRef> + </descriptorRefs> + <archive> + <manifest> + <addClasspath>true</addClasspath> + <mainClass>tools.Main</mainClass> + </manifest> + </archive> + </configuration> + <executions> + <execution> + <id>make-assembly</id> <!-- this is used for inheritance merges --> + <phase>package</phase> <!-- bind to the packaging phase --> + <goals> + <goal>single</goal> + </goals> + </execution> + </executions> + </plugin> + </plugins> + </build> + <dependencies> + <dependency> + <groupId>com.google.code.gson</groupId> + <artifactId>gson</artifactId> + <version>2.8.0</version> + </dependency> + <dependency> + <groupId>org.onap.sdc.dcae.property</groupId> + <artifactId>DCAE-DT-PROPERTY</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.powermock</groupId> + <artifactId>powermock-module-junit4</artifactId> + <version>1.6.4</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.powermock</groupId> + <artifactId>powermock-api-mockito</artifactId> + <version>1.6.4</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.testng</groupId> + <artifactId>testng</artifactId> + <version>6.9.10</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.httpcomponents</groupId> + <artifactId>httpclient</artifactId> + </dependency> + <dependency> + <groupId>commons-logging</groupId> + <artifactId>commons-logging</artifactId> + <version>1.1.1</version> + </dependency> + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-core</artifactId> + <version>2.9.2</version> + </dependency> + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-annotations</artifactId> + <version>2.9.2</version> + </dependency> + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-databind</artifactId> + <version>2.9.2</version> + </dependency> + <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + <version>4.12</version> + </dependency> + </dependencies> + + <distributionManagement> + <site> + <id>onap-site</id> + <url>dav:${onap.nexus.url}${sitePath}</url> + </site> + </distributionManagement> +</project>
\ No newline at end of file
diff --git a/dcaedt_tools/src/ReadMe.txt b/dcaedt_tools/src/ReadMe.txt
new file mode 100644
index 0000000..4657a81
--- /dev/null
+++ b/dcaedt_tools/src/ReadMe.txt
@@ -0,0 +1,64 @@
+How to run:
+mvn exec:java -Dexec.mainClass=tools.Main -Dexec.args="'environment.json' 'config.json'"
+
+environment.json example:
+
+{
+	"dcaeBeHost": "http://135.91.225.81",
+	"dcaeBePort": "8080",
+	"apiPath": "/dcae",
+	"userEditor": "admin"
+}
+
+config.json example:
+{
+	"templateInfo": [{
+			"name": "SNMP Fault",
+			"description": "SNMP FM with Map-Supplement-Enrich",
+			"category": "Template / Base Monitoring Template",
+			"subCategory": "some subCategory",
+			"updateIfExist": "true",
+			"composition": [{
+				"type": "Map",
+				"alias": "mapper"
+			}, {
+				"type": "Supplement",
+				"alias": "sup"
+			}, {
+				"type": "Enrich",
+				"alias": "enrich"
+			}]
+		},
+		{
+			"name": "FOI",
+			"description": "FOI SFTP with FOI-Collector and Docker-Map",
+			"category": "Template / Base Monitoring Template",
+			"subCategory": "some subCategory",
+			"updateIfExist": "true",
+			"composition": [{
+				"type": "FOI Collector",
+				"alias": "collector"
+			}, {
+				"type": "DockerMap",
+				"alias": "map"
+			}],
+			"relation": [{
+				"fromComponent": "collector.FOISftp",
+				"fromRequirement": "stream_publish_0",
+				"toComponent": "map.topic1",
+				"toCapability": "topic"
+			}]
+		},
+		{
+			"name": "Syslog non-VES Collector",
+			"description": "Syslog flow with Syslog Collector",
+			"category": "Template / Base Monitoring Template",
+			"subCategory": "some subCategory",
+			"updateIfExist": "true",
+			"composition": [{
+				"type": "Syslog",
+				"alias": "collector"
+			}]
+		}
+	]
+}
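+
+The tools can also be run from the self-contained jar produced by the
+maven-assembly-plugin configured in this module's pom.xml (the exact jar name
+depends on the build version; the one below is only an example):
+
+java -jar dcaedt_tools-1806.0.1-SNAPSHOT-jar-with-dependencies.jar environment.json config.json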
\ No newline at end of file diff --git a/dcaedt_tools/src/main/java/json/Credential.java b/dcaedt_tools/src/main/java/json/Credential.java new file mode 100644 index 0000000..001e7cf --- /dev/null +++ b/dcaedt_tools/src/main/java/json/Credential.java @@ -0,0 +1,62 @@ + +package json; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Credential { + + @SerializedName("editor") + private String Editor; + @SerializedName("firstName") + private String FirstName; + @SerializedName("lastName") + private String LastName; + @SerializedName("password") + private String Password; + @SerializedName("username") + private String Username; + + public String getEditor() { + return Editor; + } + + public void setEditor(String editor) { + Editor = editor; + } + + public String getFirstName() { + return FirstName; + } + + public void setFirstName(String firstName) { + FirstName = firstName; + } + + public String getLastName() { + return LastName; + } + + public void setLastName(String lastName) { + LastName = lastName; + } + + public String getPassword() { + return Password; + } + + public void setPassword(String password) { + Password = password; + } + + public String getUsername() { + return Username; + } + + public void setUsername(String username) { + Username = username; + } + +} diff --git a/dcaedt_tools/src/main/java/json/Credentials.java b/dcaedt_tools/src/main/java/json/Credentials.java new file mode 100644 index 0000000..817391a --- /dev/null +++ b/dcaedt_tools/src/main/java/json/Credentials.java @@ -0,0 +1,23 @@ + +package json; + +import java.util.List; +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Credentials { + + @SerializedName("credentials") + private List<Credential> Credentials; + + public List<Credential> getCredentials() { + return Credentials; + } + + public void setCredentials(List<Credential> credentials) { + Credentials = credentials; + } + +} diff --git a/dcaedt_tools/src/main/java/json/Environment.java b/dcaedt_tools/src/main/java/json/Environment.java new file mode 100644 index 0000000..8ce6dfc --- /dev/null +++ b/dcaedt_tools/src/main/java/json/Environment.java @@ -0,0 +1,51 @@ + +package json; +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Environment { + + @SerializedName("apiPath") + private String ApiPath; + @SerializedName("dcaeBeHost") + private String DcaeBeHost; + @SerializedName("dcaeBePort") + private String DcaeBePort; + @SerializedName("credential") + private Credential credential; + + public String getApiPath() { + return ApiPath; + } + + public void setApiPath(String apiPath) { + ApiPath = apiPath; + } + + public String getDcaeBeHost() { + return DcaeBeHost; + } + + public void setDcaeBeHost(String dcaeBeHost) { + DcaeBeHost = dcaeBeHost; + } + + public String getDcaeBePort() { + return DcaeBePort; + } + + public void setDcaeBePort(String dcaeBePort) { + DcaeBePort = dcaeBePort; + } + + public Credential getCredential() { + return credential; + } + + public void setCredential(Credential credential) { + this.credential = credential; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ElementsResponse/Data.java b/dcaedt_tools/src/main/java/json/response/ElementsResponse/Data.java new file mode 100644 
index 0000000..15876af --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ElementsResponse/Data.java @@ -0,0 +1,23 @@ + +package json.response.ElementsResponse; + +import java.util.List; +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Data { + + @SerializedName("elements") + private List<Element> mElements; + + public List<Element> getElements() { + return mElements; + } + + public void setElements(List<Element> elements) { + mElements = elements; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ElementsResponse/Element.java b/dcaedt_tools/src/main/java/json/response/ElementsResponse/Element.java new file mode 100644 index 0000000..6402061 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ElementsResponse/Element.java @@ -0,0 +1,73 @@ + +package json.response.ElementsResponse; + +import java.util.List; +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Element { + + @SerializedName("catalog") + private String mCatalog; + @SerializedName("catalogId") + private Long mCatalogId; + @SerializedName("id") + private Long mId; + @SerializedName("itemId") + private String mItemId; + @SerializedName("labels") + private List<String> mLabels; + @SerializedName("name") + private String mName; + + public String getCatalog() { + return mCatalog; + } + + public void setCatalog(String catalog) { + mCatalog = catalog; + } + + public Long getCatalogId() { + return mCatalogId; + } + + public void setCatalogId(Long catalogId) { + mCatalogId = catalogId; + } + + public Long getId() { + return mId; + } + + public void setId(Long id) { + mId = id; + } + + public String getItemId() { + return mItemId; + } + + public void setItemId(String itemId) { + mItemId = itemId; + } + + public List<String> getLabels() { + return mLabels; + } + + public void setLabels(List<String> labels) { + mLabels = labels; + } + + public String getName() { + return mName; + } + + public void setName(String name) { + mName = name; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ElementsResponse/ElementsResponse.java b/dcaedt_tools/src/main/java/json/response/ElementsResponse/ElementsResponse.java new file mode 100644 index 0000000..02dcdf2 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ElementsResponse/ElementsResponse.java @@ -0,0 +1,53 @@ + +package json.response.ElementsResponse; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; +import json.response.Error; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class ElementsResponse { + + @SerializedName("data") + private Data mData; + @SerializedName("error") + private Error mError; + @SerializedName("id") + private Object mId; + @SerializedName("timestamp") + private Long mTimestamp; + + public Data getData() { + return mData; + } + + public void setData(Data data) { + mData = data; + } + + public Error getError() { + return mError; + } + + public void setError(Error error) { + mError = error; + } + + public Object getId() { + return mId; + } + + public void setId(Object id) { + mId = id; + } + + public Long getTimestamp() { + return mTimestamp; + } + + public void setTimestamp(Long timestamp) { + mTimestamp = timestamp; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/Error.java 
b/dcaedt_tools/src/main/java/json/response/Error.java new file mode 100644 index 0000000..bf68ac1 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/Error.java @@ -0,0 +1,11 @@ + +package json.response; + +import javax.annotation.Generated; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Error { + + +} diff --git a/dcaedt_tools/src/main/java/json/response/ItemsResponse/Artifact.java b/dcaedt_tools/src/main/java/json/response/ItemsResponse/Artifact.java new file mode 100644 index 0000000..c8cf275 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ItemsResponse/Artifact.java @@ -0,0 +1,102 @@ + +package json.response.ItemsResponse; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Artifact { + + @SerializedName("artifactChecksum") + private String mArtifactChecksum; + @SerializedName("artifactDescription") + private String mArtifactDescription; + @SerializedName("artifactGroupType") + private String mArtifactGroupType; + @SerializedName("artifactLabel") + private String mArtifactLabel; + @SerializedName("artifactName") + private String mArtifactName; + @SerializedName("artifactType") + private String mArtifactType; + @SerializedName("artifactURL") + private String mArtifactURL; + @SerializedName("artifactUUID") + private String mArtifactUUID; + @SerializedName("artifactVersion") + private String mArtifactVersion; + + public String getArtifactChecksum() { + return mArtifactChecksum; + } + + public void setArtifactChecksum(String artifactChecksum) { + mArtifactChecksum = artifactChecksum; + } + + public String getArtifactDescription() { + return mArtifactDescription; + } + + public void setArtifactDescription(String artifactDescription) { + mArtifactDescription = artifactDescription; + } + + public String getArtifactGroupType() { + return mArtifactGroupType; + } + + public void setArtifactGroupType(String artifactGroupType) { + mArtifactGroupType = artifactGroupType; + } + + public String getArtifactLabel() { + return mArtifactLabel; + } + + public void setArtifactLabel(String artifactLabel) { + mArtifactLabel = artifactLabel; + } + + public String getArtifactName() { + return mArtifactName; + } + + public void setArtifactName(String artifactName) { + mArtifactName = artifactName; + } + + public String getArtifactType() { + return mArtifactType; + } + + public void setArtifactType(String artifactType) { + mArtifactType = artifactType; + } + + public String getArtifactURL() { + return mArtifactURL; + } + + public void setArtifactURL(String artifactURL) { + mArtifactURL = artifactURL; + } + + public String getArtifactUUID() { + return mArtifactUUID; + } + + public void setArtifactUUID(String artifactUUID) { + mArtifactUUID = artifactUUID; + } + + public String getArtifactVersion() { + return mArtifactVersion; + } + + public void setArtifactVersion(String artifactVersion) { + mArtifactVersion = artifactVersion; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ItemsResponse/Data.java b/dcaedt_tools/src/main/java/json/response/ItemsResponse/Data.java new file mode 100644 index 0000000..36fcb09 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ItemsResponse/Data.java @@ -0,0 +1,22 @@ + +package json.response.ItemsResponse; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Data { + + 
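+    // Unlike ElementsResponse.Data, which wraps a list of elements, this wraps the single "element" object of an items response.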
@SerializedName("element") + private Element mElement; + + public Element getElement() { + return mElement; + } + + public void setElement(Element element) { + mElement = element; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ItemsResponse/Element.java b/dcaedt_tools/src/main/java/json/response/ItemsResponse/Element.java new file mode 100644 index 0000000..5d56a25 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ItemsResponse/Element.java @@ -0,0 +1,53 @@ + +package json.response.ItemsResponse; + +import java.util.List; +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Element { + + @SerializedName("id") + private Long mId; + @SerializedName("itemId") + private String mItemId; + @SerializedName("items") + private List<Item> mItems; + @SerializedName("name") + private String mName; + + public Long getId() { + return mId; + } + + public void setId(Long id) { + mId = id; + } + + public String getItemId() { + return mItemId; + } + + public void setItemId(String itemId) { + mItemId = itemId; + } + + public List<Item> getItems() { + return mItems; + } + + public void setItems(List<Item> items) { + mItems = items; + } + + public String getName() { + return mName; + } + + public void setName(String name) { + mName = name; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ItemsResponse/Item.java b/dcaedt_tools/src/main/java/json/response/ItemsResponse/Item.java new file mode 100644 index 0000000..6f0e518 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ItemsResponse/Item.java @@ -0,0 +1,203 @@ + +package json.response.ItemsResponse; + +import java.util.List; +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Item { + + @SerializedName("artifacts") + private List<Artifact> mArtifacts; + @SerializedName("catalog") + private String mCatalog; + @SerializedName("catalogId") + private Long mCatalogId; + @SerializedName("category") + private String mCategory; + @SerializedName("description") + private String mDescription; + @SerializedName("id") + private Long mId; + @SerializedName("invariantUUID") + private String mInvariantUUID; + @SerializedName("itemId") + private String mItemId; + @SerializedName("lastUpdaterFullName") + private String mLastUpdaterFullName; + @SerializedName("lastUpdaterUserId") + private String mLastUpdaterUserId; + @SerializedName("lifecycleState") + private String mLifecycleState; + @SerializedName("models") + private List<Model> mModels; + @SerializedName("name") + private String mName; + @SerializedName("resourceType") + private String mResourceType; + @SerializedName("subCategory") + private String mSubCategory; + @SerializedName("toscaModelURL") + private String mToscaModelURL; + @SerializedName("toscaResourceName") + private String mToscaResourceName; + @SerializedName("uuid") + private String mUuid; + @SerializedName("version") + private String mVersion; + + public List<Artifact> getArtifacts() { + return mArtifacts; + } + + public void setArtifacts(List<Artifact> artifacts) { + mArtifacts = artifacts; + } + + public String getCatalog() { + return mCatalog; + } + + public void setCatalog(String catalog) { + mCatalog = catalog; + } + + public Long getCatalogId() { + return mCatalogId; + } + + public void setCatalogId(Long catalogId) { + mCatalogId = catalogId; + } + + public String 
getCategory() { + return mCategory; + } + + public void setCategory(String category) { + mCategory = category; + } + + public String getDescription() { + return mDescription; + } + + public void setDescription(String description) { + mDescription = description; + } + + public Long getId() { + return mId; + } + + public void setId(Long id) { + mId = id; + } + + public String getInvariantUUID() { + return mInvariantUUID; + } + + public void setInvariantUUID(String invariantUUID) { + mInvariantUUID = invariantUUID; + } + + public String getItemId() { + return mItemId; + } + + public void setItemId(String itemId) { + mItemId = itemId; + } + + public String getLastUpdaterFullName() { + return mLastUpdaterFullName; + } + + public void setLastUpdaterFullName(String lastUpdaterFullName) { + mLastUpdaterFullName = lastUpdaterFullName; + } + + public String getLastUpdaterUserId() { + return mLastUpdaterUserId; + } + + public void setLastUpdaterUserId(String lastUpdaterUserId) { + mLastUpdaterUserId = lastUpdaterUserId; + } + + public String getLifecycleState() { + return mLifecycleState; + } + + public void setLifecycleState(String lifecycleState) { + mLifecycleState = lifecycleState; + } + + public List<Model> getModels() { + return mModels; + } + + public void setModels(List<Model> models) { + mModels = models; + } + + public String getName() { + return mName; + } + + public void setName(String name) { + mName = name; + } + + public String getResourceType() { + return mResourceType; + } + + public void setResourceType(String resourceType) { + mResourceType = resourceType; + } + + public String getSubCategory() { + return mSubCategory; + } + + public void setSubCategory(String subCategory) { + mSubCategory = subCategory; + } + + public String getToscaModelURL() { + return mToscaModelURL; + } + + public void setToscaModelURL(String toscaModelURL) { + mToscaModelURL = toscaModelURL; + } + + public String getToscaResourceName() { + return mToscaResourceName; + } + + public void setToscaResourceName(String toscaResourceName) { + mToscaResourceName = toscaResourceName; + } + + public String getUuid() { + return mUuid; + } + + public void setUuid(String uuid) { + mUuid = uuid; + } + + public String getVersion() { + return mVersion; + } + + public void setVersion(String version) { + mVersion = version; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ItemsResponse/ItemsResponse.java b/dcaedt_tools/src/main/java/json/response/ItemsResponse/ItemsResponse.java new file mode 100644 index 0000000..5b64b59 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ItemsResponse/ItemsResponse.java @@ -0,0 +1,53 @@ + +package json.response.ItemsResponse; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; +import json.response.Error; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class ItemsResponse { + + @SerializedName("data") + private Data mData; + @SerializedName("error") + private Error mError; + @SerializedName("id") + private Object mId; + @SerializedName("timestamp") + private Long mTimestamp; + + public Data getData() { + return mData; + } + + public void setData(Data data) { + mData = data; + } + + public Error getError() { + return mError; + } + + public void setError(Error error) { + mError = error; + } + + public Object getId() { + return mId; + } + + public void setId(Object id) { + mId = id; + } + + public Long getTimestamp() { + return mTimestamp; + } + + public void setTimestamp(Long timestamp) { + mTimestamp = 
timestamp; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ItemsResponse/Model.java b/dcaedt_tools/src/main/java/json/response/ItemsResponse/Model.java new file mode 100644 index 0000000..036b16b --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ItemsResponse/Model.java @@ -0,0 +1,62 @@ + +package json.response.ItemsResponse; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Model { + + @SerializedName("description") + private String mDescription; + @SerializedName("id") + private String mId; + @SerializedName("itemId") + private String mItemId; + @SerializedName("name") + private String mName; + @SerializedName("version") + private String mVersion; + + public String getDescription() { + return mDescription; + } + + public void setDescription(String description) { + mDescription = description; + } + + public String getId() { + return mId; + } + + public void setId(String id) { + mId = id; + } + + public String getItemId() { + return mItemId; + } + + public void setItemId(String itemId) { + mItemId = itemId; + } + + public String getName() { + return mName; + } + + public void setName(String name) { + mName = name; + } + + public String getVersion() { + return mVersion; + } + + public void setVersion(String version) { + mVersion = version; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ModelResponse/Assignment.java b/dcaedt_tools/src/main/java/json/response/ModelResponse/Assignment.java new file mode 100644 index 0000000..40d8e19 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ModelResponse/Assignment.java @@ -0,0 +1,22 @@ + +package json.response.ModelResponse; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Assignment { + + @SerializedName("value") + private String mValue; + + public String getValue() { + return mValue; + } + + public void setValue(String value) { + mValue = value; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ModelResponse/Capability.java b/dcaedt_tools/src/main/java/json/response/ModelResponse/Capability.java new file mode 100644 index 0000000..ced2415 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ModelResponse/Capability.java @@ -0,0 +1,63 @@ + +package json.response.ModelResponse; + +import java.util.List; +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Capability { + + @SerializedName("dcae.capabilities.stream.subscribe") + private DcaeCapabilitiesStreamSubscribe mDcaeCapabilitiesStreamSubscribe; + @SerializedName("id") + private String mId; + @SerializedName("name") + private String mName; + @SerializedName("properties") + private List<Property> mProperties; + @SerializedName("type") + private Type mType; + + public DcaeCapabilitiesStreamSubscribe getDcaeCapabilitiesStreamSubscribe() { + return mDcaeCapabilitiesStreamSubscribe; + } + + public void setDcaeCapabilitiesStreamSubscribe(DcaeCapabilitiesStreamSubscribe dcaeCapabilitiesStreamSubscribe) { + mDcaeCapabilitiesStreamSubscribe = dcaeCapabilitiesStreamSubscribe; + } + + public String getId() { + return mId; + } + + public void setId(String id) { + mId = id; + } + + public String getName() { + return mName; + } + + public void setName(String name) { + mName = name; + } + + 
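+    // Note: the "dcae.capabilities.stream.subscribe" member above is keyed by a fully qualified TOSCA capability type name, kept verbatim as the JSON property name.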
public List<Property> getProperties() { + return mProperties; + } + + public void setProperties(List<Property> properties) { + mProperties = properties; + } + + public Type getType() { + return mType; + } + + public void setType(Type type) { + mType = type; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ModelResponse/Data.java b/dcaedt_tools/src/main/java/json/response/ModelResponse/Data.java new file mode 100644 index 0000000..26ea28f --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ModelResponse/Data.java @@ -0,0 +1,22 @@ + +package json.response.ModelResponse; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Data { + + @SerializedName("model") + private Model mModel; + + public Model getModel() { + return mModel; + } + + public void setModel(Model model) { + mModel = model; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ModelResponse/DcaeCapabilitiesStreamSubscribe.java b/dcaedt_tools/src/main/java/json/response/ModelResponse/DcaeCapabilitiesStreamSubscribe.java new file mode 100644 index 0000000..e244f52 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ModelResponse/DcaeCapabilitiesStreamSubscribe.java @@ -0,0 +1,23 @@ + +package json.response.ModelResponse; + +import java.util.List; +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class DcaeCapabilitiesStreamSubscribe { + + @SerializedName("properties") + private List<Property> mProperties; + + public List<Property> getProperties() { + return mProperties; + } + + public void setProperties(List<Property> properties) { + mProperties = properties; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ModelResponse/Format.java b/dcaedt_tools/src/main/java/json/response/ModelResponse/Format.java new file mode 100644 index 0000000..37ef8ec --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ModelResponse/Format.java @@ -0,0 +1,22 @@ + +package json.response.ModelResponse; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Format { + + @SerializedName("equal") + private String mEqual; + + public String getEqual() { + return mEqual; + } + + public void setEqual(String equal) { + mEqual = equal; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ModelResponse/Model.java b/dcaedt_tools/src/main/java/json/response/ModelResponse/Model.java new file mode 100644 index 0000000..2b94678 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ModelResponse/Model.java @@ -0,0 +1,73 @@ + +package json.response.ModelResponse; + +import java.util.List; +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Model { + + @SerializedName("catalog") + private String mCatalog; + @SerializedName("catalogId") + private Long mCatalogId; + @SerializedName("id") + private Long mId; + @SerializedName("itemId") + private String mItemId; + @SerializedName("name") + private String mName; + @SerializedName("nodes") + private List<Node> mNodes; + + public String getCatalog() { + return mCatalog; + } + + public void setCatalog(String catalog) { + mCatalog = catalog; + } + + public Long getCatalogId() { + return mCatalogId; + } 
+ + public void setCatalogId(Long catalogId) { + mCatalogId = catalogId; + } + + public Long getId() { + return mId; + } + + public void setId(Long id) { + mId = id; + } + + public String getItemId() { + return mItemId; + } + + public void setItemId(String itemId) { + mItemId = itemId; + } + + public String getName() { + return mName; + } + + public void setName(String name) { + mName = name; + } + + public List<Node> getNodes() { + return mNodes; + } + + public void setNodes(List<Node> nodes) { + mNodes = nodes; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ModelResponse/ModelResponse.java b/dcaedt_tools/src/main/java/json/response/ModelResponse/ModelResponse.java new file mode 100644 index 0000000..fbe995a --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ModelResponse/ModelResponse.java @@ -0,0 +1,53 @@ + +package json.response.ModelResponse; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; +import json.response.Error; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class ModelResponse { + + @SerializedName("data") + private Data mData; + @SerializedName("error") + private Error mError; + @SerializedName("id") + private Object mId; + @SerializedName("timestamp") + private Long mTimestamp; + + public Data getData() { + return mData; + } + + public void setData(Data data) { + mData = data; + } + + public Error getError() { + return mError; + } + + public void setError(Error error) { + mError = error; + } + + public Object getId() { + return mId; + } + + public void setId(Object id) { + mId = id; + } + + public Long getTimestamp() { + return mTimestamp; + } + + public void setTimestamp(Long timestamp) { + mTimestamp = timestamp; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ModelResponse/Node.java b/dcaedt_tools/src/main/java/json/response/ModelResponse/Node.java new file mode 100644 index 0000000..63576e4 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ModelResponse/Node.java @@ -0,0 +1,73 @@ + +package json.response.ModelResponse; + +import java.util.List; +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Node { + + @SerializedName("capabilities") + private List<Capability> mCapabilities; + @SerializedName("description") + private String mDescription; + @SerializedName("name") + private String mName; + @SerializedName("properties") + private List<Property> mProperties; + @SerializedName("requirements") + private List<Requirement> mRequirements; + @SerializedName("type") + private String mType; + + public List<Capability> getCapabilities() { + return mCapabilities; + } + + public void setCapabilities(List<Capability> capabilities) { + mCapabilities = capabilities; + } + + public String getDescription() { + return mDescription; + } + + public void setDescription(String description) { + mDescription = description; + } + + public String getName() { + return mName; + } + + public void setName(String name) { + mName = name; + } + + public List<Property> getProperties() { + return mProperties; + } + + public void setProperties(List<Property> properties) { + mProperties = properties; + } + + public List<Requirement> getRequirements() { + return mRequirements; + } + + public void setRequirements(List<Requirement> requirements) { + mRequirements = requirements; + } + + public String getType() { + return mType; + } + + public void setType(String type) { + mType = type; + } + +} diff --git
a/dcaedt_tools/src/main/java/json/response/ModelResponse/NodeFilter.java b/dcaedt_tools/src/main/java/json/response/ModelResponse/NodeFilter.java new file mode 100644 index 0000000..11dfaff --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ModelResponse/NodeFilter.java @@ -0,0 +1,23 @@ + +package json.response.ModelResponse; + +import java.util.List; +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class NodeFilter { + + @SerializedName("capabilities") + private List<Capability> mCapabilities; + + public List<Capability> getCapabilities() { + return mCapabilities; + } + + public void setCapabilities(List<Capability> capabilities) { + mCapabilities = capabilities; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ModelResponse/Property.java b/dcaedt_tools/src/main/java/json/response/ModelResponse/Property.java new file mode 100644 index 0000000..4edb3e7 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ModelResponse/Property.java @@ -0,0 +1,53 @@ + +package json.response.ModelResponse; + +import java.util.List; +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Property { + + @SerializedName("assignment") + private Assignment mAssignment; + @SerializedName("format") + private List<Format> mFormat; + @SerializedName("name") + private String mName; + @SerializedName("type") + private String mType; + + public Assignment getAssignment() { + return mAssignment; + } + + public void setAssignment(Assignment assignment) { + mAssignment = assignment; + } + + public List<Format> getFormat() { + return mFormat; + } + + public void setFormat(List<Format> format) { + mFormat = format; + } + + public String getName() { + return mName; + } + + public void setName(String name) { + mName = name; + } + + public String getType() { + return mType; + } + + public void setType(String type) { + mType = type; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ModelResponse/Relationship.java b/dcaedt_tools/src/main/java/json/response/ModelResponse/Relationship.java new file mode 100644 index 0000000..78b3e0d --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ModelResponse/Relationship.java @@ -0,0 +1,22 @@ + +package json.response.ModelResponse; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Relationship { + + @SerializedName("type") + private String mType; + + public String getType() { + return mType; + } + + public void setType(String type) { + mType = type; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ModelResponse/Requirement.java b/dcaedt_tools/src/main/java/json/response/ModelResponse/Requirement.java new file mode 100644 index 0000000..29a89cb --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ModelResponse/Requirement.java @@ -0,0 +1,52 @@ + +package json.response.ModelResponse; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Requirement { + + @SerializedName("capability") + private Capability mCapability; + @SerializedName("name") + private String mName; + @SerializedName("node_filter") + private NodeFilter mNodeFilter; + @SerializedName("relationship") + 
private Relationship mRelationship; + + public Capability getCapability() { + return mCapability; + } + + public void setCapability(Capability capability) { + mCapability = capability; + } + + public String getName() { + return mName; + } + + public void setName(String name) { + mName = name; + } + + public NodeFilter getNodeFilter() { + return mNodeFilter; + } + + public void setNodeFilter(NodeFilter nodeFilter) { + mNodeFilter = nodeFilter; + } + + public Relationship getRelationship() { + return mRelationship; + } + + public void setRelationship(Relationship relationship) { + mRelationship = relationship; + } + +} diff --git a/dcaedt_tools/src/main/java/json/response/ModelResponse/Type.java b/dcaedt_tools/src/main/java/json/response/ModelResponse/Type.java new file mode 100644 index 0000000..4e54518 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/response/ModelResponse/Type.java @@ -0,0 +1,32 @@ + +package json.response.ModelResponse; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Type { + + @SerializedName("id") + private String mId; + @SerializedName("name") + private String mName; + + public String getId() { + return mId; + } + + public void setId(String id) { + mId = id; + } + + public String getName() { + return mName; + } + + public void setName(String name) { + mName = name; + } + +} diff --git a/dcaedt_tools/src/main/java/json/templateInfo/Composition.java b/dcaedt_tools/src/main/java/json/templateInfo/Composition.java new file mode 100644 index 0000000..b99ad75 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/templateInfo/Composition.java @@ -0,0 +1,32 @@ + +package json.templateInfo; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class Composition { + + @SerializedName("alias") + private String Alias; + @SerializedName("type") + private String Type; + + public String getAlias() { + return Alias; + } + + public void setAlias(String alias) { + Alias = alias; + } + + public String getType() { + return Type; + } + + public void setType(String type) { + Type = type; + } + +} diff --git a/dcaedt_tools/src/main/java/json/templateInfo/DeployTemplateConfig.java b/dcaedt_tools/src/main/java/json/templateInfo/DeployTemplateConfig.java new file mode 100644 index 0000000..303418d --- /dev/null +++ b/dcaedt_tools/src/main/java/json/templateInfo/DeployTemplateConfig.java @@ -0,0 +1,22 @@ +package json.templateInfo; + +import java.util.List; +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class DeployTemplateConfig { + + @SerializedName("templateInfo") + private List<TemplateInfo> TemplateInfo; + + public List<TemplateInfo> getTemplateInfo() { + return TemplateInfo; + } + + public void setTemplateInfo(List<TemplateInfo> templateInfo) { + TemplateInfo = templateInfo; + } + +} diff --git a/dcaedt_tools/src/main/java/json/templateInfo/Relation.java b/dcaedt_tools/src/main/java/json/templateInfo/Relation.java new file mode 100644 index 0000000..cbe7cb9 --- /dev/null +++ b/dcaedt_tools/src/main/java/json/templateInfo/Relation.java @@ -0,0 +1,50 @@ + +package json.templateInfo; + +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class 
Relation { + @SerializedName("fromComponent") + private String fromComponent; + @SerializedName("fromRequirement") + private String fromRequirement; + @SerializedName("toComponent") + private String toComponent; + @SerializedName("toCapability") + private String toCapability; + + public String getFromComponent() { + return fromComponent; + } + + public void setFromComponent(String fromComponent) { + this.fromComponent = fromComponent; + } + + public String getFromRequirement() { + return fromRequirement; + } + + public void setFromRequirement(String fromRequirement) { + this.fromRequirement = fromRequirement; + } + + public String getToComponent() { + return toComponent; + } + + public void setToComponent(String toComponent) { + this.toComponent = toComponent; + } + + public String getToCapability() { + return toCapability; + } + + public void setToCapability(String toCapability) { + this.toCapability = toCapability; + } +} diff --git a/dcaedt_tools/src/main/java/json/templateInfo/TemplateInfo.java b/dcaedt_tools/src/main/java/json/templateInfo/TemplateInfo.java new file mode 100644 index 0000000..c34aaef --- /dev/null +++ b/dcaedt_tools/src/main/java/json/templateInfo/TemplateInfo.java @@ -0,0 +1,83 @@ + +package json.templateInfo; + +import java.util.List; +import javax.annotation.Generated; +import com.google.gson.annotations.SerializedName; + +@Generated("net.hexar.json2pojo") +@SuppressWarnings("unused") +public class TemplateInfo { + + @SerializedName("category") + private String Category; + @SerializedName("composition") + private List<json.templateInfo.Composition> Composition; + @SerializedName("description") + private String Description; + @SerializedName("name") + private String Name; + @SerializedName("relations") + private List<Relation> Relations; + @SerializedName("subCategory") + private String SubCategory; + @SerializedName("updateIfExist") + private Boolean UpdateIfExist; + + public String getCategory() { + return Category; + } + + public void setCategory(String category) { + Category = category; + } + + public List<json.templateInfo.Composition> getComposition() { + return Composition; + } + + public void setComposition(List<json.templateInfo.Composition> composition) { + Composition = composition; + } + + public String getDescription() { + return Description; + } + + public void setDescription(String description) { + Description = description; + } + + public String getName() { + return Name; + } + + public void setName(String name) { + Name = name; + } + + public List<Relation> getRelations() { + return Relations; + } + + public void setRelations(List<Relation> relations) { + Relations = relations; + } + + public String getSubCategory() { + return SubCategory; + } + + public void setSubCategory(String subCategory) { + SubCategory = subCategory; + } + + public Boolean getUpdateIfExist() { + return UpdateIfExist; + } + + public void setUpdateIfExist(Boolean updateIfExist) { + UpdateIfExist = updateIfExist; + } + +} diff --git a/dcaedt_tools/src/main/java/tools/DeployTemplate.java b/dcaedt_tools/src/main/java/tools/DeployTemplate.java new file mode 100644 index 0000000..d5c368f --- /dev/null +++ b/dcaedt_tools/src/main/java/tools/DeployTemplate.java @@ -0,0 +1,157 @@ +package tools; +import com.google.gson.JsonObject; +import json.templateInfo.TemplateInfo; +import org.onap.sdc.dcae.composition.restmodels.CreateVFCMTRequest; +import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed; +import org.onap.sdc.dcae.composition.util.DcaeBeConstants; +import 
org.springframework.web.client.HttpServerErrorException; +import utilities.IDcaeRestClient; +import utilities.IReport; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; + + +public class DeployTemplate { + private static final String FAILED_UPDATE_VFCMT = "Failed update vfcmt: "; + private static final String FAILED = "failed"; + private final IReport report; + private final IDcaeRestClient dcaeRestClient; + private LoggerError errLogger = LoggerError.getInstance(); + private LoggerDebug debugLogger = LoggerDebug.getInstance(); + + DeployTemplate(IReport report, IDcaeRestClient dcaeRestClient) { + + this.report = report; + this.dcaeRestClient = dcaeRestClient; + } + + public void deploy(Map<TemplateInfo, JsonObject> templateInfoToJsonObjectMap) { + List<ResourceDetailed> vfcmtList = dcaeRestClient.getAllVfcmts(); + + List<TemplateInfo> updatedTemplateInfos = new ArrayList<>(); + vfcmtList.stream().forEach(vfcmt -> + templateInfoToJsonObjectMap.keySet().stream().filter(templateInfo -> templateInfo.getName().equalsIgnoreCase(vfcmt.getName())).forEach(templateInfo -> { + update(vfcmt, templateInfo, templateInfoToJsonObjectMap.get(templateInfo)); + updatedTemplateInfos.add(templateInfo); + })); + templateInfoToJsonObjectMap.keySet().stream() + .filter(templateInfo -> !updatedTemplateInfos.contains(templateInfo)) + .forEach(templateInfo -> createNew(templateInfo, templateInfoToJsonObjectMap.get(templateInfo))); + + verify(templateInfoToJsonObjectMap); + } + + private void verify(Map<TemplateInfo, JsonObject> templateInfoToJsonObjectMap) { + AtomicInteger foundCount = new AtomicInteger(); + debugLogger.log("Starting deployment verification"); + List<ResourceDetailed> vfcmtList = dcaeRestClient.getAllVfcmts(); + + templateInfoToJsonObjectMap.keySet().stream() + .forEach(templateInfo -> vfcmtList.stream() + .filter(vfcmt -> vfcmt.getName().equalsIgnoreCase(templateInfo.getName())) + .forEach(vfcmt -> foundCount.getAndIncrement())); + if (foundCount.get() == templateInfoToJsonObjectMap.size()) { + debugLogger.log("Deployment verification finished successfully"); + } else { + String msg = "Deployment verification finished with errors: found only " + foundCount.get() + " of " + templateInfoToJsonObjectMap.size() + " vfcmts"; + report.addErrorMessage(msg); + errLogger.log(msg); + } + } + + private void createNew(TemplateInfo templateInfo, JsonObject jsonObject) { + try { + CreateVFCMTRequest createVFCMTRequest = new CreateVFCMTRequest(); + createVFCMTRequest.setName(templateInfo.getName()); + createVFCMTRequest.setDescription(templateInfo.getDescription()); + createVFCMTRequest.setSubcategory(templateInfo.getSubCategory()); + createVFCMTRequest.setCategory(templateInfo.getCategory()); + ResourceDetailed vfcmt = dcaeRestClient.createResource(createVFCMTRequest); + + jsonObject.addProperty("cid", vfcmt.getUuid()); + + saveAndCertify(jsonObject, vfcmt); + + } catch (HttpServerErrorException e) { + String msg = "Failed to create vfcmt: " + templateInfo.getName() + ", with general message: " + e.getMessage(); + report.addErrorMessage(msg); + errLogger.log(msg + " " + e); + } + } + + private void update(ResourceDetailed vfcmt, TemplateInfo templateInfo, JsonObject jsonObject) { + ResourceDetailed checkedoutVfcmt = vfcmt; + try { + Boolean checkoutChecking = checkUserIfResourceCheckedOut(dcaeRestClient.getUserId(), vfcmt); + if (checkoutChecking != null && checkoutChecking) { + 
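+                // TRUE here means checkUserIfResourceCheckedOut (documented below) found this VFCMT checked out by a different user; updating would conflict, so report and bail out.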
report.addErrorMessage(FAILED_UPDATE_VFCMT + vfcmt.getName() + ", cannot checkout vfcmt"); + return; + } + if (Boolean.TRUE.equals(templateInfo.getUpdateIfExist())) { // null-safe: updateIfExist may be absent from the template config + if (checkoutChecking == null) { + checkedoutVfcmt = dcaeRestClient.checkoutVfcmt(vfcmt.getUuid()); + } + if (checkedoutVfcmt != null) { + checkedoutVfcmt.setSubCategory(templateInfo.getSubCategory()); + checkedoutVfcmt.setCategory(templateInfo.getCategory()); + checkedoutVfcmt.setDescription(templateInfo.getDescription()); + dcaeRestClient.updateResource(checkedoutVfcmt); + saveAndCertify(jsonObject, checkedoutVfcmt); + } + } else { + report.addNotUpdatedMessage("vfcmt: " + vfcmt.getName() + " found, but not updated (updateIfExist not enabled)."); + } + } catch (HttpServerErrorException e) { + String msg = FAILED_UPDATE_VFCMT + vfcmt.getName() + ", with general message: " + e.getMessage(); + report.addErrorMessage(msg); + errLogger.log(msg + " " + e); + } + } + + private void saveAndCertify(JsonObject jsonObject, ResourceDetailed checkedoutVfcmt) { + if (saveCompositionAndCertify(checkedoutVfcmt, jsonObject)) { + report.addUpdatedMessage("vfcmt: " + checkedoutVfcmt.getName() + " updated successfully"); + } else { + report.addErrorMessage("VFCMT " + checkedoutVfcmt.getName() + " failed to update"); + } + } + + private boolean saveCompositionAndCertify(ResourceDetailed vfcmt, JsonObject jsonObject) { + if (vfcmt.getUuid() == null) { + return false; + } + + debugLogger.log("Saving cdump of: " + vfcmt.getName() + " vfcmt"); + debugLogger.log(jsonObject.toString()); + + String responseEntity = dcaeRestClient.saveComposition(vfcmt.getUuid(), jsonObject.toString()); + if (responseEntity.equalsIgnoreCase(FAILED)) { + String msg = "Failed saving vfcmt: " + vfcmt.getName(); + report.addErrorMessage(msg); + errLogger.log(msg); + return false; + } + dcaeRestClient.certifyVfcmt(vfcmt.getUuid()); + return true; + } + + // Returns TRUE when the asset is checked out by another user, FALSE when it is checked out by this user, and null when it is not checked out at all. + private Boolean checkUserIfResourceCheckedOut(String userId, ResourceDetailed asset) { + if (DcaeBeConstants.LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT == DcaeBeConstants.LifecycleStateEnum.findState(asset.getLifecycleState())) { + String lastUpdaterUserId = asset.getLastUpdaterUserId(); + if (lastUpdaterUserId != null && !lastUpdaterUserId.equals(userId)) { + String msg = "User conflicts. 
Operation not allowed for user "+userId+" on resource checked out by "+lastUpdaterUserId; + report.addErrorMessage(msg); + errLogger.log(msg); + return true; + } else { + return false; + } + } + return null; + } +} diff --git a/dcaedt_tools/src/main/java/tools/EntitiesRetriever.java b/dcaedt_tools/src/main/java/tools/EntitiesRetriever.java new file mode 100644 index 0000000..0c31ddf --- /dev/null +++ b/dcaedt_tools/src/main/java/tools/EntitiesRetriever.java @@ -0,0 +1,33 @@ +package tools; + +import json.response.ElementsResponse.Element; +import json.response.ItemsResponse.Item; +import utilities.IDcaeRestClient; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + + +public class EntitiesRetriever { + private final IDcaeRestClient dcaeRestClient; + + EntitiesRetriever(IDcaeRestClient dcaeRestClient) { + + this.dcaeRestClient = dcaeRestClient; + } + + public Map<String, List<Item>> getElementsByFolder() { + List<Element> elementList = dcaeRestClient.getElements(); + Map<String, List<Item>> elementsByFolderNames = new HashMap<>(); + + for (Element element : elementList) { + List<Item> items = dcaeRestClient.getItem(element.getName()); + if (items == null) { + continue; + } + elementsByFolderNames.put(element.getName(), items); + } + return elementsByFolderNames; + } +} diff --git a/dcaedt_tools/src/main/java/tools/LoggerDebug.java b/dcaedt_tools/src/main/java/tools/LoggerDebug.java new file mode 100644 index 0000000..df177ed --- /dev/null +++ b/dcaedt_tools/src/main/java/tools/LoggerDebug.java @@ -0,0 +1,13 @@ +package tools; + +public class LoggerDebug { + private static LoggerDebug instance = new LoggerDebug(); + + public static LoggerDebug getInstance() { + return instance; + } + + public void log(String logLine) { + System.out.println(logLine); + } +} diff --git a/dcaedt_tools/src/main/java/tools/LoggerError.java b/dcaedt_tools/src/main/java/tools/LoggerError.java new file mode 100644 index 0000000..325bfc1 --- /dev/null +++ b/dcaedt_tools/src/main/java/tools/LoggerError.java @@ -0,0 +1,13 @@ +package tools; + +public class LoggerError { + private static LoggerError instance = new LoggerError(); + + public static LoggerError getInstance() { + return instance; + } + + public void log(String logLine) { + System.err.println(logLine); + } +} diff --git a/dcaedt_tools/src/main/java/tools/Main.java b/dcaedt_tools/src/main/java/tools/Main.java new file mode 100644 index 0000000..72b6e42 --- /dev/null +++ b/dcaedt_tools/src/main/java/tools/Main.java @@ -0,0 +1,80 @@ +package tools; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import com.google.gson.JsonObject; +import json.Environment; +import json.response.ItemsResponse.Item; +import json.templateInfo.DeployTemplateConfig; +import json.templateInfo.TemplateInfo; + +import utilities.IDcaeRestClient; +import utilities.IReport; +import utilities.Report; +import utilities.DcaeRestClient; + +import java.io.*; +import java.net.ConnectException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +public class Main { + private static final String CONFIG_FILE = "DcaeDtDeployConfigFile"; + private static final String ENVIRONMENT_CONFIG = "environment.resource"; + + private static LoggerError errLogger = LoggerError.getInstance(); + private static LoggerDebug debugLogger = LoggerDebug.getInstance(); + + private Main() { + throw new IllegalAccessError("Utility class"); + } + + public static void main(String[] args) { + System.setProperty("logback.configurationFile", "conf/logback.xml"); + 
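+        // Assumed CLI usage, inferred from initConfiguration below: args[0] is the environment JSON path, args[1] is the template configuration JSON path.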
debugLogger.log("Starting VFCMT template deployment"); + if (args.length != 2) { + errLogger.log("Got " + args.length + ", but expecting exactly 2 arguments ONLY!"); + return; + } + debugLogger.log("Arguments:"); + Arrays.stream(args).forEach(arg -> debugLogger.log(arg)); + + initConfiguration(args); + IReport report = new Report(); + try { + ObjectMapper mapper = new ObjectMapper(); + DeployTemplateConfig deployTemplateConfig = mapper.readValue(new File(System.getProperty(CONFIG_FILE, "conf/config.json")), DeployTemplateConfig.class); + Environment environment = mapper.readValue(new File(System.getProperty(ENVIRONMENT_CONFIG, "conf/environment.json")), Environment.class); + + IDcaeRestClient dcaeRestClient = new DcaeRestClient(environment.getCredential()); + dcaeRestClient.init(environment); + + EntitiesRetriever entitiesRetriever = new EntitiesRetriever(dcaeRestClient); + Map<String, List<Item>> elementsByFolderNames = entitiesRetriever.getElementsByFolder(); + + TemplateContainer templateContainer = new TemplateContainer(report, dcaeRestClient, deployTemplateConfig.getTemplateInfo(), elementsByFolderNames); + Map<TemplateInfo, JsonObject> templateInfoToJsonObjectMap = templateContainer.getCdumps(); + + DeployTemplate deployTemplate = new DeployTemplate(report, dcaeRestClient); + deployTemplate.deploy(templateInfoToJsonObjectMap); + + debugLogger.log( "VFCMT template deployment completed successfully"); + } catch (RuntimeException e) { + errLogger.log("ERROR - Template deployment failed with error " + e); + } catch (ConnectException e) { + errLogger.log( "ERROR - Failed connection to server, are you on AT&T network? {}" + e); + } catch (IOException e) { + errLogger.log( "ERROR - Fatal Error! " + e); + } finally { + debugLogger.log(report.toString()); + } + } + + private static void initConfiguration(String[] args) { + System.setProperty(ENVIRONMENT_CONFIG, args[0]); + System.setProperty(CONFIG_FILE, args[1]); + } + + +} diff --git a/dcaedt_tools/src/main/java/tools/NodeData.java b/dcaedt_tools/src/main/java/tools/NodeData.java new file mode 100644 index 0000000..f89105b --- /dev/null +++ b/dcaedt_tools/src/main/java/tools/NodeData.java @@ -0,0 +1,40 @@ +package tools; + +import com.google.gson.JsonArray; +import com.google.gson.JsonObject; + +public class NodeData { + private final JsonArray capabilities; + private final JsonArray requirements; + private final JsonArray properties; + private final JsonObject typeInfo; + private final String nodeName; + + NodeData(JsonArray capabilities, JsonArray requirements, JsonArray properties, JsonObject typeInfo, String nodeName) { + this.capabilities = capabilities; + this.requirements = requirements; + this.properties = properties; + this.typeInfo = typeInfo; + this.nodeName = nodeName; + } + + public JsonArray getCapabilities() { + return capabilities; + } + + public JsonArray getRequirements() { + return requirements; + } + + public JsonArray getProperties() { + return properties; + } + + public JsonObject getTypeInfo() { + return typeInfo; + } + + public String getName() { + return nodeName; + } +} diff --git a/dcaedt_tools/src/main/java/tools/TemplateContainer.java b/dcaedt_tools/src/main/java/tools/TemplateContainer.java new file mode 100644 index 0000000..ee16d22 --- /dev/null +++ b/dcaedt_tools/src/main/java/tools/TemplateContainer.java @@ -0,0 +1,329 @@ +package tools; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import 
json.response.ItemsResponse.Item; +import json.templateInfo.Composition; +import json.templateInfo.Relation; +import json.templateInfo.TemplateInfo; +import org.apache.commons.lang3.StringUtils; +import utilities.IDcaeRestClient; +import utilities.IReport; + +import java.util.*; + + +public class TemplateContainer { + private static final String NODES = "nodes"; + private static final String RELATIONSHIP = "relationship"; + private static long nidCounter = 0; + private final IReport report; + private final IDcaeRestClient dcaeRestClient; + private final List<TemplateInfo> templateInfos; + private final Map<String, List<Item>> elementsByFolderNames; + private LoggerDebug debugLogger = LoggerDebug.getInstance(); + + + public TemplateContainer(IReport report, IDcaeRestClient dcaeRestClient, List<TemplateInfo> templateInfos, Map<String, List<Item>> elementsByFolderNames) { + this.report = report; + this.dcaeRestClient = dcaeRestClient; + this.templateInfos = templateInfos; + this.elementsByFolderNames = elementsByFolderNames; + } + + private List<Item> findTemplate(TemplateInfo templateInfo) { + List<Item> items = new ArrayList<>(); + elementsByFolderNames.keySet().forEach(folderName -> items.addAll(returnMatchedTemplate(folderName, templateInfo))); + if (items.size() == templateInfo.getComposition().size()) { + return items; + } + return new ArrayList<>(); + } + + private List<Item> returnMatchedTemplate(String folderName, TemplateInfo templateInfo) { + List<Item> items = new ArrayList<>(); + elementsByFolderNames.get(folderName).forEach(item -> templateInfo.getComposition().forEach(composition -> { + if (composition.getType().equalsIgnoreCase(item.getName())) { + items.add(item); + } + })); + return items; + } + + + public Map<TemplateInfo, JsonObject> getCdumps() { + Map<TemplateInfo, JsonObject> templateInfoToJsonObjectMap = new HashMap<>(); + for (TemplateInfo templateInfo : templateInfos) { + List<Item> items = findTemplate(templateInfo); + if (items == null || items.isEmpty()) { + report.addErrorMessage("vfcmt: " + templateInfo.getName() + ". 
DCAE Component not found"); + continue; + } + templateInfoToJsonObjectMap.put(templateInfo, getCdumpJsonObject(items, templateInfo)); + } + return templateInfoToJsonObjectMap; + } + + private JsonObject getCdumpJsonObject(List<Item> items, TemplateInfo templateInfo) { + JsonObject cdumpJsonObject = generateCdumpInput(templateInfo); + Map<Item, Map<String, NodeData>> itemMapHashMap = new HashMap<>(); + JsonArray relationsJsonArray = new JsonArray(); + for (Item item : items) { + debugLogger.log("Creating cdump for item: " + item.getName()); + JsonArray jsonArrayNode = cdumpJsonObject.getAsJsonArray(NODES); + JsonParser jsonParser = new JsonParser(); + JsonArray allNodeTemplates = jsonParser.parse(dcaeRestClient.getItemModel(item.getItemId())).getAsJsonObject().get("data").getAsJsonObject().get("model").getAsJsonObject().get(NODES).getAsJsonArray(); + Map<String, NodeData> stringRelationsDataMap = new HashMap<>(); + for (JsonElement nodeElement : allNodeTemplates) { + JsonObject responseModelJson = nodeElement.getAsJsonObject(); + JsonObject responseTypeInfoJson = jsonParser.parse(dcaeRestClient.getItemType(item.getItemId(), responseModelJson.get("type").getAsString())).getAsJsonObject().get("data").getAsJsonObject().get("type").getAsJsonObject(); + JsonObject jsonObjectElement = newVfcmtJSON(responseModelJson.get("name").getAsString(), item.getModels().get(0).getItemId()); + jsonObjectElement.addProperty("id", responseTypeInfoJson.get("itemId").getAsString().split("/")[0]); + String nid = "n." + new Date().getTime() + "." + nidCounter++; + jsonObjectElement.addProperty("nid", nid); + NodeData nodeData = createNodeData(responseModelJson, responseTypeInfoJson, responseModelJson.get("name").getAsString()); + stringRelationsDataMap.put(nid, nodeData); + jsonObjectElement.add("capabilities", nodeData.getCapabilities()); + jsonObjectElement.add("requirements", nodeData.getRequirements()); + jsonObjectElement.add("properties", nodeData.getProperties()); + jsonObjectElement.add("typeinfo", nodeData.getTypeInfo()); + JsonObject typeJsonObject = new JsonObject(); + typeJsonObject.addProperty("name", responseModelJson.get("type").getAsString()); + jsonObjectElement.add("type", typeJsonObject); + JsonElement ndataElement = createNData(responseModelJson.get("name").getAsString(), nid); + jsonObjectElement.add("ndata", ndataElement); + jsonArrayNode.add(jsonObjectElement); + } + itemMapHashMap.put(item, stringRelationsDataMap); + } + JsonElement jsonElement = createTemplateInfoRelations(templateInfo, itemMapHashMap); + if (jsonElement != null && jsonElement.isJsonArray()) { + for (JsonElement element : jsonElement.getAsJsonArray()) { + relationsJsonArray.add(element); + } + } + jsonElement = createSelfRelations(itemMapHashMap); + if (jsonElement != null && jsonElement.isJsonArray()) { + for (JsonElement element : jsonElement.getAsJsonArray()) { + relationsJsonArray.add(element); + } + + } + cdumpJsonObject.add("relations", relationsJsonArray); + return cdumpJsonObject; + } + + //We need it only for printing the relations (front end requirement) + private JsonElement createNData(String name, String nid) { + JsonObject ndataElement = new JsonObject(); + ndataElement.addProperty("name", nid); + ndataElement.addProperty("label", name); + ndataElement.addProperty("x",438); + ndataElement.addProperty("y",435); + ndataElement.addProperty("px",437); + ndataElement.addProperty("py",434); + ndataElement.add("ports", new JsonArray()); + ndataElement.addProperty("radius", 30); + + return ndataElement; + } + + 
private JsonElement createSelfRelations(Map<Item, Map<String, NodeData>> nodeDataByNidByItem) { + JsonArray jsonArrayRelations = new JsonArray(); + for (Item item : nodeDataByNidByItem.keySet()) { + Map<String, NodeData> nodeDataByNid = nodeDataByNidByItem.get(item); + if (nodeDataByNid.size() < 2) { + continue; + } + Map<JsonObject, String> nidListByRequirement = new HashMap<>(); + for (String nid : nodeDataByNid.keySet()) { + JsonArray jsonArrayRequirements = nodeDataByNid.get(nid).getRequirements(); + for (JsonElement requirement : jsonArrayRequirements) { + JsonObject jsonObject = requirement.getAsJsonObject(); + if (jsonObject.has("node")) { + nidListByRequirement.put(jsonObject, nid); + } + } + } + for (JsonObject requirement : nidListByRequirement.keySet()) { + String toNodeName = requirement.get("node").toString().replaceAll("\"", ""); + boolean isFound = false; + NodeData toNodeData; + String toNId = null; + for (String nid : nodeDataByNid.keySet()) { + toNodeData = nodeDataByNid.get(nid); + toNId = nid; + if (toNodeData.getName().equalsIgnoreCase(toNodeName)) { + isFound = true; + break; + } + } + if (isFound) { + JsonObject relationElement = new JsonObject(); + NodeData fromNode = nodeDataByNidByItem.get(item).get(nidListByRequirement.get(requirement)); + relationElement.addProperty("rid", "ink." + nidListByRequirement.get(requirement) + "." + nidCounter++); + relationElement.addProperty("n1", nidListByRequirement.get(requirement)); + relationElement.addProperty("name1", fromNode.getName()); + JsonObject metaData = new JsonObject(); + metaData.addProperty("n1", nidListByRequirement.get(requirement)); + metaData.addProperty("p1", requirement.get("name").toString().replaceAll("\"", "")); + relationElement.addProperty("n2", toNId); + relationElement.addProperty("name2", toNodeName); + metaData.addProperty("n2", toNId); + String capabilityFullName = requirement.get("capability").getAsJsonObject().get("name").toString(); + String capabilityShortName = StringUtils.substringAfterLast(capabilityFullName, "."); + metaData.addProperty("p2", capabilityShortName.replaceAll("\"", "")); + JsonArray relationship = new JsonArray(); + relationship.add(fromNode.getName().replaceAll("\"", "")); + JsonElement requirementRelationship = requirement.get(RELATIONSHIP); + if (requirementRelationship != null) { + relationship.add(requirementRelationship.getAsJsonObject().get("type").toString().replaceAll("\"", "")); + } else { + relationship.add((JsonElement) null); + } + + relationship.add(requirement.get("name").toString().replaceAll("\"", "")); + metaData.add(RELATIONSHIP, relationship); + relationElement.add("meta", metaData); + jsonArrayRelations.add(relationElement); + } + } + } + return jsonArrayRelations; + } + + private NodeData createNodeData(JsonObject responseModelJson, JsonObject responseTypeInfoJson, String nodeName) { + JsonArray capabilities = responseModelJson.get("capabilities").getAsJsonArray(); + JsonArray requirements = responseModelJson.get("requirements").getAsJsonArray(); + JsonArray properties = responseModelJson.get("properties").getAsJsonArray(); + return new NodeData(capabilities, requirements, properties, responseTypeInfoJson, nodeName); + } + + private JsonArray createTemplateInfoRelations(TemplateInfo templateInfo, Map<Item, Map<String, NodeData>> nodeDataByNidByItem) { + JsonArray jsonArrayRelations = new JsonArray(); + + if (templateInfo.getRelations() == null) { + return null; + } + for (Relation relation : templateInfo.getRelations()) { + JsonObject metaData = new 
JsonObject();
+ JsonObject relationElement = new JsonObject();
+ String fromComponent = relation.getFromComponent();
+ String toComponent = relation.getToComponent();
+ String fromComponentAlias = StringUtils.substringBefore(fromComponent, ".");
+ String fromComponentNodeName = StringUtils.substringAfterLast(fromComponent, ".");
+ String toComponentAlias = StringUtils.substringBefore(toComponent, ".");
+ String toComponentNodeName = StringUtils.substringAfterLast(toComponent, ".");
+ boolean findTo = false;
+ boolean findFrom = false;
+ for (Item item : nodeDataByNidByItem.keySet()) {
+ Map<String, NodeData> nodeDataByNid = nodeDataByNidByItem.get(item);
+ for (String nid : nodeDataByNid.keySet()) {
+ NodeData currentNodeData = nodeDataByNid.get(nid);
+
+ Optional<Composition> isFoundComposition = templateInfo.getComposition().stream()
+ .filter(element -> fromComponentAlias.equalsIgnoreCase(element.getAlias()) && element.getType().equalsIgnoreCase(item.getName()) && fromComponentNodeName.equalsIgnoreCase(currentNodeData.getName())).findAny();
+ if (isFoundComposition.isPresent()) {
+ boolean isFound = findNode(relation.getFromRequirement(), currentNodeData.getRequirements());
+ if (isFound) {
+ relationElement.addProperty("rid", "ink." + nid + "." + nidCounter++);
+ relationElement.addProperty("n1", nid);
+ relationElement.addProperty("name1", currentNodeData.getName());
+ metaData.addProperty("n1", nid);
+ metaData.addProperty("p1", relation.getFromRequirement());
+ JsonArray relationship = new JsonArray();
+ relationship.add(fromComponentNodeName);
+ String requirementRelationshipType = findRequirementType(relation.getFromRequirement(), currentNodeData.getRequirements());
+ if (requirementRelationshipType != null) {
+ relationship.add(requirementRelationshipType);
+ } else {
+ relationship.add((JsonElement) null);
+ }
+ relationship.add(toComponentNodeName);
+ metaData.add(RELATIONSHIP, relationship);
+ findFrom = true;
+ }
+
+ }
+
+ isFoundComposition = templateInfo.getComposition().stream()
+ .filter(element -> toComponentAlias.equalsIgnoreCase(element.getAlias()) && element.getType().equalsIgnoreCase(item.getName()) && toComponentNodeName.equalsIgnoreCase(currentNodeData.getName())).findAny();
+ if (isFoundComposition.isPresent()) {
+ boolean isFound = findNode(relation.getToCapability(), currentNodeData.getCapabilities());
+ if (isFound) {
+ relationElement.addProperty("n2", nid);
+ relationElement.addProperty("name2", currentNodeData.getName());
+ metaData.addProperty("n2", nid);
+ metaData.addProperty("p2", relation.getToCapability());
+ findTo = true;
+ }
+ }
+ }
+ }
+ if (findTo && findFrom) {
+ relationElement.add("meta", metaData);
+ jsonArrayRelations.add(relationElement);
+ } else {
+ report.addErrorMessage("Didn't find matching relation from: " + relation.getFromComponent() + ", to: " + relation.getToComponent());
+ }
+ }
+
+ return jsonArrayRelations;
+ }
+
+ private String findRequirementType(String fromRequirement, JsonArray requirements) {
+ Iterator<JsonElement> jsonElements = requirements.iterator();
+ while (jsonElements.hasNext()) {
+ JsonObject jsonObject = (JsonObject) jsonElements.next();
+ String name = jsonObject.get("name").getAsString();
+ if (fromRequirement.equals(name) && jsonObject.has("type")) {
+ return jsonObject.get("type").toString().replaceAll("\"", "");
+ }
+ }
+
+ return null;
+ }
+
+ private boolean findNode(String endPoint, JsonArray node) {
+ Iterator<JsonElement> jsonElements = node.iterator();
+ while (jsonElements.hasNext()) {
+ JsonObject
jsonObject = (JsonObject) jsonElements.next(); + String name = jsonObject.get("name").getAsString(); + if (endPoint.equals(name)) { + return true; + } + } + + return false; + } + + private JsonObject newVfcmtJSON(String name, String description) { + JsonObject json = new JsonObject(); + json.addProperty("name", name); + json.addProperty("description", description); + return json; + } + + private JsonObject generateCdumpInput(TemplateInfo templateInfo) { + JsonObject json = new JsonObject(); + json.addProperty("version", 0); + json.addProperty("flowType", templateInfo.getName()); + json.add(NODES, new JsonArray()); + + json.add("inputs", new JsonArray()); + json.add("outputs", new JsonArray()); + + return json; + + } +} diff --git a/dcaedt_tools/src/main/java/utilities/DcaeRestClient.java b/dcaedt_tools/src/main/java/utilities/DcaeRestClient.java new file mode 100644 index 0000000..5e81038 --- /dev/null +++ b/dcaedt_tools/src/main/java/utilities/DcaeRestClient.java @@ -0,0 +1,151 @@ +package utilities; + +import json.Credential; +import json.Environment; +import json.response.ElementsResponse.Element; +import json.response.ElementsResponse.ElementsResponse; +import json.response.ItemsResponse.Item; +import json.response.ItemsResponse.ItemsResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.message.BasicHeader; +import org.onap.sdc.dcae.composition.restmodels.CreateVFCMTRequest; +import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed; +import org.springframework.http.*; +import org.springframework.http.client.HttpComponentsClientHttpRequestFactory; +import org.springframework.stereotype.Component; +import org.springframework.web.client.RestTemplate; + +import javax.annotation.PostConstruct; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +@Component("dcaerestclient") +public class DcaeRestClient implements IDcaeRestClient { + + private static final String GET_RESOURCES_BY_CATEGORY = "/getResourcesByCategory"; + private static final String CREATE_VFCMT = "/createVFCMT"; + private static final String ELEMENTS = "/elements"; + + + private static final String ECOMP_INSTANCE_ID_HEADER = "X-ECOMP-InstanceID"; + private static final String USER_ID_HEADER = "USER_ID"; + + + private String uri; + private RestTemplate client; + private Credential credential; + + public DcaeRestClient(Credential credential) { + this.credential = credential; + } + + @Override + public String getUserId() { + return credential.getUsername(); + } + + @PostConstruct + @Override + public void init(Environment environment) { + credential = environment.getCredential(); + CloseableHttpClient httpClient = HttpClientBuilder.create().setDefaultHeaders(defaultHeaders(credential)).build(); + HttpComponentsClientHttpRequestFactory requestFactory = new HttpComponentsClientHttpRequestFactory(); + requestFactory.setHttpClient(httpClient); + client = new RestTemplate(requestFactory); + uri = String.format("%s:%s%s", environment.getDcaeBeHost(), environment.getDcaeBePort(), environment.getApiPath()); + } + + private List<BasicHeader> defaultHeaders(Credential credential) { + List<BasicHeader> headers = new ArrayList<>(); + headers.add(new BasicHeader(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON_VALUE)); + headers.add(new BasicHeader(HttpHeaders.ACCEPT, MediaType.APPLICATION_OCTET_STREAM_VALUE)); + headers.add(new 
BasicHeader(ECOMP_INSTANCE_ID_HEADER, credential.getUsername())); + return headers; + } + + @Override + public List<ResourceDetailed> getAllVfcmts() { + String url = buildRequestPath(GET_RESOURCES_BY_CATEGORY); + return Arrays.asList(client.getForObject(url, ResourceDetailed[].class)); + } + + @Override + public ResourceDetailed createResource(CreateVFCMTRequest resource) { + String url = buildRequestPath(CREATE_VFCMT); + return client.postForObject(url, new HttpEntity<>(resource, postResourceHeaders(credential.getUsername())), ResourceDetailed.class); + } + + @Override + public ResourceDetailed checkoutVfcmt(String vfcmtUuid) { + String url = buildRequestPath(String.format("/checkout/vfcmt/%s", vfcmtUuid)); + ResponseEntity<ResourceDetailed> resourceDetailedResponse = client.exchange(url, HttpMethod.PUT, new HttpEntity(postResourceHeaders(credential.getUsername())), ResourceDetailed.class); + + return resourceDetailedResponse.getBody(); + } + + @Override + public ResourceDetailed checkinVfcmt(String vfcmtUuid) { + String url = buildRequestPath(String.format("/checkin/vfcmt/%s", vfcmtUuid)); + ResponseEntity<ResourceDetailed> resourceDetailedResponse = client.exchange(url, HttpMethod.PUT, new HttpEntity(postResourceHeaders(credential.getUsername())), ResourceDetailed.class); + + return resourceDetailedResponse.getBody(); + } + + @Override + public List<Element> getElements() { + String url = buildRequestPath(ELEMENTS); + return client.getForObject(url, ElementsResponse.class).getData().getElements(); + } + @Override + public List<Item> getItem(String element) { + String url = buildRequestPath("/"+ element + ELEMENTS); + return client.getForObject(url, ItemsResponse.class).getData().getElement() == null ? null : client.getForObject(url, ItemsResponse.class).getData().getElement().getItems(); + } + + @Override + public String getItemModel(String elementId) { + String url = buildRequestPath("/"+ elementId +"/model"); + return client.getForObject(url, String.class); + } + @Override + public String getItemType(String elementId, String type) { + String url = buildRequestPath("/"+ elementId +"/type/"+ type +"/"); + return client.getForObject(url, String.class); + } + + @Override + public String saveComposition(String componentId, String body) { + String url = buildRequestPath("/saveComposition/" + componentId); + ResponseEntity<String> resourceDetailedResponse = client.exchange(url, HttpMethod.POST, new HttpEntity<>(body, postResourceHeaders(credential.getUsername())), String.class); + + return resourceDetailedResponse.getBody(); + } + + @Override + public String certifyVfcmt(String vfcmtUuid) { + String url = buildRequestPath(String.format("/certify/vfcmt/%s", vfcmtUuid)); + ResponseEntity<String> resourceDetailedResponse = client.exchange(url, HttpMethod.PUT, new HttpEntity(postResourceHeaders(credential.getUsername())), String.class); + + return resourceDetailedResponse.getBody(); + } + + private HttpHeaders postResourceHeaders(String userId) { + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON_UTF8); + headers.add(USER_ID_HEADER, userId); + return headers; + } + + private String buildRequestPath(String... 
args){ + return uri + Stream.of(args).collect(Collectors.joining()); + } + + @Override + public void updateResource(ResourceDetailed vfcmt) { + // Do nothing + } +} diff --git a/dcaedt_tools/src/main/java/utilities/IDcaeRestClient.java b/dcaedt_tools/src/main/java/utilities/IDcaeRestClient.java new file mode 100644 index 0000000..677175b --- /dev/null +++ b/dcaedt_tools/src/main/java/utilities/IDcaeRestClient.java @@ -0,0 +1,37 @@ +package utilities; + +import json.Environment; +import json.response.ElementsResponse.Element; +import json.response.ItemsResponse.Item; +import org.onap.sdc.dcae.composition.restmodels.CreateVFCMTRequest; +import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed; + +import java.util.List; + +public interface IDcaeRestClient { + void init(Environment environment); + + String getUserId(); + + List<ResourceDetailed> getAllVfcmts(); + + ResourceDetailed createResource(CreateVFCMTRequest resource); + + ResourceDetailed checkoutVfcmt(String vfcmtUuid); + + ResourceDetailed checkinVfcmt(String vfcmtUuid); + + List<Element> getElements(); + + List<Item> getItem(String element); + + String getItemModel(String elementId); + + String getItemType(String elementId, String type); + + String saveComposition(String componentId, String body); + + String certifyVfcmt(String vfcmtUuid); + + void updateResource(ResourceDetailed vfcmt); +} diff --git a/dcaedt_tools/src/main/java/utilities/IReport.java b/dcaedt_tools/src/main/java/utilities/IReport.java new file mode 100644 index 0000000..4c5f8af --- /dev/null +++ b/dcaedt_tools/src/main/java/utilities/IReport.java @@ -0,0 +1,8 @@ +package utilities; + +public interface IReport { + void addCreatedMessage(String message); + void addUpdatedMessage(String message); + void addNotUpdatedMessage(String message); + void addErrorMessage(String message); +} diff --git a/dcaedt_tools/src/main/java/utilities/Report.java b/dcaedt_tools/src/main/java/utilities/Report.java new file mode 100644 index 0000000..8f79056 --- /dev/null +++ b/dcaedt_tools/src/main/java/utilities/Report.java @@ -0,0 +1,54 @@ +package utilities; + +import java.util.ArrayList; +import java.util.List; + +public class Report implements IReport { + + private List<String> created = new ArrayList<>(); + private List<String> updated = new ArrayList<>(); + private List<String> notUpdated = new ArrayList<>(); + private List<String> error = new ArrayList<>(); + + @Override + public void addCreatedMessage(String message) { + created.add(message); + } + + @Override + public void addUpdatedMessage(String message) { + updated.add(message); + } + + @Override + public void addNotUpdatedMessage(String message) { + notUpdated.add(message); + } + + @Override + public void addErrorMessage(String message) { + error.add(message); + } + + @Override + public String toString() { + StringBuilder stringBuilder = new StringBuilder(); + if (!created.isEmpty()) { + stringBuilder.append("Created:").append(System.lineSeparator()); + created.forEach(msg -> stringBuilder.append(msg).append(System.lineSeparator())); + } + if (!updated.isEmpty()) { + stringBuilder.append("Updated:").append(System.lineSeparator()); + updated.forEach(msg -> stringBuilder.append(msg).append(System.lineSeparator())); + } + if (!notUpdated.isEmpty()) { + stringBuilder.append("Not updated:").append(System.lineSeparator()); + notUpdated.forEach(msg -> stringBuilder.append(msg).append(System.lineSeparator())); + } + if (!error.isEmpty()) { + stringBuilder.append("Error:").append(System.lineSeparator()); + 
error.forEach(msg -> stringBuilder.append(msg).append(System.lineSeparator())); + } + return stringBuilder.toString(); + } +} diff --git a/dcaedt_tools/src/main/resources/conf/config.json b/dcaedt_tools/src/main/resources/conf/config.json new file mode 100644 index 0000000..5b0f542 --- /dev/null +++ b/dcaedt_tools/src/main/resources/conf/config.json @@ -0,0 +1,51 @@ +{ + "templateInfo": [{ + "name": "SNMP Fault", + "description": "SNMP FM with Map-Supplement-Enrich", + "category": "Template", + "subCategory":"Base Monitoring Template", + "updateIfExist": "true", + "composition": [{ + "type": "Map", + "alias": "mapper" + }, { + "type": "Supplement", + "alias": "sup" + }, { + "type": "Enrich", + "alias": "enrich" + }] + }, + { + "name": "FOI", + "description": "FOI SFTP with FOI-Collector and Docker-Map", + "category": "Template", + "subCategory":"Base Monitoring Template", + "updateIfExist": "true", + "composition": [{ + "type": "foi", + "alias": "collector" + }, { + "type": "DockerMap", + "alias": "map" + }], + "relations": [{ + "fromComponent": "collector.foi", + "fromRequirement": "stream_publish_0", + "toComponent": "map.feed0", + "toCapability": "feed" + }] + }, + { + "name": "Syslog non-VES Collector", + "description": "Syslog flow with Syslog Collector", + "category": "Template", + "subCategory": "Base Monitoring Template", + "updateIfExist": "true", + "composition": [{ + "type": "Syslog", + "alias": "collector" + }] + } + ] +}
\ No newline at end of file diff --git a/dcaedt_tools/src/main/resources/conf/environment.json b/dcaedt_tools/src/main/resources/conf/environment.json new file mode 100644 index 0000000..abf66b9 --- /dev/null +++ b/dcaedt_tools/src/main/resources/conf/environment.json @@ -0,0 +1,12 @@ +{ + "dcaeBeHost": "http://localhost", + "dcaeBePort": "8446", + "apiPath": "", + "credential" :{ + "editor": "Carlos_Santana", + "username": "cs0008", + "password": "", + "firstName": "", + "lastName": "" + } +}
\ No newline at end of file diff --git a/dcaedt_tools/src/test/java/BaseTest.java b/dcaedt_tools/src/test/java/BaseTest.java new file mode 100644 index 0000000..49534a5 --- /dev/null +++ b/dcaedt_tools/src/test/java/BaseTest.java @@ -0,0 +1,151 @@ +import json.response.ElementsResponse.Element; +import json.response.ItemsResponse.Item; +import json.response.ItemsResponse.Model; +import org.junit.Before; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; +import org.onap.sdc.dcae.composition.restmodels.sdc.ResourceDetailed; +import utilities.IDcaeRestClient; +import utilities.IReport; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyString; +import static org.mockito.Mockito.when; + +@RunWith(MockitoJUnitRunner.class) +abstract class BaseTest { + static final String USER_ID = "userId"; + static final String TEMPLATE_INFO_NAME = "templateInfoName"; + static final String VFCMT_NAME1 = "my vfcmt1"; + static final String UUID1 = "my uuid1"; + static final String VFCMT_NAME2 = "my vfcmt2"; + static final String UUID2 = "my uuid2"; + static final String VFCMT_NAME3 = "my vfcmt3"; + static final String UUID3 = "my uuid3"; + static final String ELEMENT_NAME1 = "my element1"; + static final String ELEMENT_NAME2 = "my element2"; + static final String ELEMENT_NAME3 = "my element3"; + static final String ALIAS_NAME1 = "my alias1"; + static final String ALIAS_NAME2 = "my alias2"; + static final String ALIAS_NAME3 = "my alias3"; + static final String ITEM_NAME1 = "my item1"; + static final String ITEM_NAME2 = "my item2"; + static final String ITEM_NAME3 = "my item3"; + + @Mock + IReport report; + @Mock + IDcaeRestClient dcaeRestClient; + + @Before + public void setup() { + when(dcaeRestClient.getUserId()).thenReturn(USER_ID); + mockGetAllVfcmt(); + mockGetElements(); + mockGetItems(); + mockGetItemModel(); + mockGetItemType(); + mockCheckoutVfcmtAndCreateResource(); + when(dcaeRestClient.saveComposition(anyString(), anyString())).thenReturn("Composition Created"); + } + + private void mockCheckoutVfcmtAndCreateResource() { + ResourceDetailed resourceDetailed = new ResourceDetailed(); + resourceDetailed.setName(VFCMT_NAME1); + resourceDetailed.setUuid(UUID1); + resourceDetailed.setLifecycleState("NOT_CERTIFIED_CHECKOUT"); + resourceDetailed.setLastUpdaterUserId(USER_ID); + when(dcaeRestClient.checkoutVfcmt(anyString())).thenReturn(resourceDetailed); + when(dcaeRestClient.createResource(any())).thenReturn(resourceDetailed); + } + + private void mockGetItemType() { + when(dcaeRestClient.getItemType(anyString(), anyString())).thenReturn("{\"data\":{\"type\":{\"itemId\":\"e45ec9d7-01df-4cb1-896f-aff2a6ca5a8b/tosca.dcae.nodes.cdapApp.Map\", \"typeinfo\":\"typeInfo\"}}}"); + } + + private void mockGetItemModel() { + when(dcaeRestClient.getItemModel(anyString())).thenReturn("{\"data\":{\"model\":{\"nodes\":[{\"capability\":{\"type\":\"someType\"}, \"type\":\"type\", \"name\":\"SomeNameFromRequirement\", \"requirements\":[{\"name\":\"SomeNameFromRequirement\"}], \"properties\":[{}], \"capabilities\":[{\"name\":\"SomeNameToCapability\"}],\"type\":\"type\"}]}}}", + "{\"data\":{\"model\":{\"nodes\":[{\"capability\":{\"type\":\"someType\"}, \"type\":\"type\", \"name\":\"SomeNameToCapability\", \"requirements\":[{\"name\":\"SomeNameFromRequirement\"}], \"properties\":[{}], 
\"capabilities\":[{\"name\":\"SomeNameToCapability\"}],\"type\":\"type\"}]}}}"); + } + + private void mockGetItems() { + when(dcaeRestClient.getItem(ELEMENT_NAME1)).thenReturn(null); + List<Item> items = new ArrayList<>(); + Item item = new Item(); + item.setName(ITEM_NAME1); + Model model = new Model(); + model.setItemId(""); + List<Model> models = Collections.singletonList(model); + item.setModels(models); + items.add(item); + item = new Item(); + item.setName(ITEM_NAME2); + item.setModels(models); + items.add(item); + when(dcaeRestClient.getItem(ELEMENT_NAME2)).thenReturn(items); + items = new ArrayList<>(); + item = new Item(); + item.setName(ITEM_NAME3); + item.setModels(models); + items.add(item); + when(dcaeRestClient.getItem(ELEMENT_NAME3)).thenReturn(items); + } + + private void mockGetElements() { + List<Element> elements = new ArrayList<>(); + Element element = new Element(); + element.setName(ELEMENT_NAME1); + elements.add(element); + element = new Element(); + element.setName(ELEMENT_NAME2); + elements.add(element); + element = new Element(); + element.setName(ELEMENT_NAME3); + elements.add(element); + when(dcaeRestClient.getElements()).thenReturn(elements); + } + + private void mockGetAllVfcmt() { + List<ResourceDetailed> resourceDetaileds = new ArrayList<>(); + ResourceDetailed resourceDetailed = new ResourceDetailed(); + resourceDetailed.setName(VFCMT_NAME1); + resourceDetailed.setUuid(UUID1); + resourceDetailed.setLifecycleState("NOT_CERTIFIED_CHECKOUT"); + resourceDetailed.setLastUpdaterUserId(USER_ID); + resourceDetaileds.add(resourceDetailed); + resourceDetailed = new ResourceDetailed(); + resourceDetailed.setName(VFCMT_NAME2); + resourceDetailed.setUuid(UUID2); + resourceDetaileds.add(resourceDetailed); + resourceDetailed = new ResourceDetailed(); + resourceDetailed.setName(VFCMT_NAME3); + resourceDetailed.setUuid(UUID3); + resourceDetaileds.add(resourceDetailed); + + List<ResourceDetailed> resourceDetaileds2 = new ArrayList<>(); + resourceDetailed = new ResourceDetailed(); + resourceDetailed.setName(VFCMT_NAME1); + resourceDetailed.setUuid(UUID1); + resourceDetailed.setLifecycleState("NOT_CERTIFIED_CHECKOUT"); + resourceDetailed.setLastUpdaterUserId(USER_ID); + resourceDetaileds2.add(resourceDetailed); + resourceDetailed = new ResourceDetailed(); + resourceDetailed.setName(VFCMT_NAME2); + resourceDetailed.setUuid(UUID2); + resourceDetaileds2.add(resourceDetailed); + resourceDetailed = new ResourceDetailed(); + resourceDetailed.setName(VFCMT_NAME3); + resourceDetailed.setUuid(UUID3); + resourceDetaileds2.add(resourceDetailed); + resourceDetailed = new ResourceDetailed(); + resourceDetailed.setName(TEMPLATE_INFO_NAME); + resourceDetailed.setUuid(UUID3); + resourceDetaileds2.add(resourceDetailed); + when(dcaeRestClient.getAllVfcmts()).thenReturn(resourceDetaileds, resourceDetaileds2); + } +} diff --git a/dcaedt_tools/src/test/java/DeployTemplateTest.java b/dcaedt_tools/src/test/java/DeployTemplateTest.java new file mode 100644 index 0000000..3fe0a7d --- /dev/null +++ b/dcaedt_tools/src/test/java/DeployTemplateTest.java @@ -0,0 +1,54 @@ +import com.google.gson.JsonObject; +import json.templateInfo.TemplateInfo; +import org.junit.Before; +import org.junit.Test; +import org.mockito.InjectMocks; +import tools.DeployTemplate; + +import java.util.*; + +import static org.mockito.Matchers.anyString; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class DeployTemplateTest extends BaseTest { + 
+ @InjectMocks + DeployTemplate deployTemplate; + + private Map<TemplateInfo, JsonObject> templateInfoToJsonObjectMap; + + @Before + @Override + public void setup() { + super.setup(); + templateInfoToJsonObjectMap = new HashMap<>(); + TemplateInfo templateInfo = new TemplateInfo(); + templateInfo.setName(VFCMT_NAME1); + templateInfo.setCategory("category"); + templateInfo.setSubCategory("subCategory"); + templateInfo.setDescription("description"); + templateInfo.setUpdateIfExist(true); + templateInfoToJsonObjectMap.put(templateInfo, new JsonObject()); + templateInfo = new TemplateInfo(); + templateInfo.setName(TEMPLATE_INFO_NAME); + templateInfo.setCategory("category"); + templateInfo.setSubCategory("subCategory"); + templateInfo.setDescription("description"); + templateInfoToJsonObjectMap.put(templateInfo, new JsonObject()); + } + + @Test + public void deployHappyFlow() { + deployTemplate.deploy(templateInfoToJsonObjectMap); + verify(report, times(0)).addErrorMessage(anyString()); + } + + @Test + public void deploy_failedSaving_failedVerify() { + when(dcaeRestClient.saveComposition(anyString(), anyString())).thenReturn("failed"); + deployTemplate.deploy(templateInfoToJsonObjectMap); + verify(report, times(4)).addErrorMessage(anyString()); + } +} diff --git a/dcaedt_tools/src/test/java/EntitiesRetrieverTest.java b/dcaedt_tools/src/test/java/EntitiesRetrieverTest.java new file mode 100644 index 0000000..ea7eb2b --- /dev/null +++ b/dcaedt_tools/src/test/java/EntitiesRetrieverTest.java @@ -0,0 +1,29 @@ + +import json.response.ItemsResponse.Item; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.InjectMocks; +import tools.EntitiesRetriever; + +import java.util.List; +import java.util.Map; + + +public class EntitiesRetrieverTest extends BaseTest { + + @InjectMocks + EntitiesRetriever entitiesRetriever; + + @Before + @Override + public void setup() { + super.setup(); + } + + @Test + public void getElementsByFolder_returns2Items() { + Map<String, List<Item>> result = entitiesRetriever.getElementsByFolder(); + Assert.assertTrue(result.size() == 2); + } +} diff --git a/dcaedt_tools/src/test/java/TemplateContainerTest.java b/dcaedt_tools/src/test/java/TemplateContainerTest.java new file mode 100644 index 0000000..7c3d287 --- /dev/null +++ b/dcaedt_tools/src/test/java/TemplateContainerTest.java @@ -0,0 +1,157 @@ +import com.google.gson.JsonObject; +import json.response.ItemsResponse.Item; +import json.response.ItemsResponse.Model; +import json.templateInfo.Composition; +import json.templateInfo.Relation; +import json.templateInfo.TemplateInfo; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import tools.TemplateContainer; + +import java.util.*; + +import static org.mockito.Matchers.anyString; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +public class TemplateContainerTest extends BaseTest { + + private TemplateContainer templateContainer; + + private List<TemplateInfo> templateInfos; + private Map<String, List<Item>> elementsByFolderNames; + @Before + @Override + public void setup() { + super.setup(); + templateInfos = new ArrayList<>(); + TemplateInfo templateInfo = new TemplateInfo(); + templateInfo.setName(TEMPLATE_INFO_NAME); + Composition composition = new Composition(); + composition.setType(ELEMENT_NAME3); + composition.setAlias(ALIAS_NAME3); + templateInfo.setComposition(Collections.singletonList(composition)); + templateInfos.add(templateInfo); + elementsByFolderNames = new 
HashMap<>();
+ Item item = new Item();
+ item.setName(ELEMENT_NAME3);
+ item.setItemId("");
+ Model model = new Model();
+ model.setItemId("");
+ List<Model> models = Collections.singletonList(model);
+ item.setModels(models);
+ List<Item> items = new ArrayList<>(Collections.singletonList(item));
+ elementsByFolderNames.put(TEMPLATE_INFO_NAME, items);
+
+ }
+
+ @Test
+ public void getCdumps_emptyTemplateInfo_returnEmptyMap() {
+ templateContainer = new TemplateContainer(report, dcaeRestClient, new ArrayList<>(), new HashMap<>());
+
+ Map<TemplateInfo, JsonObject> templateInfoJsonObjectMap = templateContainer.getCdumps();
+
+ Assert.assertTrue(templateInfoJsonObjectMap.size() == 0);
+ }
+
+ @Test
+ public void getCdumps_returnNotFoundEmptyList() {
+ elementsByFolderNames = new HashMap<>();
+ Item item = new Item();
+ item.setName(ELEMENT_NAME2);
+ List<Item> items = new ArrayList<>(Collections.singletonList(item));
+ elementsByFolderNames.put(TEMPLATE_INFO_NAME, items);
+ templateContainer = new TemplateContainer(report, dcaeRestClient, templateInfos, elementsByFolderNames);
+
+ Map<TemplateInfo, JsonObject> templateInfoJsonObjectMap = templateContainer.getCdumps();
+
+ verify(report).addErrorMessage(anyString());
+ Assert.assertTrue(templateInfoJsonObjectMap.size() == 0);
+ }
+
+ @Test
+ public void getCdumps_returnOneCdump() {
+ templateContainer = new TemplateContainer(report, dcaeRestClient, templateInfos, elementsByFolderNames);
+
+ Map<TemplateInfo, JsonObject> templateInfoJsonObjectMap = templateContainer.getCdumps();
+ JsonObject jsonObject = templateInfoJsonObjectMap.get(templateInfos.get(0));
+ String result = jsonObject.toString();
+
+ verifyCdump(result);
+ verify(report, times(0)).addErrorMessage(anyString());
+ Assert.assertTrue(templateInfoJsonObjectMap.size() == 1);
+ }
+
+ @Test
+ public void getCdumps_returnOneCdumpWithRelations() {
+ templateInfos = new ArrayList<>();
+ TemplateInfo templateInfo = new TemplateInfo();
+ templateInfo.setName(TEMPLATE_INFO_NAME);
+ List<Composition> compositionList = new ArrayList<>();
+ Composition composition = new Composition();
+ composition.setType(ELEMENT_NAME3);
+ composition.setAlias(ALIAS_NAME3);
+ compositionList.add(composition);
+ composition = new Composition();
+ composition.setType(ELEMENT_NAME2);
+ composition.setAlias(ALIAS_NAME2);
+ compositionList.add(composition);
+ templateInfo.setComposition(compositionList);
+ Relation relation = new Relation();
+ relation.setFromComponent(ALIAS_NAME3 + ".SomeNameFromRequirement");
+ relation.setToComponent(ALIAS_NAME2 + ".SomeNameToCapability");
+ relation.setFromRequirement("SomeNameFromRequirement");
+ relation.setToCapability("SomeNameToCapability");
+ templateInfo.setRelations(Collections.singletonList(relation));
+ templateInfos.add(templateInfo);
+ elementsByFolderNames = new HashMap<>();
+ List<Item> itemList = new ArrayList<>();
+ Item item = new Item();
+ item.setName(ELEMENT_NAME3);
+ item.setItemId("");
+ Model model = new Model();
+ model.setItemId("");
+ List<Model> models = Collections.singletonList(model);
+ item.setModels(models);
+ itemList.add(item);
+ item = new Item();
+ item.setName(ELEMENT_NAME2);
+ item.setItemId("");
+ model = new Model();
+ model.setItemId("");
+ models = Collections.singletonList(model);
+ item.setModels(models);
+ itemList.add(item);
+ elementsByFolderNames.put(TEMPLATE_INFO_NAME, itemList);
+ templateContainer = new TemplateContainer(report, dcaeRestClient, templateInfos, elementsByFolderNames);
+
+ Map<TemplateInfo, JsonObject>
templateInfoJsonObjectMap = templateContainer.getCdumps(); + JsonObject jsonObject = templateInfoJsonObjectMap.get(templateInfos.get(0)); + String result = jsonObject.toString(); + + verifyCdumpRelations(result); + verify(report, times(0)).addErrorMessage(anyString()); + Assert.assertTrue(templateInfoJsonObjectMap.size() == 1); + } + + private void verifyCdumpRelations(String result) { + Assert.assertTrue(result.contains("p2\":\"SomeNameToCapability\"")); + Assert.assertTrue(result.contains("\"SomeNameFromRequirement\",null,\"SomeNameToCapability\"")); + Assert.assertTrue(result.contains("\"relationship\":[")); + Assert.assertTrue(result.contains("\"n1\":\"n.")); + Assert.assertTrue(result.contains("\"relations\":[{")); + Assert.assertTrue(result.contains("\"name1\":\"SomeNameFromRequirement\"")); + Assert.assertTrue(result.contains("\"n2\":\"n.")); + Assert.assertTrue(result.contains("\"p1\":\"SomeNameFromRequirement\"")); + } + + private void verifyCdump(String result) { + String expectedResultStart = "{\"version\":0,\"flowType\":\"templateInfoName\",\"nodes\":[{\"name\":\"SomeNameFromRequirement\",\"description\":\"\",\"id\":\"e45ec9d7-01df-4cb1-896f-aff2a6ca5a8b\",\"nid\":\"n."; + String expectedResultMid = "\",\"capabilities\":[{\"name\":\"SomeNameToCapability\"}],\"requirements\":[{\"name\":\"SomeNameFromRequirement\"}],\"properties\":[{}],\"typeinfo\":{\"itemId\":\"e45ec9d7-01df-4cb1-896f-aff2a6ca5a8b/tosca.dcae.nodes.cdapApp.Map\",\"typeinfo\":\"typeInfo\"},\"type\":{\"name\":\"type\"},\"ndata\":{\"name\":\"n."; + String expectedResultEnd = "\",\"label\":\"SomeNameFromRequirement\",\"x\":438,\"y\":435,\"px\":437,\"py\":434,\"ports\":[],\"radius\":30}}],\"inputs\":[],\"outputs\":[],\"relations\":[]}"; + Assert.assertTrue(result.startsWith(expectedResultStart)); + Assert.assertTrue(result.contains(expectedResultMid)); + Assert.assertTrue(result.endsWith(expectedResultEnd)); + } +} diff --git a/dcaedt_validator/.gitignore b/dcaedt_validator/.gitignore new file mode 100644 index 0000000..f72be57 --- /dev/null +++ b/dcaedt_validator/.gitignore @@ -0,0 +1,14 @@ +/bin/ +.project +checker/target/* +kwalify/target/* +service/target/* +/checker/bin/* +/kwalify/bin/* +/service/bin/* +/kwalify/.settings/* +/checker/.settings/* +*.classpath +/service/.settings/* +/commons-pipeline/* +/.settings/* diff --git a/dcaedt_validator/checker/.gitignore b/dcaedt_validator/checker/.gitignore new file mode 100644 index 0000000..b83d222 --- /dev/null +++ b/dcaedt_validator/checker/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/dcaedt_validator/checker/pom.xml b/dcaedt_validator/checker/pom.xml new file mode 100644 index 0000000..4b64c8b --- /dev/null +++ b/dcaedt_validator/checker/pom.xml @@ -0,0 +1,160 @@ +<project + xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Validator</artifactId> + <version>1806.0.1-SNAPSHOT</version> + </parent> + <artifactId>DCAE-DT-Validator-Checker</artifactId> + <packaging>jar</packaging> + <name>DCAE DT Validator Checker</name> + <build> + <sourceDirectory>src/main/java</sourceDirectory> + <plugins> + <plugin> + <artifactId>maven-compiler-plugin</artifactId> + <version>3.1</version> + <configuration> + <source>1.8</source> + <target>1.8</target> + <encoding>${project.build.sourceEncoding}</encoding> + </configuration> + 
</plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-dependency-plugin</artifactId> + <version>2.10</version> + <executions> + <execution> + <id>copy-dependencies</id> + <phase>package</phase> + <goals> + <goal>copy-dependencies</goal> + </goals> + <configuration> + <outputDirectory>${project.build.directory}/deps</outputDirectory> + <overWriteReleases>false</overWriteReleases> + <overWriteSnapshots>false</overWriteSnapshots> + <overWriteIfNewer>true</overWriteIfNewer> + </configuration> + </execution> + </executions> + </plugin> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>buildnumber-maven-plugin</artifactId> + <version>1.4</version> + <executions> + <execution> + <phase>validate</phase> + <goals> + <goal>create</goal> + </goals> + </execution> + </executions> + <configuration> + <doCheck>false</doCheck> + <doUpdate>false</doUpdate> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <version>2.1</version> + <configuration> + <archive> + <manifest> + <addDefaultImplementationEntries>true</addDefaultImplementationEntries> + </manifest> + <manifestEntries> + <Implementation-Build>${buildNumber}</Implementation-Build> + </manifestEntries> + </archive> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-assembly-plugin</artifactId> + <version>2.6</version> + <configuration> + <descriptorRefs> + <descriptorRef>jar-with-dependencies</descriptorRef> + </descriptorRefs> + <archive> + <manifest> + <mainClass>org.onap.sdc.dcae.checker.Checker</mainClass> + </manifest> + <manifestEntries> + <Implementation-Build>${buildNumber}</Implementation-Build> + </manifestEntries> + </archive> + </configuration> + <executions> + <execution> + <id>make-assembly</id> + <!-- this is used for inheritance merges --> + <phase>package</phase> + <!-- bind to the packaging phase --> + <goals> + <goal>single</goal> + </goals> + </execution> + </executions> + </plugin> + </plugins> + </build> + <dependencies> + <dependency> + <groupId>commons-io</groupId> + <artifactId>commons-io</artifactId> + <version>2.4</version> + </dependency> + <dependency> + <groupId>commons-cli</groupId> + <artifactId>commons-cli</artifactId> + <version>1.3</version> + </dependency> + <dependency> + <groupId>commons-jxpath</groupId> + <artifactId>commons-jxpath</artifactId> + <version>1.3</version> + </dependency> + <dependency> + <groupId>commons-lang</groupId> + <artifactId>commons-lang</artifactId> + <version>2.6</version> + </dependency> + <dependency> + <groupId>com.google.guava</groupId> + <artifactId>guava</artifactId> + <version>17.0</version> + </dependency> + <dependency> + <groupId>org.yaml</groupId> + <artifactId>snakeyaml</artifactId> + <version>1.17</version> + </dependency> + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-core</artifactId> + <version>2.7.5</version> + </dependency> + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-databind</artifactId> + <version>2.7.5</version> + </dependency> + <dependency> + <groupId>org.reflections</groupId> + <artifactId>reflections</artifactId> + <version>0.9.11</version> + </dependency> + <dependency> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>kwalify</artifactId> + <version>${project.version}</version> + </dependency> + </dependencies> +</project> diff --git 
a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java
new file mode 100644
index 0000000..1512e56
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Catalog.java
@@ -0,0 +1,444 @@
+package org.onap.sdc.dcae.checker;
+
+import java.util.Iterator;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.Set;
+import java.util.Map;
+import java.util.List;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.ArrayList;
+import java.util.Collections;
+
+import java.util.stream.Collectors;
+
+import java.net.URI;
+
+import com.google.common.base.Predicate;
+import com.google.common.base.Function;
+import com.google.common.collect.Iterators;
+import com.google.common.collect.Table;
+import com.google.common.collect.HashBasedTable;
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+
+/*
+ * Oddball: tracking inputs as data templates could be seen as rather
+ * odd but we see them as instances of data types, in the same way node
+ * templates are instances of node types.
+ */
+public class Catalog {
+
+ private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+ private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+ /* Type hierarchies are stored as maps from a type name to its definition
+ * Not the best but easy to follow hierarchies towards their root ..
+ */
+ private EnumMap<Construct, Map<String,Map>> types =
+ new EnumMap<Construct, Map<String,Map>>(Construct.class);
+ /* track templates: we track templates (type instances) first per target then per construct.
+ * This allows us to share the catalog among multiple templates sharing the same type set
+ */
+ private Map<Target, EnumMap<Construct, Map<String,Map>>> templates =
+ new HashMap<Target, EnumMap<Construct, Map<String,Map>>>();
+
+ private Catalog parent;
+
+ public Catalog(Catalog theParent) {
+ this.parent = theParent;
+ /* there are no requirement types, they are the same as capability types */
+ types.put(Construct.Data, new LinkedHashMap<String, Map>());
+ types.put(Construct.Capability, new LinkedHashMap<String, Map>());
+ types.put(Construct.Relationship, new LinkedHashMap<String, Map>());
+ types.put(Construct.Artifact, new LinkedHashMap<String, Map>());
+ types.put(Construct.Interface, new LinkedHashMap<String, Map>());
+ types.put(Construct.Node, new LinkedHashMap<String, Map>());
+ types.put(Construct.Group, new LinkedHashMap<String, Map>());
+ types.put(Construct.Policy, new LinkedHashMap<String, Map>());
+
+ }
+
+ public Catalog() {
+ this(null);
+ }
+
+ public boolean addType(Construct theConstruct, String theName, Map theDef) {
+ if (hasType(theConstruct, theName)) {
+ return false;
+ }
+ getConstructTypes(theConstruct).put(theName, theDef);
+ return true;
+ }
+
+ public Map getTypeDefinition(Construct theConstruct, String theName) {
+ Map<String, Map> constructTypes = getConstructTypes(theConstruct);
+ Map typeDef = constructTypes.get(theName);
+ if (typeDef == null && this.parent != null) {
+ return this.parent.getTypeDefinition(theConstruct, theName);
+ }
+ return typeDef;
+ }
+
+ public boolean hasType(Construct theConstruct, String theName) {
+ Map<String, Map> constructTypes = getConstructTypes(theConstruct);
+ boolean res = constructTypes.containsKey(theName);
+ if (!res && this.parent != null) {
+ res = this.parent.hasType(theConstruct, theName);
+ }
+ return res;
+ }
+
+ protected Map<String, Map> getConstructTypes(Construct theConstruct) {
+ Map<String, Map> constructTypes = this.types.get(theConstruct);
+ if (null == constructTypes) {
+ throw new RuntimeException("Something worse is cooking here!",
+ new CatalogException("No types for construct " + theConstruct));
+ }
+ return constructTypes;
+ }
+
+ protected Iterator<Map.Entry<String,Map>>
+ typesIterator(Construct theConstruct) {
+ List<Map.Entry<String,Map>> constructTypes =
+ new ArrayList<Map.Entry<String,Map>>(
+ this.types.get(theConstruct).entrySet());
+ Collections.reverse(constructTypes);
+ return (this.parent == null)
+ ? constructTypes.iterator()
+ : Iterators.concat(constructTypes.iterator(),
+ this.parent.typesIterator(theConstruct));
+ }
+
+ /* this will iterate through the type hierarchy for the given type, the given type itself included.
+ */
+ public Iterator<Map.Entry<String,Map>>
+ hierarchy(Construct theConstruct, final String theName) {
+ return Iterators.filter(typesIterator(theConstruct),
+ new Predicate<Map.Entry<String,Map>>() {
+ Object next = theName;
+ public boolean apply(Map.Entry<String,Map> theEntry) {
+ if (next != null && next.equals(theEntry.getKey())) {
+ next = theEntry.getValue().get("derived_from");
+ return true;
+ }
+ else
+ return false;
+ }
+ });
+ }
+
+ public boolean isDerivedFrom(Construct theConstruct, String theType, String theBaseType) {
+
+ Iterator<Map.Entry<String,Map>> hierarchyIterator =
+ hierarchy(theConstruct, theType);
+ while (hierarchyIterator.hasNext()) {
+ Map.Entry<String,Map> typeDef = hierarchyIterator.next();
+
+ if (typeDef.getKey().equals(theBaseType)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /* We go over the type hierarchy and retain only an iterator over the
+ * elements of the given facet for each type in the hierarchy.
+ * We concatenate these iterators and filter out duplicates.
+ * TODO: cannot just filter out duplicates - a redefinition can refine the one in the base construct so we
+ * should merge them!
+ */
+ public Iterator<Map.Entry> facets(Construct theConstruct,
+ final Facet theFacet,
+ final String theName) {
+ return
+ Iterators.filter(
+ Iterators.concat(
+ Iterators.transform(
+ hierarchy(theConstruct, theName),
+ new Function<Map.Entry<String,Map>, Iterator<Map.Entry>>() {
+ public Iterator<Map.Entry> apply(Map.Entry<String,Map> theEntry) {
+ Map m = (Map)theEntry.getValue().get(theFacet.name());
+ return m == null
+ ? Collections.emptyIterator()
+ : m.entrySet().iterator();
+ }
+ }
+ )
+ ),
+ new Predicate<Map.Entry>() {
+ Set insts = new HashSet();
+ public boolean apply(Map.Entry theEntry) {
+ return !insts.contains(theEntry.getKey());
+ }
+ }
+ );
+ }
+
+ //no need to specify a construct, only nodes can have requirements
+ public Iterator<Map.Entry> requirements(final String theName) {
+ return
+ Iterators.concat(
+ Iterators.transform(
+ hierarchy(Construct.Node, theName),
+ new Function<Map.Entry<String,Map>, Iterator<Map.Entry>>() {
+ public Iterator<Map.Entry> apply(Map.Entry<String,Map> theEntry) {
+ List<Map> l = (List<Map>)theEntry.getValue().get("requirements");
+ return l == null
+ ? Collections.emptyIterator()
+ : Iterators.concat(
+ Iterators.transform(
+ l.iterator(),
+ new Function<Map, Iterator<Map.Entry>> () {
+ public Iterator<Map.Entry> apply(Map theEntry) {
+ return theEntry.entrySet().iterator();
+ }
+ }
+ )
+ );
+ }
+ }
+ )
+ );
+ }
+
+ /* Example: find the definition of property 'port' of the node type
+ * tosca.nodes.Database (properties being a facet of the node construct)
+ *
+ * Note: the definition of a facet is cumulative, i.e. more specialized
+ * definitions contribute (by overwriting) to the final definition.
+ */
+ public Map getFacetDefinition(Construct theConstruct,
+ String theConstructTypeName,
+ Facet theFacet,
+ String theName) {
+ Map def = null;
+ Iterator<Map.Entry<String,Map>> ti = hierarchy(theConstruct, theConstructTypeName);
+ while (ti.hasNext()) {
+ //this is where requirements would yield a List ..
+ Map<String,Map> fset = (Map<String,Map>)ti.next().getValue().get(theFacet.name());
+ if (fset != null) {
+ def = def == null ?
fset.get(theName)
+ : mergeDefinitions(def, fset.get(theName));
+ }
+ }
+ return def;
+ }
+
+ public Map getRequirementDefinition(Construct theConstruct,
+ String theConstructTypeName,
+ String theName) {
+ Iterator<Map.Entry<String,Map>> ti = hierarchy(theConstruct, theConstructTypeName);
+ while (ti.hasNext()) {
+ //this is where requirements yield a List ..
+ List<Map> reqs = (List<Map>)ti.next().getValue().get("requirements");
+
+ if(reqs!=null){
+ for (Map req: reqs) {
+ Map.Entry reqe = (Map.Entry)req.entrySet().iterator().next();
+ if (theName.equals(reqe.getKey())) {
+ return (Map)reqe.getValue();
+ }
+ }
+ }else{
+ debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Avoiding adding requirement block since it doesn't exist on the template....");
+ }
+ }
+ return null;
+ }
+
+ /* */
+ private EnumMap<Construct,Map<String,Map>> getTemplates(Target theTarget) {
+ EnumMap<Construct, Map<String,Map>> targetTemplates = templates.get(theTarget);
+ if (targetTemplates == null) {
+ targetTemplates = new EnumMap<Construct,Map<String,Map>>(Construct.class);
+ targetTemplates.put(Construct.Data, new LinkedHashMap<String, Map>());
+ targetTemplates.put(Construct.Relationship, new LinkedHashMap<String, Map>());
+ targetTemplates.put(Construct.Node, new LinkedHashMap<String, Map>());
+ targetTemplates.put(Construct.Group, new LinkedHashMap<String, Map>());
+ targetTemplates.put(Construct.Policy, new LinkedHashMap<String, Map>());
+
+ templates.put(theTarget, targetTemplates);
+ }
+ return targetTemplates;
+ }
+
+ public Map<String,Map> getTargetTemplates(Target theTarget, Construct theConstruct) {
+ return getTemplates(theTarget).get(theConstruct);
+ }
+
+ public void addTemplate(Target theTarget, Construct theConstruct, String theName, Map theDef)
+ throws CatalogException {
+ Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct);
+ if (null == constructTemplates) {
+ throw new CatalogException("No such thing as " + theConstruct + " templates");
+ }
+ if (constructTemplates.containsKey(theName)) {
+ throw new CatalogException(theConstruct + " template '" + theName + "' re-declaration");
+ }
+ constructTemplates.put(theName, theDef);
+ }
+
+ public boolean hasTemplate(Target theTarget, Construct theConstruct, String theName) {
+ Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct);
+ return constructTemplates != null &&
+ constructTemplates.containsKey(theName);
+ }
+
+ public Map getTemplate(Target theTarget, Construct theConstruct, String theName) {
+ Map<String, Map> constructTemplates = getTargetTemplates(theTarget, theConstruct);
+ if (constructTemplates != null)
+ return constructTemplates.get(theName);
+ else
+ return null;
+ }
+
+ public static Map mergeDefinitions(Map theAggregate, Map theIncrement) {
+ if (theIncrement == null)
+ return theAggregate;
+
+ for(Map.Entry e: (Set<Map.Entry>)theIncrement.entrySet()) {
+ theAggregate.putIfAbsent(e.getKey(), e.getValue());
+ }
+ return theAggregate;
+ }
+
+ /* tracks imports, i.e. targets */
+ private LinkedHashMap<URI, Target> targets =
+ new LinkedHashMap<URI, Target>();
+ /* tracks dependencies between targets, i.e.
the 'adjacency' matrix defined by
+ * the 'import' relationship */
+ private Table<Target,Target,Boolean> imports = HashBasedTable.create();
+
+
+ /*
+ * theParent contains an 'include/import' statement pointing to the Target
+ */
+ public boolean addTarget(Target theTarget, Target theParent) {
+ boolean cataloged = targets.containsKey(theTarget.getLocation());
+
+ if(!cataloged) {
+ targets.put(theTarget.getLocation(), theTarget);
+ }
+
+ if (theParent != null) {
+ imports.put(theParent, theTarget, Boolean.TRUE);
+ }
+
+ return !cataloged;
+ }
+
+ public Target getTarget(URI theLocation) {
+ return targets.get(theLocation);
+ }
+
+ public Collection<Target> targets() {
+ return targets.values();
+ }
+
+ /* Targets that no other targets depend on */
+ public Collection<Target> topTargets() {
+ return targets.values()
+ .stream()
+ .filter(t -> !imports.containsColumn(t))
+ .collect(Collectors.toList());
+
+ }
+
+ public String importString(Target theTarget) {
+ return importString(theTarget, " ");
+ }
+
+ private String importString(Target theTarget, String thePrefix) {
+ StringBuilder sb = new StringBuilder("");
+ Map<Target,Boolean> parents = imports.column(theTarget);
+ if (parents != null) {
+ for (Target p: parents.keySet()) {
+ sb.append(thePrefix)
+ .append("from ")
+ .append(p.getLocation())
+ .append("\n")
+ .append(importString(p, thePrefix + " "));
+ }
+ //we only keep the positive relationships
+ }
+ return sb.toString();
+ }
+
+ /* */
+ private class TargetComparator implements Comparator<Target> {
+
+ /* @return 1 if there is a dependency path from TargetOne to TargetTwo, -1 otherwise */
+ public int compare(Target theTargetOne, Target theTargetTwo) {
+ if (hasPath(theTargetTwo, theTargetOne))
+ return -1;
+
+ if (hasPath(theTargetOne, theTargetTwo))
+ return 1;
+
+ return 0;
+ }
+
+ public boolean hasPath(Target theStart, Target theEnd) {
+ Map<Target,Boolean> deps = imports.row(theStart);
+ if (deps.containsKey(theEnd))
+ return true;
+ for (Target dep: deps.keySet()) {
+ if (hasPath(dep, theEnd))
+ return true;
+ }
+ return false;
+ }
+ }
+
+ public Collection<Target> sortedTargets() {
+ List keys = new ArrayList(this.targets.values());
+ Collections.sort(keys, new TargetComparator());
+ return keys;
+ }
+
+ public static void main(String[] theArgs) throws Exception {
+
+ Catalog cat = new Catalog();
+
+ Target a = new Target("a", new URI("a")),
+ b = new Target("b", new URI("b")),
+ c = new Target("c", new URI("c")),
+ d = new Target("d", new URI("d"));
+
+ cat.addTarget(a, null);
+ cat.addTarget(b, null);
+ cat.addTarget(c, null);
+ cat.addTarget(d, null);
+
+ cat.addTarget(b, c);
+ cat.addTarget(a, c);
+ cat.addTarget(c, d);
+ cat.addTarget(a, b);
+
+ for (Target t: cat.sortedTargets())
+ debugLogger.log(LogLevel.DEBUG, Catalog.class.getName(), t.toString());
+
+ Catalog root = new Catalog();
+ root.addType(Construct.Node, "_a", Collections.emptyMap());
+ root.addType(Construct.Node, "__a", Collections.singletonMap("derived_from", "_a"));
+ root.addType(Construct.Node, "___a", Collections.singletonMap("derived_from", "_a"));
+
+ Catalog base = new Catalog(root);
+ base.addType(Construct.Node, "_b", Collections.singletonMap("derived_from", "__a"));
+ base.addType(Construct.Node, "__b", Collections.singletonMap("derived_from", "_b"));
+ base.addType(Construct.Node, "__b_", Collections.singletonMap("derived_from", "_a"));
+
+ if (theArgs.length > 0) {
+ Iterator<Map.Entry<String, Map>> ti =
+ base.hierarchy(Construct.Node, theArgs[0]);
+ while (ti.hasNext()) {
+
debugLogger.log(LogLevel.DEBUG, Catalog.class.getName(), "> {}", ti.next().getKey()); + } + } + } +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CatalogException.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CatalogException.java new file mode 100644 index 0000000..d8e2dba --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CatalogException.java @@ -0,0 +1,14 @@ +package org.onap.sdc.dcae.checker; + + +public class CatalogException extends Exception { + + public CatalogException(String theMsg, Throwable theCause) { + super(theMsg, theCause); + } + + public CatalogException(String theMsg) { + super(theMsg); + } + +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java new file mode 100644 index 0000000..fee617f --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Checker.java @@ -0,0 +1,3643 @@ +package org.onap.sdc.dcae.checker; + +import java.lang.reflect.Method; +import java.lang.reflect.InvocationTargetException; + +import java.io.File; +import java.io.Reader; +import java.io.IOException; + +import java.net.URI; +import java.net.URISyntaxException; + +import java.util.HashMap; +import java.util.TreeMap; +import java.util.Iterator; +import java.util.ListIterator; +import java.util.Map; +import java.util.List; +import java.util.LinkedList; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Set; +import java.util.Collection; +import java.util.Collections; +import java.util.regex.Pattern; +import java.util.regex.Matcher; +import java.util.stream.Collectors; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.yaml.snakeyaml.Yaml; + +import com.google.common.collect.Maps; +import com.google.common.collect.MapDifference; +import com.google.common.reflect.Invokable; + +import com.google.common.collect.Table; +import com.google.common.collect.HashBasedTable; + +import kwalify.Validator; +import kwalify.Rule; +import kwalify.Types; +import kwalify.ValidationException; +import kwalify.SchemaException; + +import org.apache.commons.jxpath.JXPathContext; +import org.apache.commons.jxpath.JXPathException; +import org.apache.commons.lang.reflect.ConstructorUtils; +import org.onap.sdc.dcae.checker.annotations.Catalogs; +import org.onap.sdc.dcae.checker.annotations.Checks; +import org.reflections.Reflections; +import org.reflections.util.FilterBuilder; +import org.reflections.util.ConfigurationBuilder; +import org.reflections.scanners.TypeAnnotationsScanner; +import org.reflections.scanners.SubTypesScanner; +import org.reflections.scanners.MethodAnnotationsScanner; + +/* + * To consider: model consistency checking happens now along with validation + * (is implemented as part of the validation hooks). It might be better to + * separate the 2 stages and perform all the consistency checking once + * validation is completed. 
+ */ +public class Checker { + private static final String PROPERTIES = "properties"; + private static final String DEFAULT = "default"; + private static final String ATTRIBUTES = "attributes"; + private static final String DATA_TYPES = "data_types"; + private static final String CAPABILITY_TYPES = "capability_types"; + private static final String VALID_SOURCE_TYPES = "valid_source_types"; + private static final String RELATIONSHIP_TYPES = "relationship_types"; + private static final String INTERFACES = "interfaces"; + private static final String VALID_TARGET_TYPES = "valid_target_types"; + private static final String ARTIFACT_TYPES = "artifact_types"; + private static final String INTERFACE_TYPES = "interface_types"; + private static final String NODE_TYPES = "node_types"; + private static final String REQUIREMENTS = "requirements"; + private static final String CAPABILITIES = "capabilities"; + private static final String GROUP_TYPES = "group_types"; + private static final String TARGETS_CONSTANT = "targets"; + private static final String POLICY_TYPES = "policy_types"; + private static final String IS_NONE_OF_THOSE = "' is none of those"; + private static final String INPUTS = "inputs"; + private static final String CAPABILITY = "capability"; + private static final String ARTIFACTS = "artifacts"; + private static final String WAS_DEFINED_FOR_THE_NODE_TYPE = " was defined for the node type "; + private static final String UNKNOWN = "Unknown "; + private static final String TYPE = " type "; + + private Target target = null; //what we're validating at the moment + + private Map<String, Target> grammars = new HashMap<>(); //grammars for the different tosca versions + + private Catalog catalog; + private TargetLocator locator = new CommonLocator(); + + private Table<String, Method, Object> checks = HashBasedTable.create(); + private Table<String, Method, Object> catalogs = HashBasedTable.create(); + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private static Catalog commonsCatalogInstance = null; + + private static final String[] EMPTY_STRING_ARRAY = new String[0]; + + /* Need a proper way to indicate where the grammars are and how they should be identified */ + private static final String[] grammarFiles = new String[]{"tosca/tosca_simple_yaml_1_0.grammar", + "tosca/tosca_simple_yaml_1_1.grammar"}; + + private Pattern spacePattern = Pattern.compile("\\s"); + + private Pattern indexPattern = Pattern.compile("/\\p{Digit}+"); + + //this is getting silly .. 
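+ //(the two array shapes below mirror the reflected hook signatures: check
+ // hooks take either a Map or a List together with a CheckContext, while
+ // validation hooks always take (Object, Rule, Validator.ValidationContext))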
+ private static Class[][] checkHookArgTypes = + new Class[][]{ + new Class[]{Map.class, CheckContext.class}, + new Class[]{List.class, CheckContext.class}}; + + private static Class[] validationHookArgTypes = + new Class[]{Object.class, Rule.class, Validator.ValidationContext.class}; + + public Checker() throws CheckerException { + loadGrammars(); + loadAnnotations(); + } + + public static void main(String[] theArgs) { + if (theArgs.length == 0) { + errLogger.log(LogLevel.ERROR, Checker.class.getName(), "checker resource_to_validate [processor]*"); + return; + } + + try { + Catalog cat = Checker.check(new File(theArgs[0])); + + for (Target t : cat.targets()) { + errLogger.log(LogLevel.ERROR, Checker.class.getName(), "{}\n{}\n{}", t.getLocation(), cat.importString(t), t.getReport()); + } + + for (Target t : cat.sortedTargets()) { + errLogger.log(LogLevel.ERROR, Checker.class.getName(), t.toString()); + } + + } catch (Exception x) { + errLogger.log(LogLevel.ERROR, Checker.class.getName(),"Exception {}", x); + } + } + + private void loadGrammars() throws CheckerException { + + for (String grammarFile : grammarFiles) { + Target grammarTarget = this.locator.resolve(grammarFile); + if (grammarTarget == null) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Failed to locate grammar {}", grammarFile); + continue; + } + + parseTarget(grammarTarget); + if (grammarTarget.getReport().hasErrors()) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid grammar {}: {}", grammarFile, grammarTarget.getReport().toString()); + continue; + } + + List versions = null; + try { + versions = (List) + ((Map) + ((Map) + ((Map) grammarTarget.getTarget()) + .get("mapping")) + .get("tosca_definitions_version")) + .get("enum"); + } catch (Exception x) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid grammar {}: cannot locate tosca_definitions_versions. Exception{}", grammarFile, x); + } + if (versions == null || versions.isEmpty()) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid grammar {}: no tosca_definitions_versions specified", grammarFile); + continue; + } + + for (Object version : versions) { + this.grammars.put(version.toString(), grammarTarget); + } + } + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Loaded grammars: {}", this.grammars); + } + + private void loadAnnotations() { + Reflections reflections = new Reflections( + new ConfigurationBuilder() + .forPackages("org.onap.sdc.dcae") + .filterInputsBy(new FilterBuilder() + .include(".*\\.class") + ) + .setScanners(new TypeAnnotationsScanner(), + new SubTypesScanner(), + new MethodAnnotationsScanner()) + .setExpandSuperTypes(false) + ); + + Map<Class, Object> handlers = new HashMap<>(); + + Set<Method> checkHandlers = reflections.getMethodsAnnotatedWith(Checks.class); + for (Method checkHandler : checkHandlers) { + checks.put(checkHandler.getAnnotation(Checks.class).path(), + checkHandler, + handlers.computeIfAbsent(checkHandler.getDeclaringClass(), + type -> { + try { + return (getClass() == type) ? this + : type.newInstance(); + } catch (Exception x) { + throw new RuntimeException(x); + } + })); + } + + Set<Method> catalogHandlers = reflections.getMethodsAnnotatedWith(Catalogs.class); + for (Method catalogHandler : catalogHandlers) { + catalogs.put(catalogHandler.getAnnotation(Catalogs.class).path(), + catalogHandler, + handlers.computeIfAbsent(catalogHandler.getDeclaringClass(), + type -> { + try { + return (getClass() == type) ? 
this + : type.newInstance(); + } catch (Exception x) { + throw new RuntimeException(x); + } + })); + } + } + + + public void setTargetLocator(TargetLocator theLocator) { + this.locator = theLocator; + } + + public Collection<Target> targets() { + if (this.catalog == null) { + throw new IllegalStateException("targets are only available after check"); + } + + return this.catalog.targets(); + } + + public Catalog catalog() { + return this.catalog; + } + + public void process(Processor theProcessor) { + + theProcessor.process(this.catalog); + } + + /* a facility for handling all files in a target directory .. */ + public static Catalog check(File theSource) + throws CheckerException { + + Catalog catalog = new Catalog(commonsCatalog()); + Checker checker = new Checker(); + try { + if (theSource.isDirectory()) { + for (File f : theSource.listFiles()) { + if (f.isFile()) { + checker.check(new Target(theSource.getCanonicalPath(), f.toURI().normalize()), catalog); + } + } + } else { + checker.check(new Target(theSource.getCanonicalPath(), theSource.toURI().normalize()), catalog); + } + } catch (IOException iox) { + throw new CheckerException("Failed to initialize target", iox); + } + + return catalog; + } + + public void check(String theSource) + throws CheckerException { + check(theSource, buildCatalog()); + } + + public void check(String theSource, Catalog theCatalog) + throws CheckerException { + Target tgt = + this.locator.resolve(theSource); + if (null == tgt) { + throw new CheckerException("Unable to locate the target " + theSource); + } + + check(tgt, theCatalog); + } + + public void check(Target theTarget) throws CheckerException { + check(theTarget, buildCatalog()); + } + + public void check(Target theTarget, Catalog theCatalog) throws CheckerException { + + this.catalog = theCatalog; + this.locator.addSearchPath(theTarget.getLocation()); + + if (this.catalog.addTarget(theTarget, null)) { + List<Target> targets = parseTarget(theTarget); + if (theTarget.getReport().hasErrors()) { + return; + } + for (Target targetItr : targets) { + this.catalog.addTarget(targetItr, null); + if (!validateTarget(targetItr).getReport().hasErrors()) { + checkTarget(targetItr); + } + } + } + } + + public void validate(Target theTarget) throws CheckerException { + validate(theTarget, buildCatalog()); + } + + public void validate(Target theTarget, Catalog theCatalog) throws CheckerException { + this.catalog = theCatalog; + this.locator.addSearchPath(theTarget.getLocation()); + + if (this.catalog.addTarget(theTarget, null)) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "@validateTarget"); + if (!validateTarget(theTarget).getReport().hasErrors()) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "@checkTarget"); + checkTarget(theTarget); + } + } + } + + private List<Target> parseTarget(final Target theTarget) + throws CheckerException { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "parseTarget {}", theTarget); + + Reader source = null; + try { + source = theTarget.open(); + } catch (IOException iox) { + throw new CheckerException("Failed to open target " + theTarget, iox); + } + + + ArrayList<Object> yamlRoots = new ArrayList<>(); + try { + Yaml yaml = new Yaml(); + for (Object yamlRoot : yaml.loadAll(source)) { + yamlRoots.add(yamlRoot); + } + + + } catch (Exception x) { + theTarget.report(x); + return Collections.emptyList(); + } finally { + try { + source.close(); + } catch (IOException iox) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), 
"Exception {}", iox); + } + } + + ArrayList targets = new ArrayList(yamlRoots.size()); + if (yamlRoots.size() == 1) { + //the target turned out to be a bare document + theTarget.setTarget(yamlRoots.get(0)); + targets.add(theTarget); + } else { + //the target turned out to be a stream containing multiple documents + for (int i = 0; i < yamlRoots.size(); i++) { +/* +!!We're changing the target below, i.e. we're changing the target implementation, hence the caching implementation will suffer!! +*/ + Target newTarget = new Target(theTarget.getName(), + fragmentTargetURI(theTarget.getLocation(), String.valueOf(i))); + newTarget.setTarget(yamlRoots.get(i)); + targets.add(newTarget); + } + } + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), " exiting parseTarget {}", theTarget); + return targets; + } + + private URI fragmentTargetURI(URI theRoot, String theFragment) { + try { + return new URI(theRoot.getScheme(), + theRoot.getSchemeSpecificPart(), + theFragment); + } catch (URISyntaxException urisx) { + throw new RuntimeException(urisx); + } + } + + private Target validateTarget(Target theTarget) + throws CheckerException { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering validateTarget {}", theTarget); + + String version = (String) + ((Map) theTarget.getTarget()) + .get("tosca_definitions_version"); + if (version == null) { + throw new CheckerException("Target " + theTarget + " does not specify a tosca_definitions_version"); + } + + Target grammar = this.grammars.get(version); + if (grammar == null) { + throw new CheckerException("Target " + theTarget + " specifies unknown tosca_definitions_version " + version); + } + + TOSCAValidator validator = null; + try { + validator = new TOSCAValidator(theTarget, grammar.getTarget()); + } catch (SchemaException sx) { + throw new CheckerException("Grammar error at: " + sx.getPath(), sx); + } + + theTarget.getReport().addAll( + validator.validate(theTarget.getTarget())); + + if (!theTarget.getReport().hasErrors()) { + applyCanonicals(theTarget.getTarget(), validator.canonicals); + } + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), " exiting validateTarget {}", theTarget); + return theTarget; + } + + private Target checkTarget(Target theTarget) { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering checkTarget {}", theTarget); + + CheckContext ctx = new CheckContext(theTarget); + //start at the top + checkServiceTemplateDefinition( + (Map<String, Object>) theTarget.getTarget(), ctx); + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "exiting checkTarget {}", theTarget); + return theTarget; + } + + public void checkProperties( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(PROPERTIES); + try { + if (!checkDefinition(PROPERTIES, theDefinitions, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkPropertyDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkPropertyDefinition( + String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName); + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + //check the type + if (!checkDataType(theDefinition, theContext)) { + return; + } + //check
default value is compatible with type + Object defaultValue = theDefinition.get(DEFAULT); + if (defaultValue != null) { + checkDataValuation(defaultValue, theDefinition, theContext); + } + + theContext.exit(); + } + + private void checkAttributes( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(ATTRIBUTES); + try { + if (!checkDefinition(ATTRIBUTES, theDefinitions, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkAttributeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkAttributeDefinition( + String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + if (!checkDataType(theDefinition, theContext)) { + return; + } + } finally { + theContext.exit(); + } + } + + /* top level rule, we collected the whole information set. + * this is where checking starts + */ + private void checkServiceTemplateDefinition( + Map<String, Object> theDef, CheckContext theContext) { + theContext.enter(""); + + if (theDef == null) { + theContext.addError("Empty template", null); + return; + } + +//!!! imports need to be processed first now that cataloging takes place at check time!! + + //first catalog whatever there is to be cataloged so that the checks can perform cross-checking + for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator(); + ri.hasNext(); ) { + Map.Entry<String, Object> e = ri.next(); + catalogs(e.getKey(), e.getValue(), theContext); + } + + for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator(); + ri.hasNext(); ) { + Map.Entry<String, Object> e = ri.next(); + checks(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + @Catalogs(path = "/data_types") + protected void catalog_data_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(DATA_TYPES); + try { + catalogTypes(Construct.Data, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + @Checks(path = "/data_types") + protected void check_data_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(DATA_TYPES); + + try { + if (!checkDefinition(DATA_TYPES, theDefinitions, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkDataTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkDataTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Data); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map<String, Map>) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Data, theName, theDefinition, + Facet.properties, theContext); + } + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/capability_types") + protected void catalog_capability_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(CAPABILITY_TYPES); + try { + catalogTypes(Construct.Capability, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + /* */
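+ //For orientation, a capability type definition as the handlers below expect it in the parsed YAML (an illustrative sketch only; the names are made up, not taken from this codebase or the spec): + // capability_types: + // mycompany.capabilities.Feature: + // derived_from: tosca.capabilities.Root + // properties: + // rate: { type: integer, default: 1 } + // valid_source_types: [ tosca.nodes.Compute ]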
+ @Checks(path = "/capability_types") + protected void check_capability_types( + Map<String, Map> theTypes, CheckContext theContext) { + theContext.enter(CAPABILITY_TYPES); + try { + if (!checkDefinition(CAPABILITY_TYPES, theTypes, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theTypes.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkCapabilityTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkCapabilityTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Capability); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map<String, Map>) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Capability, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey(ATTRIBUTES)) { + checkAttributes( + (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext); + checkTypeConstructFacet(Construct.Capability, theName, theDefinition, + Facet.attributes, theContext); + } + + //valid_source_types: see capability_type_definition + //unclear: how is the valid_source_types list definition evolving across + //the type hierarchy: additive, overwriting, ?? + if (theDefinition.containsKey(VALID_SOURCE_TYPES)) { + checkTypeReference(Construct.Node, theContext, + ((List<String>) theDefinition.get(VALID_SOURCE_TYPES)).toArray(EMPTY_STRING_ARRAY)); + } + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/relationship_types") + protected void catalog_relationship_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(RELATIONSHIP_TYPES); + try { + catalogTypes(Construct.Relationship, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/relationship_types") + protected void check_relationship_types( + Map<String, Map> theDefinition, CheckContext theContext) { + theContext.enter(RELATIONSHIP_TYPES); + try { + if (!checkDefinition(RELATIONSHIP_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkRelationshipTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkRelationshipTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Relationship); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map<String, Map>) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey(ATTRIBUTES)) { + checkAttributes( + (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext); + checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, + Facet.attributes, theContext); + } + + Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get(INTERFACES); + if (interfaces != null) { + theContext.enter(INTERFACES); + for (Iterator<Map.Entry<String, Map>> i = + interfaces.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); +
check_type_interface_definition( + e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + if (theDefinition.containsKey(VALID_TARGET_TYPES)) { + checkTypeReference(Construct.Capability, theContext, + ((List<String>) theDefinition.get(VALID_TARGET_TYPES)).toArray(EMPTY_STRING_ARRAY)); + } + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/artifact_types") + protected void catalog_artifact_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(ARTIFACT_TYPES); + try { + catalogTypes(Construct.Artifact, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/artifact_types") + protected void check_artifact_types( + Map<String, Map> theDefinition, CheckContext theContext) { + theContext.enter(ARTIFACT_TYPES); + try { + if (!checkDefinition(ARTIFACT_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkArtifactTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkArtifactTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Artifact); + try { + checkDefinition(theName, theDefinition, theContext); + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/interface_types") + protected void catalog_interface_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(INTERFACE_TYPES); + try { + catalogTypes(Construct.Interface, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + @Checks(path = "/interface_types") + protected void check_interface_types( + Map<String, Map> theDefinition, CheckContext theContext) { + theContext.enter(INTERFACE_TYPES); + try { + if (!checkDefinition(INTERFACE_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkInterfaceTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkInterfaceTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Interface); + try { + checkDefinition(theName, theDefinition, theContext); + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/node_types") + protected void catalog_node_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(NODE_TYPES); + try { + catalogTypes(Construct.Node, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/node_types") + protected void check_node_types( + Map<String, Map> theDefinition, CheckContext theContext) { + theContext.enter(NODE_TYPES); + try { + if (!checkDefinition(NODE_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkNodeTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkNodeTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Node); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + 
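+ //An illustrative node type exercising the facets checked below (made-up names, a sketch only): + // node_types: + // mycompany.nodes.MyService: + // derived_from: tosca.nodes.Root + // properties: { port: { type: integer, default: 8080 } } + // requirements: + // - host: { capability: tosca.capabilities.Container, node: tosca.nodes.Compute } + // capabilities: + // endpoint: { type: tosca.capabilities.Endpoint }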
+ if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map<String, Map>) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Node, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey(ATTRIBUTES)) { + checkAttributes( + (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext); + checkTypeConstructFacet(Construct.Node, theName, theDefinition, + Facet.attributes, theContext); + } + + //requirements + if (theDefinition.containsKey(REQUIREMENTS)) { + check_requirements( + (List<Map>) theDefinition.get(REQUIREMENTS), theContext); + } + + //capabilities + if (theDefinition.containsKey(CAPABILITIES)) { + check_capabilities( + (Map<String, Map>) theDefinition.get(CAPABILITIES), theContext); + } + + //interfaces: + Map<String, Map> interfaces = + (Map<String, Map>) theDefinition.get(INTERFACES); + checkMapTypeInterfaceDefinition(theContext, interfaces); + } finally { + theContext.exit(); + } + } + + private void checkMapTypeInterfaceDefinition(CheckContext theContext, Map<String, Map> interfaces) { + if (interfaces != null) { + try { + theContext.enter(INTERFACES); + for (Iterator<Map.Entry<String, Map>> i = + interfaces.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + check_type_interface_definition( + e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + } + + @Catalogs(path = "/group_types") + protected void catalog_group_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(GROUP_TYPES); + try { + catalogTypes(Construct.Group, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + @Checks(path = "/group_types") + protected void check_group_types( + Map<String, Map> theDefinition, CheckContext theContext) { + theContext.enter(GROUP_TYPES); + try { + if (!checkDefinition(GROUP_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkGroupTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkGroupTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Group); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map<String, Map>) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Group, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey(TARGETS_CONSTANT)) { + checkTypeReference(Construct.Node, theContext, + ((List<String>) theDefinition.get(TARGETS_CONSTANT)).toArray(EMPTY_STRING_ARRAY)); + } + + //interfaces + Map<String, Map> interfaces = + (Map<String, Map>) theDefinition.get(INTERFACES); + checkMapTypeInterfaceDefinition(theContext, interfaces); + + } finally { + theContext.exit(); + } + } + + @Catalogs(path = "/policy_types") + protected void catalog_policy_types( + Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter(POLICY_TYPES); + try { + catalogTypes(Construct.Policy, theDefinitions, theContext); + } finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/policy_types") + protected void check_policy_types( + Map<String, Map> theDefinition, CheckContext theContext) { + theContext.enter(POLICY_TYPES); + try
{ + if (!checkDefinition(POLICY_TYPES, theDefinition, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkPolicyTypeDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkPolicyTypeDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Policy); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey(PROPERTIES)) { + checkProperties( + (Map<String, Map>) theDefinition.get(PROPERTIES), theContext); + checkTypeConstructFacet(Construct.Policy, theName, theDefinition, + Facet.properties, theContext); + } + + //the targets can be known node types or group types + List<String> targets = (List<String>) theDefinition.get(TARGETS_CONSTANT); + if ((targets != null) && (checkDefinition(TARGETS_CONSTANT, targets, theContext))) { + for (String targetItr : targets) { + if (!(this.catalog.hasType(Construct.Node, targetItr) || + this.catalog.hasType(Construct.Group, targetItr))) { + theContext.addError("The 'targets' entry must contain a reference to a node type or group type, '" + targetItr + IS_NONE_OF_THOSE, null); + } + } + } + } finally { + theContext.exit(); + } + } + + //checking of actual constructs (capability, ..) + + /* First, interface types do not have a hierarchical organization (no + * 'derived_from' in an interface type definition). + * So, when interfaces (with a certain type) are defined in a node + * or relationship type (and they can define new? operations), what + * is there to check: + * Can operations here re-define their declaration from the interface + * type spec?? From A.5.11.3 we are to understand that override is the + * default interpretation .. but they talk about sub-classing so it is + * probably intended as a reference to the node or relationship type + * hierarchy and not the interface type (no hierarchy there). + * Or is this a case of augmentation where new operations can be added??
+ */ + private void check_type_interface_definition( + String theName, Map theDef, CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + + if (!checkType(Construct.Interface, theDef, theContext)) { + return; + } + + if (theDef.containsKey(INPUTS)) { + check_inputs((Map<String, Map>) theDef.get(INPUTS), theContext); + } + } finally { + theContext.exit(); + } + } + + private void check_capabilities(Map<String, Map> theDefinition, + CheckContext theContext) { + theContext.enter(CAPABILITIES); + try { + if (!checkDefinition(CAPABILITIES, theDefinition, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkCapabilityDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + /* A capability definition appears within the context of a node type */ + private void checkCapabilityDefinition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName, Construct.Capability); + + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + + //check capability type + if (!checkType(Construct.Capability, theDef, theContext)) { + return; + } + + //check properties + if (!checkFacetAugmentation( + Construct.Capability, theDef, Facet.properties, theContext)) { + return; + } + + //check attributes + if (!checkFacetAugmentation( + Construct.Capability, theDef, Facet.attributes, theContext)) { + return; + } + + //valid_source_types: should point to valid template nodes + if (theDef.containsKey(VALID_SOURCE_TYPES)) { + checkTypeReference(Construct.Node, theContext, + ((List<String>) theDef.get(VALID_SOURCE_TYPES)).toArray(EMPTY_STRING_ARRAY)); + //per A.6.1.4 there is an additional check to be performed here: + //"Any Node Type (names) provides as values for the valid_source_types keyname SHALL be type-compatible (i.e., derived from the same parent Node Type) with any Node Types defined using the same keyname in the parent Capability Type."
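+ //e.g. (illustrative): if the parent capability type declares valid_source_types: [A] and a derived definition declares valid_source_types: [B], the quoted constraint requires B to share A's parent node type; that additional check is not implemented here.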
+ } + //occurrences: were verified in range_definition + + } finally { + theContext.exit(); + } + } + + private void check_requirements(List<Map> theDefinition, + CheckContext theContext) { + theContext.enter(REQUIREMENTS); + try { + if (!checkDefinition(REQUIREMENTS, theDefinition, theContext)) { + return; + } + + for (Iterator<Map> i = theDefinition.iterator(); i.hasNext(); ) { + Map e = i.next(); + Iterator<Map.Entry<String, Map>> ei = + (Iterator<Map.Entry<String, Map>>) e.entrySet().iterator(); + Map.Entry<String, Map> eie = ei.next(); + checkRequirementDefinition(eie.getKey(), eie.getValue(), theContext); + assert !ei.hasNext(); + } + } finally { + theContext.exit(); + } + } + + private void checkRequirementDefinition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName, Construct.Requirement); + + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + //check capability type + String capabilityType = (String) theDef.get(CAPABILITY); + if (null != capabilityType) { + checkTypeReference(Construct.Capability, theContext, capabilityType); + } + + //check node type + String nodeType = (String) theDef.get("node"); + if (null != nodeType) { + checkTypeReference(Construct.Node, theContext, nodeType); + } + + //check relationship type + Map relationshipSpec = (Map) theDef.get("relationship"); + String relationshipType = null; + if (null != relationshipSpec) { + relationshipType = (String) relationshipSpec.get("type"); + if (relationshipType != null) { //should always be the case + checkTypeReference(Construct.Relationship, theContext, relationshipType); + } + + Map<String, Map> interfaces = (Map<String, Map>) + relationshipSpec.get(INTERFACES); + if (interfaces != null) { + //augmentation (additional properties or operations) of the interfaces + //defined by the above relationship types + + //check that the interface types are known + for (Map interfaceDef : interfaces.values()) { + checkType(Construct.Interface, interfaceDef, theContext); + } + } + } + + //cross checks + + //the capability definition might come from the capability type or from the capability definition + //within the node type. We might have more than one as a node might specify multiple capabilities of the + //same type. + //the goal here is to cross check the compatibility of the valid_source_types specification in the + //target capability definition (if that definition contains a valid_source_types entry). + List<Map> capabilityDefs = new LinkedList<>(); + //nodeType exposes capabilityType + if (nodeType != null) { + Map<String, Map> capabilities = + findTypeFacetByType(Construct.Node, nodeType, + Facet.capabilities, capabilityType); + if (capabilities.isEmpty()) { + theContext.addError("The node type " + nodeType + " does not appear to expose a capability of a type compatible with " + capabilityType, null); + } else { + for (Map.Entry<String, Map> capability : capabilities.entrySet()) { + //this is the capability as it was defined in the node type + Map capabilityDef = capability.getValue(); + //if it defines a valid_source_types then we're working with it, + //otherwise we're working with the capability type it points to. + //The spec does not make it clear if the valid_source_types in a capability definition augments or + //overwrites the one from the capabilityType (it just says they must be compatible).
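+ //e.g. (illustrative): for a requirement 'host: { capability: tosca.capabilities.Container, node: my.nodes.Host }' we collect any valid_source_types found on the matching capability definition of my.nodes.Host, falling back to the list on its capability type.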
+ if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) { + capabilityDefs.add(capabilityDef); + } else { + capabilityDef = + catalog.getTypeDefinition(Construct.Capability, (String) capabilityDef.get("type")); + if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) { + capabilityDefs.add(capabilityDef); + } else { + //!!if there is a capability that does not have a valid_source_type then there is no reason to + //make any further verification (as there is a valid node_type/capability target for this requirement) + capabilityDefs.clear(); + break; + } + } + } + } + } else { + Map capabilityDef = catalog.getTypeDefinition(Construct.Capability, capabilityType); + if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) { + capabilityDefs.add(capabilityDef); + } + } + + //check that the node type enclosing this requirement definition + //is in the list of valid_source_types + if (!capabilityDefs.isEmpty()) { + String enclosingNodeType = + theContext.enclosingConstruct(Construct.Node); + assert enclosingNodeType != null; + + if (!capabilityDefs.stream().anyMatch( + (Map capabilityDef) -> { + List<String> valid_source_types = + (List<String>) capabilityDef.get(VALID_SOURCE_TYPES); + return valid_source_types.stream().anyMatch( + (String source_type) -> catalog.isDerivedFrom( + Construct.Node, enclosingNodeType, source_type)); + })) { + theContext.addError("Node type: " + enclosingNodeType + " not compatible with any of the valid_source_types provided in the definition of compatible capabilities", null); + } + } + + //if we have a relationship type, check if it has a valid_target_types + //if it does, make sure that the capability type is compatible with one + //of them + if (relationshipType != null) { //should always be the case + Map relationshipTypeDef = catalog.getTypeDefinition( + Construct.Relationship, relationshipType); + if (relationshipTypeDef != null) { + List<String> valid_target_types = + (List<String>) relationshipTypeDef.get(VALID_TARGET_TYPES); + if (valid_target_types != null) { + boolean found = false; + for (String target_type : valid_target_types) { + if (catalog.isDerivedFrom( + Construct.Capability, capabilityType, target_type)) { + found = true; + break; + } + } + if (!found) { + theContext.addError("Capability type: " + capabilityType + " not compatible with any of the valid_target_types " + valid_target_types + " provided in the definition of relationship type " + relationshipType, null); + } + } + } + } + + //relationship declares the capabilityType in its valid_target_type set + //in A.6.9 'Relationship Type' the spec does not indicate how inheritance + //is to be applied to the valid_target_type spec: cumulative, overwrites, + //so we treat it as an overwrite.
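+ //e.g. (illustrative): with 'relationship: { type: tosca.relationships.HostedOn }' the capability type resolved above must derive from one of the entries in HostedOn's valid_target_types (in the simple profile, [ tosca.capabilities.Container ]).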
+ } finally { + theContext.exit(); + } + } + + //topology_template_definition and sub-rules + /* */ + @Checks(path = "/topology_template") + protected void check_topology_template( + Map theDef, CheckContext theContext) { + + theContext.enter("topology_template"); + + for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator(); + ri.hasNext(); ) { + Map.Entry<String, Object> e = ri.next(); + checks(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + /* + * Once the syntax of the imports section is validated, parse/validate/catalog + * all the imported template information + */ + @Checks(path = "/imports") + protected void check_imports(List theImports, CheckContext theContext) { + theContext.enter("imports"); + + for (ListIterator li = theImports.listIterator(); li.hasNext(); ) { + Object importEntry = li.next(); + Object importFile = ((Map) mapEntry(importEntry).getValue()).get("file"); + Target tgt = null; + try { + tgt = catalog.getTarget((URI) importFile); + } catch (ClassCastException ccx) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Import is {}. Exception {}", importFile, ccx); + } + + if (tgt == null || tgt.getReport().hasErrors()) { + //import failed parsing or validation, we skip it + continue; + } + + //import should have been fully processed by now ??? + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Processing import {}.", tgt); + checkTarget(tgt); + + } + theContext.exit(); + } + + /* */ + @Checks(path = "/topology_template/substitution_mappings") + protected void check_substitution_mappings(Map<String, Object> theSub, + CheckContext theContext) { + theContext.enter("substitution_mappings"); + try { + //type is mandatory + String type = (String) theSub.get("node_type"); + if (!checkTypeReference(Construct.Node, theContext, type)) { + theContext.addError("Unknown node type: " + type, null); + return; //not much to go on with + } + + Map<String, List> capabilities = (Map<String, List>) theSub.get(CAPABILITIES); + if (null != capabilities) { + for (Map.Entry<String, List> ce : capabilities.entrySet()) { + //the key must be a capability of the type + if (null == findTypeFacetByName(Construct.Node, type, + Facet.capabilities, ce.getKey())) { + theContext.addError("Unknown node type capability: " + ce.getKey() + ", type " + type, null); + } + //the value is a 2 element list: first is a local node, + //second is the name of one of its capabilities + List targetList = ce.getValue(); + if (targetList.size() != 2) { + theContext.addError("Invalid capability mapping: " + targetList + ", expecting 2 elements", null); + continue; + } + + String targetNode = (String) targetList.get(0); + String targetCapability = (String) targetList.get(1); + + Map<String, Object> targetNodeDef = (Map<String, Object>) + this.catalog.getTemplate(theContext.target(), Construct.Node, targetNode); + if (null == targetNodeDef) { + theContext.addError("Invalid capability mapping node template: " + targetNode, null); + continue; + } + + String targetNodeType = (String) targetNodeDef.get("type"); + if (null == findTypeFacetByName(Construct.Node, targetNodeType, + Facet.capabilities, targetCapability)) { + theContext.addError("Invalid capability mapping capability: " + targetCapability + ".
No such capability found for node template " + targetNode + ", of type " + targetNodeType, null); + } + } + } + + Map<String, List> requirements = (Map<String, List>) theSub.get(REQUIREMENTS); + if (null != requirements) { + for (Map.Entry<String, List> re : requirements.entrySet()) { + //the key must be a requirement of the type + if (null == findNodeTypeRequirementByName(type, re.getKey())) { + theContext.addError("Unknown node type requirement: " + re.getKey() + ", type " + type, null); + } + + List targetList = re.getValue(); + if (targetList.size() != 2) { + theContext.addError("Invalid requirement mapping: " + targetList + ", expecting 2 elements", null); + continue; + } + + String targetNode = (String) targetList.get(0); + String targetRequirement = (String) targetList.get(1); + + Map<String, Object> targetNodeDef = (Map<String, Object>) + this.catalog.getTemplate(theContext.target(), Construct.Node, targetNode); + if (null == targetNodeDef) { + theContext.addError("Invalid requirement mapping node template: " + targetNode, null); + continue; + } + + String targetNodeType = (String) targetNodeDef.get("type"); + if (null == findNodeTypeRequirementByName(targetNodeType, targetRequirement)) { + theContext.addError("Invalid requirement mapping requirement: " + targetRequirement + ". No such requirement found for node template " + targetNode + ", of type " + targetNodeType, null); + } + } + } + } finally { + theContext.exit(); + } + } + + + /* */ + @Checks(path = "/topology_template/inputs") + protected void check_inputs(Map<String, Map> theInputs, + CheckContext theContext) { + theContext.enter(INPUTS); + + try { + if (!checkDefinition(INPUTS, theInputs, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theInputs.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkInputDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkInputDefinition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + //check the type + if (!checkDataType(theDef, theContext)) { + return; + } + //check default value + Object defaultValue = theDef.get(DEFAULT); + if (defaultValue != null) { + checkDataValuation(defaultValue, theDef, theContext); + } + } finally { + theContext.exit(); + } + } + + @Checks(path = "/topology_template/outputs") + protected void check_outputs(Map<String, Map> theOutputs, + CheckContext theContext) { + theContext.enter("outputs"); + + try { + if (!checkDefinition("outputs", theOutputs, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theOutputs.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkOutputDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkOutputDefinition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName); + try { + checkDefinition(theName, theDef, theContext); + //check the expression + } finally { + theContext.exit(); + } + } + + @Checks(path = "/topology_template/groups") + protected void check_groups(Map<String, Map> theGroups, + CheckContext theContext) { + theContext.enter("groups"); + + try { + if (!checkDefinition("groups", theGroups, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theGroups.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e =
i.next(); + checkGroupDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkGroupDefinition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + + if (!checkType(Construct.Group, theDef, theContext)) { + return; + } + + if (!checkFacet( + Construct.Group, theDef, Facet.properties, theContext)) { + return; + } + + if (theDef.containsKey(TARGETS_CONSTANT)) { + + List<String> targetsTypes = (List<String>) + this.catalog.getTypeDefinition(Construct.Group, + (String) theDef.get("type")) + .get(TARGETS_CONSTANT); + + List<String> targets = (List<String>) theDef.get(TARGETS_CONSTANT); + for (String targetItr : targets) { + if (!this.catalog.hasTemplate(theContext.target(), Construct.Node, targetItr)) { + theContext.addError("The 'targets' entry must contain a reference to a node template, '" + targetItr + "' is not one", null); + } else { + if (targetsTypes != null) { + String targetType = (String) + this.catalog.getTemplate(theContext.target(), Construct.Node, targetItr).get("type"); + + boolean found = false; + for (String type : targetsTypes) { + found = this.catalog + .isDerivedFrom(Construct.Node, targetType, type); + if (found) { + break; + } + } + + if (!found) { + theContext.addError("The 'targets' entry '" + targetItr + "' is not type compatible with any of the types specified in the group type targets", null); + } + } + } + } + } + } finally { + theContext.exit(); + } + } + + @Checks(path = "/topology_template/policies") + protected void check_policies(List<Map<String, Map>> thePolicies, + CheckContext theContext) { + theContext.enter("policies"); + + try { + if (!checkDefinition("policies", thePolicies, theContext)) { + return; + } + + for (Map<String, Map> policy : thePolicies) { + assert policy.size() == 1; + Map.Entry<String, Map> e = policy.entrySet().iterator().next(); + checkPolicyDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkPolicyDefinition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + + if (!checkType(Construct.Policy, theDef, theContext)) { + return; + } + + if (!checkFacet( + Construct.Policy, theDef, Facet.properties, theContext)) { + return; + } + + //targets: must point to node or group templates (that are of a type + //specified in the policy type definition, if targets were specified + //there).
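+ //An illustrative policy exercising the targets cross-check below (made-up names): + // policies: + // - my_placement: + // type: mycompany.policies.Placement + // targets: [ my_node_template, my_group ]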
+ if (theDef.containsKey(TARGETS_CONSTANT)) { + List<String> targetsTypes = (List<String>) + this.catalog.getTypeDefinition(Construct.Policy, + (String) theDef.get("type")) + .get(TARGETS_CONSTANT); + + List<String> targets = (List<String>) theDef.get(TARGETS_CONSTANT); + for (String targetItr : targets) { + Construct targetConstruct = null; + + if (this.catalog.hasTemplate(theContext.target(), Construct.Group, targetItr)) { + targetConstruct = Construct.Group; + } else if (this.catalog.hasTemplate(theContext.target(), Construct.Node, targetItr)) { + targetConstruct = Construct.Node; + } else { + theContext.addError("The 'targets' entry must contain a reference to a node template or group template, '" + targetItr + IS_NONE_OF_THOSE, null); + } + + if (targetConstruct != null && + targetsTypes != null) { + //get the target type and make sure it is compatible with the types + //indicated in the type spec + String targetType = (String) + this.catalog.getTemplate(theContext.target(), targetConstruct, targetItr).get("type"); + + boolean found = false; + for (String type : targetsTypes) { + found = this.catalog + .isDerivedFrom(targetConstruct, targetType, type); + if (found) { + break; + } + } + + if (!found) { + theContext.addError("The 'targets' " + targetConstruct + " entry '" + targetItr + "' is not type compatible with any of the types specified in policy type targets", null); + } + } + } + } + + } finally { + theContext.exit(); + } + } + + /* */ + @Checks(path = "/topology_template/node_templates") + protected void check_node_templates(Map<String, Map> theTemplates, + CheckContext theContext) { + theContext.enter("node_templates"); + try { + if (!checkDefinition("node_templates", theTemplates, theContext)) { + return; + } + + for (Iterator<Map.Entry<String, Map>> i = theTemplates.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkNodeTemplateDefinition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + /* */ + private void checkNodeTemplateDefinition(String theName, + Map theNode, + CheckContext theContext) { + theContext.enter(theName, Construct.Node); + + try { + if (!checkDefinition(theName, theNode, theContext)) { + return; + } + + if (!checkType(Construct.Node, theNode, theContext)) { + return; + } + + //copy + String copy = (String) theNode.get("copy"); + if (copy != null) { + if (!checkTemplateReference(Construct.Node, theContext, copy)) { + theContext.addError("The 'copy' reference " + copy + " does not point to a known node template", null); + } else { + //the 'copy' node specification should be used to provide 'defaults' + //for this specification + } + } + + /* check that we operate on properties and attributes within the scope of + the specified node type */ + if (!checkFacet( + Construct.Node, /*theName,*/theNode, Facet.properties, theContext)) { + return; + } + + if (!checkFacet( + Construct.Node, /*theName,*/theNode, Facet.attributes, theContext)) { + return; + } + + //requirement assignment seq + if (theNode.containsKey(REQUIREMENTS)) { + checkRequirementsAssignmentDefinition( + (List<Map>) theNode.get(REQUIREMENTS), theContext); + } + + //capability assignment map: subject to augmentation + if (theNode.containsKey(CAPABILITIES)) { + checkCapabilitiesAssignmentDefinition( + (Map<String, Map>) theNode.get(CAPABILITIES), theContext); + } + + //interfaces + if (theNode.containsKey(INTERFACES)) { + checkTemplateInterfacesDefinition( + (Map<String, Map>) theNode.get(INTERFACES), theContext); + } + + //artifacts:
artifacts do not have different definition forms/syntax + //depending on the context (type or template) but they are still subject + //to 'augmentation' + if (theNode.containsKey(ARTIFACTS)) { + check_template_artifacts_definition( + (Map<String, Object>) theNode.get(ARTIFACTS), theContext); + } + + /* node_filter: the context to which the node filter is applied is very + * wide here as opposed to the node filter specification in a requirement + * assignment which has a more strict context (target node/capability are + * specified). + * We could check that there are nodes in this template having the + * properties/capabilities specified in this filter, i.e. the filter has + * a chance to succeed. + */ + } finally { + theContext.exit(); + } + } + + @Checks(path = "/topology_template/relationship_templates") + protected void check_relationship_templates(Map theTemplates, + CheckContext theContext) { + theContext.enter("relationship_templates"); + + for (Iterator<Map.Entry<String, Map>> i = theTemplates.entrySet().iterator(); i.hasNext(); ) { + Map.Entry<String, Map> e = i.next(); + checkRelationshipTemplateDefinition(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + private void checkRelationshipTemplateDefinition( + String theName, + Map theRelationship, + CheckContext theContext) { + theContext.enter(theName, Construct.Relationship); + try { + if (!checkDefinition(theName, theRelationship, theContext)) { + return; + } + + if (!checkType(Construct.Relationship, theRelationship, theContext)) { + return; + } + + /* check that we operate on properties and attributes within the scope of + the specified relationship type */ + if (!checkFacet(Construct.Relationship, theRelationship, + Facet.properties, theContext)) { + return; + } + + if (!checkFacet(Construct.Relationship, theRelationship, + Facet.attributes, theContext)) { + return; + } + + /* interface definitions + note: augmentation is allowed here so not clear what to check .. + maybe report augmentations if so configured .. 
*/ + + } finally { + theContext.exit(); + } + } + + //requirements and capabilities assignments appear in node templates + private void checkRequirementsAssignmentDefinition( + List<Map> theRequirements, CheckContext theContext) { + theContext.enter(REQUIREMENTS); + try { + if (!checkDefinition(REQUIREMENTS, theRequirements, theContext)) { + return; + } + + //the node type for the node template enclosing these requirements + String nodeType = (String) catalog.getTemplate( + theContext.target(), + Construct.Node, + theContext.enclosingConstruct(Construct.Node)) + .get("type"); + + for (Iterator<Map> ri = theRequirements.iterator(); ri.hasNext(); ) { + Map<String, Map> requirement = (Map<String, Map>) ri.next(); + + Iterator<Map.Entry<String, Map>> rai = requirement.entrySet().iterator(); + + Map.Entry<String, Map> requirementEntry = rai.next(); + assert !rai.hasNext(); + + String requirementName = requirementEntry.getKey(); + Map requirementDef = findNodeTypeRequirementByName( + nodeType, requirementName); + + if (requirementDef == null) { + theContext.addError("No requirement " + requirementName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null); + continue; + } + + checkRequirementAssignmentDefinition( + requirementName, requirementEntry.getValue(), requirementDef, theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkRequirementAssignmentDefinition( + String theRequirementName, + Map theAssignment, + Map theDefinition, + CheckContext theContext) { + theContext//.enter("requirement_assignment") + .enter(theRequirementName, Construct.Requirement); + + //grab the node type definition to verify compatibility + + try { + //node assignment + boolean targetNodeIsTemplate = false; + String targetNode = (String) theAssignment.get("node"); + if (targetNode == null) { + targetNode = (String) theDefinition.get("node"); + //targetNodeIsTemplate stays false, targetNode must be a type + } else { + //the value must be a node template or a node type + targetNodeIsTemplate = isTemplateReference( + Construct.Node, theContext, targetNode); + if ((!targetNodeIsTemplate) && (!isTypeReference(Construct.Node, targetNode))) { + theContext.addError("The 'node' entry must contain a reference to a node template or node type, '" + targetNode + IS_NONE_OF_THOSE, null); + return; + } + + //additional checks + String targetNodeDef = (String) theDefinition.get("node"); + if (targetNodeDef != null && targetNode != null) { + if (targetNodeIsTemplate) { + //if the target is a node template, it must be compatible with the + //node type specification in the requirement definition + String targetNodeType = (String) + catalog.getTemplate(theContext.target(), Construct.Node, targetNode).get("type"); + if (!catalog.isDerivedFrom( + Construct.Node, targetNodeType, targetNodeDef)) { + theContext.addError("The required target node type '" + targetNodeType + "' of target node " + targetNode + " is not compatible with the target node type found in the requirement definition: " + targetNodeDef, null); + return; + } + } else { + //if the target is a node type it must be compatible (= or derived + //from) with the node type specification in the requirement definition + if (!catalog.isDerivedFrom( + Construct.Node, targetNode, targetNodeDef)) { + theContext.addError("The required target node type '" + targetNode + "' is not compatible with the target node type found in the requirement definition: " + targetNodeDef, null); + return; + } + } + } + } + + String targetNodeType = targetNodeIsTemplate ?
+ (String) catalog.getTemplate(theContext.target(), Construct.Node, targetNode).get("type") : + targetNode; + + //capability assignment + boolean targetCapabilityIsType = false; + String targetCapability = (String) theAssignment.get(CAPABILITY); + if (targetCapability == null) { + targetCapability = (String) theDefinition.get(CAPABILITY); + //in a requirement definition the target capability can only be a + //capability type (and not a capability name within some target node + //type) + targetCapabilityIsType = true; + } else { + targetCapabilityIsType = isTypeReference(Construct.Capability, targetCapability); + + //check compatibility with the target compatibility type specified + //in the requirement definition, if any + String targetCapabilityDef = (String) theDefinition.get(CAPABILITY); + if (targetCapabilityDef != null && targetCapability != null) { + if (targetCapabilityIsType) { + if (!catalog.isDerivedFrom( + Construct.Capability, targetCapability, targetCapabilityDef)) { + theContext.addError("The required target capability type '" + targetCapability + "' is not compatible with the target capability type found in the requirement definition: " + targetCapabilityDef, null); + return; + } + } else { + //the capability is from a target node. Find its definition and + //check that its type is compatible with the capability type + //from the requirement definition + + //check target capability compatibility with target node + if (targetNode == null) { + theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', which was not specified", null); + return; + } + if (!targetNodeIsTemplate) { + theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', but there you specified a node type", null); + return; + } + //check that the targetNode (its type) indeed has the + //targetCapability + + Map<String, Object> targetNodeCapabilityDef = + findTypeFacetByName( + Construct.Node, targetNodeType, + Facet.capabilities, targetCapability); + if (targetNodeCapabilityDef == null) { + theContext.addError("No capability '" + targetCapability + "' was specified in the node " + targetNode + " of type " + targetNodeType, null); + return; + } + + String targetNodeCapabilityType = (String) targetNodeCapabilityDef.get("type"); + + if (!catalog.isDerivedFrom(Construct.Capability, + targetNodeCapabilityType, + targetCapabilityDef)) { + theContext.addError("The required target capability type '" + targetCapabilityDef + "' is not compatible with the target capability type found in the target node type capability definition : " + targetNodeCapabilityType + ", targetNode " + targetNode + ", capability name " + targetCapability, null); + return; + } + } + } + } + + //relationship assignment + Map targetRelationship = (Map) theAssignment.get("relationship"); + if (targetRelationship != null) { + //this has to be compatible with the relationship with the same name + //from the node type + //check the type + } + + //node_filter; used jxpath to simplify the navigation somewhat + //this is too cryptic + JXPathContext jxPath = JXPathContext.newContext(theAssignment); + jxPath.setLenient(true); + + List<Map> propertiesFilter = + (List<Map>) jxPath.getValue("/node_filter/properties"); + if (propertiesFilter != null) { + for (Map propertyFilter : propertiesFilter) { + if (targetNode != null) { + //if we have a target node 
or node template then it must + //have these properties + for (Object propertyName : propertyFilter.keySet()) { + if (null == findTypeFacetByName(Construct.Node, + targetNodeType, + Facet.properties, + propertyName.toString())) { + theContext.addError("The node_filter property " + propertyName + " is invalid: requirement target node " + targetNode + " does not have such a property", null); + } + } + } + } + } + + List<Map> capabilitiesFilter = + (List<Map>) jxPath.getValue("node_filter/capabilities"); + if (capabilitiesFilter != null) { + for (Map capabilityFilterDef : capabilitiesFilter) { + assert capabilityFilterDef.size() == 1; + Map.Entry<String, Map> capabilityFilterEntry = + (Map.Entry<String, Map>) capabilityFilterDef.entrySet().iterator().next(); + String targetFilterCapability = capabilityFilterEntry.getKey(); + Map<String, Object> targetFilterCapabilityDef = null; + + //if we have a targetNode, capabilityName must be a capability of + //that node (type); or it can simply be a capability type (but the node + //must have a capability of that type) + + String targetFilterCapabilityType = null; + if (targetNode != null) { + targetFilterCapabilityDef = + findTypeFacetByName(Construct.Node, targetNodeType, + Facet.capabilities, targetFilterCapability); + if (targetFilterCapabilityDef != null) { + targetFilterCapabilityType = + (String) targetFilterCapabilityDef/*.values().iterator().next()*/.get("type"); + } else { + Map<String, Map> targetFilterCapabilities = + findTypeFacetByType(Construct.Node, targetNodeType, + Facet.capabilities, targetFilterCapability); + + if (!targetFilterCapabilities.isEmpty()) { + if (targetFilterCapabilities.size() > 1) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "checkRequirementAssignmentDefinition: filter check, target node type '{}' has more than one capability of type '{}', not supported", targetNodeType, targetFilterCapability); + } + //pick the first entry, it represents a capability of the required type + Map.Entry<String, Map> capabilityEntry = targetFilterCapabilities.entrySet().iterator().next(); + targetFilterCapabilityDef = Collections.singletonMap(capabilityEntry.getKey(), + capabilityEntry.getValue()); + targetFilterCapabilityType = targetFilterCapability; + } + } + } else { + //no node (type) specified, it can be a straight capability type + targetFilterCapabilityDef = catalog.getTypeDefinition( + Construct.Capability, targetFilterCapability); + //here comes the odd part: it can still be just a name in which + //case we should look at the requirement definition, see which + //capability (type) it indicates + assert targetCapabilityIsType; //cannot be otherwise, we'd need a node + targetFilterCapabilityDef = catalog.getTypeDefinition( + Construct.Capability, targetCapability); + targetFilterCapabilityType = targetCapability; + } + + if (targetFilterCapabilityDef == null) { + theContext.addError("Capability (name or type) " + targetFilterCapability + " is invalid: not a known capability (type) " + + ((targetNodeType != null) ? 
(" of node type" + targetNodeType) : ""), null); + continue; + } + + for (Map propertyFilter : + (List<Map>) jxPath.getValue("/node_filter/capabilities/" + targetFilterCapability + "/properties")) { + //check that the properties are in the scope of the + //capability definition + for (Object propertyName : propertyFilter.keySet()) { + if (null == findTypeFacetByName(Construct.Capability, + targetCapability, + Facet.properties, + propertyName.toString())) { + theContext.addError("The capability filter " + targetFilterCapability + " property " + propertyName + " is invalid: target capability " + targetFilterCapabilityType + " does not have such a property", null); + } + } + } + } + } + + } finally { + theContext//.exit() + .exit(); + } + } + + private void checkCapabilitiesAssignmentDefinition( + Map<String, Map> theCapabilities, CheckContext theContext) { + theContext.enter(CAPABILITIES); + try { + if (!checkDefinition(CAPABILITIES, theCapabilities, theContext)) { + return; + } + + //the node type for the node template enclosing these requirements + String nodeType = (String) catalog.getTemplate( + theContext.target(), + Construct.Node, + theContext.enclosingConstruct(Construct.Node)) + .get("type"); + + for (Iterator<Map.Entry<String, Map>> ci = + theCapabilities.entrySet().iterator(); + ci.hasNext(); ) { + + Map.Entry<String, Map> ce = ci.next(); + + String capabilityName = ce.getKey(); + Map capabilityDef = findTypeFacetByName(Construct.Node, nodeType, + Facet.capabilities, capabilityName); + if (capabilityDef == null) { + theContext.addError("No capability " + capabilityName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null); + continue; + } + + checkCapabilityAssignmentDefinition( + capabilityName, ce.getValue(), capabilityDef, theContext); + } + } finally { + theContext.exit(); + } + } + + private void checkCapabilityAssignmentDefinition( + String theCapabilityName, + Map theAssignment, + Map theDefinition, + CheckContext theContext) { + + theContext.enter(theCapabilityName, Construct.Capability); + try { + String capabilityType = (String) theDefinition.get("type"); + //list of property and attributes assignments + checkFacet(Construct.Capability, theAssignment, capabilityType, + Facet.properties, theContext); + checkFacet(Construct.Capability, theAssignment, capabilityType, + Facet.attributes, theContext); + } finally { + theContext.exit(); + } + } + + private void checkTemplateInterfacesDefinition( + Map<String, Map> theInterfaces, + CheckContext theContext) { + theContext.enter(INTERFACES); + try { + if (!checkDefinition(INTERFACES, theInterfaces, theContext)) { + return; + } + + //the node type for the node template enclosing these requirements + String nodeType = (String) catalog.getTemplate( + theContext.target(), + Construct.Node, + theContext.enclosingConstruct(Construct.Node)) + .get("type"); + + for (Iterator<Map.Entry<String, Map>> ii = + theInterfaces.entrySet().iterator(); + ii.hasNext(); ) { + + Map.Entry<String, Map> ie = ii.next(); + + String interfaceName = ie.getKey(); + Map interfaceDef = findTypeFacetByName(Construct.Node, nodeType, + Facet.interfaces, interfaceName); + + if (interfaceDef == null) { + /* this is subject to augmentation: this could be a warning but not an error */ + theContext.addError("No interface " + interfaceName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null); + continue; + } + + checkTemplateInterfaceDefinition( + interfaceName, ie.getValue(), interfaceDef, theContext); + } + } finally { + theContext.exit(); + } + } + + private void 
checkTemplateInterfaceDefinition( + String theInterfaceName, + Map theAssignment, + Map theDefinition, + CheckContext theContext) { + + theContext.enter(theInterfaceName, Construct.Interface); + try { + //check the assignment of the common inputs + checkFacet(Construct.Interface, + theAssignment, + (String) theDefinition.get("type"), + Facet.inputs, + theContext); + } finally { + theContext.exit(); + } + } + + + @Checks(path = "/topology_template/artifacts") + protected void check_template_artifacts_definition( + Map<String, Object> theDefinition, + CheckContext theContext) { + theContext.enter(ARTIFACTS); + theContext.exit(); + } + + //generic checking actions, not related to validation rules + + /* will check the validity of the type specification for any construct containing a 'type' entry */ + private boolean checkType(Construct theCategory, Map theSpec, CheckContext theContext) { + String type = (String) theSpec.get("type"); + if (type == null) { + theContext.addError("Missing type specification", null); + return false; + } + + if (!catalog.hasType(theCategory, type)) { + theContext.addError(UNKNOWN + theCategory + " type: " + type, null); + return false; + } + + return true; + } + + /* the type can be: + * a known type: predefined or user-defined + * a collection (list or map) and then check that the entry_schema points to one of the first two cases (is that it?) + */ + private boolean checkDataType(Map theSpec, CheckContext theContext) { + + if (!checkType(Construct.Data, theSpec, theContext)) { + return false; + } + + String type = (String) theSpec.get("type"); + if (/*isCollectionType(type)*/ + "list".equals(type) || "map".equals(type)) { + Map entrySchema = (Map) theSpec.get("entry_schema"); + if (entrySchema == null) { + //maybe issue a warning ?? or is 'string' the default?? + return true; + } + + if (!catalog.hasType(Construct.Data, (String) entrySchema.get("type"))) { + theContext.addError("Unknown entry_schema type: " + entrySchema, null); + return false; + } + } + return true; + } + + /* Check that a particular facet (properties, attributes) of a construct type + * (node type, capability type, etc) is correctly (consistently) defined + * across a type hierarchy + */ + private boolean checkTypeConstructFacet(Construct theConstruct, + String theTypeName, + Map theTypeSpec, + Facet theFacet, + CheckContext theContext) { + Map<String, Map> defs = + (Map<String, Map>) theTypeSpec.get(theFacet.name()); + if (null == defs) { + return true; + } + + boolean res = true; + + //given that the type was cataloged there will be at least one entry + Iterator<Map.Entry<String, Map>> i = + catalog.hierarchy(theConstruct, theTypeName); + if (!i.hasNext()) { + theContext.addError( + "The type " + theTypeName + " needs to be cataloged before attempting 'checkTypeConstruct'", null); + return false; + } + i.next(); //skip self + while (i.hasNext()) { + Map.Entry<String, Map> e = i.next(); + Map<String, Map> superDefs = (Map<String, Map>) e.getValue() + .get(theFacet.name()); + if (null == superDefs) { + continue; + } + //this computes entries that appear on both collections but with different values, i.e. 
the re-defined properties + Map<String, MapDifference.ValueDifference<Map>> diff = Maps.difference(defs, superDefs).entriesDiffering(); + + for (Iterator<Map.Entry<String, MapDifference.ValueDifference<Map>>> di = diff.entrySet().iterator(); di.hasNext(); ) { + Map.Entry<String, MapDifference.ValueDifference<Map>> de = di.next(); + MapDifference.ValueDifference<Map> dediff = de.getValue(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} type {}: {} has been re-defined between the {} types {} and {}", theConstruct, theFacet, de.getKey(), theConstruct, e.getKey(), theTypeName); + //for now we just check that the type is consistently re-declared + if (!this.catalog.isDerivedFrom(theFacet.construct(), + (String) dediff.leftValue().get("type"), + (String) dediff.rightValue().get("type"))) { + theContext.addError( + theConstruct + TYPE + theFacet + ", redefinition changed its type: " + de.getKey() + " has been re-defined between the " + theConstruct + " types " + e.getKey() + " and " + theTypeName + " in an incompatible manner", null); + res = false; + } + } + } + + return res; + } + + /* + * Checks the validity of a certain facet of a construct + * (properties of a node) across a type hierarchy. + * For now the check is limited to verifying that a facet was declared + * somewhere in the construct type hierarchy (a node template property has + * been declared in the node type hierarchy). + * + * Two versions, with the more generic one allowing the specification of the type + * to be done explicitly. + */ + private boolean checkFacet(Construct theConstruct, + Map theSpec, + Facet theFacet, + CheckContext theContext) { + return checkFacet(theConstruct, theSpec, null, theFacet, theContext); + } + + /** + * We walk the hierarchy and verify the assignment of a property with respect to its definition. + * We also collect the names of those properties defined as required but for which no assignment was provided. + */ + private boolean checkFacet(Construct theConstruct, + Map theSpec, + String theSpecType, + Facet theFacet, + CheckContext theContext) { + + Map<String, Map> defs = (Map<String, Map>) theSpec.get(theFacet.name()); + if (null == defs) { + return true; + } + defs = Maps.newHashMap(defs); //work on a modifiable copy + + boolean res = true; + if (theSpecType == null) { + theSpecType = (String) theSpec.get("type"); + } + if (theSpecType == null) { + theContext.addError("No specification type available", null); + return false; + } + + Map<String, Byte> missed = new HashMap<>(); //keeps track of the missing required properties, the value is + //0 if a default was found along the hierarchy + Iterator<Map.Entry<String, Map>> i = + catalog.hierarchy(theConstruct, theSpecType); + while (i.hasNext() && !defs.isEmpty()) { + Map.Entry<String, Map> type = i.next(); + + Map<String, Map> typeDefs = (Map<String, Map>) type.getValue() + .get(theFacet.name()); + if (null == typeDefs) { + continue; + } + + MapDifference<String, Map> diff = Maps.difference(defs, typeDefs); + + //these are the ones this type and the spec have in common (same key, + //different values) + Map<String, MapDifference.ValueDifference<Map>> facetDefs = + diff.entriesDiffering(); + //TODO: this assumes the definition of the facet is not cumulative, i.e. 
+ //subtypes 'add' something to the definition provided by the super-types + //it considers that the most specialized definition stands on its own + for (MapDifference.ValueDifference<Map> valdef : facetDefs.values()) { + checkDataValuation(valdef.leftValue(), valdef.rightValue(), theContext); + } + + //remove from properties all those that appear in this type: unfortunately this returns an unmodifiable map .. + defs = Maps.newHashMap(diff.entriesOnlyOnLeft()); + } + + if (!defs.isEmpty()) { + theContext.addError(UNKNOWN + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") were used: " + defs, null); + res = false; + } + + if (!missed.isEmpty()) { + List missedNames = + missed.entrySet() + .stream() + .filter(e -> e.getValue().byteValue() == (byte) 1) + .map(e -> e.getKey()) + .collect(Collectors.toList()); + if (!missedNames.isEmpty()) { + theContext.addError(theConstruct + " " + theFacet + " missing required values for: " + missedNames, null); + res = false; + } + } + + return res; + } + + /* Augmentation occurs in cases such as the declaration of capabilities within a node type. + * In such cases the construct facets (the capability's properties) can redefine (augment) the + * specification found in the construct type. + */ + private boolean checkFacetAugmentation(Construct theConstruct, + Map theSpec, + Facet theFacet, + CheckContext theContext) { + return checkFacetAugmentation(theConstruct, theSpec, null, theFacet, theContext); + } + + private boolean checkFacetAugmentation(Construct theConstruct, + Map theSpec, + String theSpecType, + Facet theFacet, + CheckContext theContext) { + + Map<String, Map> augs = (Map<String, Map>) theSpec.get(theFacet.name()); + if (null == augs) { + return true; + } + + boolean res = true; + if (theSpecType == null) { + theSpecType = (String) theSpec.get("type"); + } + if (theSpecType == null) { + theContext.addError("No specification type available", null); + return false; + } + + for (Iterator<Map.Entry<String, Map>> ai = augs.entrySet().iterator(); ai.hasNext(); ) { + Map.Entry<String, Map> ae = ai.next(); + + //make sure it was declared by the type + Map facetDef = catalog.getFacetDefinition(theConstruct, theSpecType, theFacet, ae.getKey()); + if (facetDef == null) { + theContext.addError(UNKNOWN + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") were used: " + ae.getKey(), null); + res = false; + continue; + } + + //check the compatibility of the augmentation: only the type cannot be changed + //can the type be changed in a compatible manner ?? 
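+ //illustrative sketch (hypothetical YAML, not from this source): a node type declaring + // capabilities: {host: {type: tosca.capabilities.Container, properties: {num_cpus: {default: 2}}}} + //augments the inherited 'num_cpus' property; a new default is acceptable, but the check below + //rejects any change to the property's declared type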
+ if (!facetDef.get("type").equals(ae.getValue().get("type"))) { + theContext.addError(theConstruct + " " + theFacet + " " + ae.getKey() + " has a different type than its definition: " + ae.getValue().get("type") + " instead of " + facetDef.get("type"), null); + res = false; + continue; + } + + //check any valuation (here just defaults) + Object defaultValue = ae.getValue().get(DEFAULT); + if (defaultValue != null) { + checkDataValuation(defaultValue, ae.getValue(), theContext); + } + } + + return res; + } + + private boolean catalogTypes(Construct theConstruct, Map<String, Map> theTypes, CheckContext theContext) { + + boolean res = true; + for (Map.Entry<String, Map> typeEntry : theTypes.entrySet()) { + res &= catalogType(theConstruct, typeEntry.getKey(), typeEntry.getValue(), theContext); + } + + return res; + } + + private boolean catalogType(Construct theConstruct, + String theName, + Map theDef, + CheckContext theContext) { + + if (!catalog.addType(theConstruct, theName, theDef)) { + theContext.addError(theConstruct + TYPE + theName + " re-declaration", null); + return false; + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} type {} has been cataloged", theConstruct, theName); + + String parentType = (String) theDef.get("derived_from"); + if (parentType != null && !catalog.hasType(theConstruct, parentType)) { + theContext.addError( + theConstruct + TYPE + theName + " indicates a supertype that has not (yet) been declared: " + parentType, null); + return false; + } + return true; + } + + private boolean checkTypeReference(Construct theConstruct, + CheckContext theContext, + String... theTypeNames) { + boolean res = true; + for (String typeName : theTypeNames) { + if (!isTypeReference(theConstruct, typeName)) { + theContext.addError("Reference to " + theConstruct + " type '" + typeName + "' points to unknown type", null); + res = false; + } + } + return res; + } + + private boolean isTypeReference(Construct theConstruct, + String theTypeName) { + return this.catalog.hasType(theConstruct, theTypeName); + } + + /* node or relationship templates */ + private boolean checkTemplateReference(Construct theConstruct, + CheckContext theContext, + String... theTemplateNames) { + boolean res = true; + for (String templateName : theTemplateNames) { + if (!isTemplateReference(theConstruct, theContext, templateName)) { + theContext.addError("Reference to " + theConstruct + " template '" + templateName + "' points to unknown template", null); + res = false; + } + } + return res; + } + + private boolean isTemplateReference(Construct theConstruct, + CheckContext theContext, + String theTemplateName) { + return this.catalog.hasTemplate(theContext.target(), theConstruct, theTemplateName); + } + + /* + * For inputs/properties/attributes/(parameters). It is the caller's + * responsability to provide the value (from a 'default', inlined, ..) + * + * @param theDef the definition of the given construct/facet as it appears in + * its enclosing type definition. 
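+ * e.g. (hypothetical values) theExpr = 8080 checked against theDef = {type: integer, constraints: [...]} + * exercises both the 'integer' value evaluator and the constraints evaluator used below.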
+ */ + private boolean checkDataValuation(Object theExpr, + Map<String, ?> theDef, + CheckContext theContext) { + //first check if the expression is a function, if not handle it as a value assignment + Data.Function f = Data.function(theExpr); + if (f != null) { + return f.evaluator() + .eval(theExpr, theDef, theContext); + } else { + Data.Type type = Data.typeByName((String) theDef.get("type")); + if (type != null) { + Data.Evaluator evaluator; + + evaluator = type.evaluator(); + if (evaluator == null) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No value evaluator available for type {}", type); + } else { + if ((theExpr != null) && (!evaluator.eval(theExpr, theDef, theContext))) { + return false; + } + } + + + evaluator = type.constraintsEvaluator(); + if (evaluator == null) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No constraints evaluator available for type {}", type); + } else { + if (theExpr != null) { + if (!evaluator.eval(theExpr, theDef, theContext)) { + return false; + } + } else { + //should have a null value validator + } + } + + return true; + } else { + theContext.addError("Expression " + theExpr + " of " + theDef + " could not be evaluated", null); + return false; + } + } + } + + /** + * Given the type of a certain construct (node type for example), look up + * in one of its facets (properties, capabilities, ..) for entries of the given + * facet type (if looking in properties, of the given data type). + * + * @return a map of all facets of the given type, will be empty to signal + * none found + * <p> + * Should we look for a facet construct of a compatible type: any type derived + * from the given facet's construct type?? + */ + private Map<String, Map> + findTypeFacetByType(Construct theTypeConstruct, + String theTypeName, + Facet theFacet, + String theFacetType) { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType {}, {}: {} {}", theTypeName, theTypeConstruct, theFacetType, theFacet); + Map<String, Map> res = new HashMap<>(); + Iterator<Map.Entry<String, Map>> i = + catalog.hierarchy(theTypeConstruct, theTypeName); + while (i.hasNext()) { + Map.Entry<String, Map> typeSpec = i.next(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, Checking {} type {}", theTypeConstruct, typeSpec.getKey()); + Map<String, Map> typeFacet = + (Map<String, Map>) typeSpec.getValue().get(theFacet.name()); + if (typeFacet == null) { + continue; + } + Iterator<Map.Entry<String, Map>> fi = typeFacet.entrySet().iterator(); + while (fi.hasNext()) { + Map.Entry<String, Map> facet = fi.next(); + String facetType = (String) facet.getValue().get("type"); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, Checking {} type {}", facet.getKey(), facetType); + + //here is the question: do we look for an exact match or .. + //now we check that the type has a capability of a type compatible + //(equal or derived from) the given capability type. 
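+ //e.g. (illustrative) a lookup for Facet.capabilities of type tosca.capabilities.Endpoint also + //accepts a capability declared with a derived type such as tosca.capabilities.Endpoint.Admin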
+ if (catalog.isDerivedFrom( + theFacet.construct(), facetType, theFacetType)) { + res.putIfAbsent(facet.getKey(), facet.getValue()); + } + } + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, found {}", res); + + return res; + } + + private Map<String, Object> + findTypeFacetByName(Construct theTypeConstruct, + String theTypeName, + Facet theFacet, + String theFacetName) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByName {} {}", theTypeConstruct, theTypeName); + Iterator<Map.Entry<String, Map>> i = + catalog.hierarchy(theTypeConstruct, theTypeName); + while (i.hasNext()) { + Map.Entry<String, Map> typeSpec = i.next(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByName, Checking {} type {}", theTypeConstruct, typeSpec.getKey()); + Map<String, Map> typeFacet = + (Map<String, Map>) typeSpec.getValue().get(theFacet.name()); + if (typeFacet == null) { + continue; + } + Map<String, Object> facet = typeFacet.get(theFacetName); + if (facet != null) { + return facet; + } + } + return null; + } + + /* Requirements are the oddball as they are structured as a sequence .. */ + private Map<String, Map> findNodeTypeRequirementByName( + String theNodeType, String theRequirementName) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findNodeTypeRequirementByName {}/{}", theNodeType, theRequirementName); + Iterator<Map.Entry<String, Map>> i = + catalog.hierarchy(Construct.Node, theNodeType); + while (i.hasNext()) { + Map.Entry<String, Map> nodeType = i.next(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findNodeTypeRequirementByName, Checking node type {}", nodeType.getKey()); + List<Map<String, Map>> nodeTypeRequirements = + (List<Map<String, Map>>) nodeType.getValue().get(REQUIREMENTS); + if (nodeTypeRequirements == null) { + continue; + } + + for (Map<String, Map> requirement : nodeTypeRequirements) { + Map requirementDef = requirement.get(theRequirementName); + if (requirementDef != null) { + return requirementDef; + } + } + } + return null; + } + + /* + * Additional generic checks to be performed on any definition: construct, + * construct types, etc .. + */ + public boolean checkDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + if (theDefinition == null) { + theContext.addError("Missing definition for " + theName, null); + return false; + } + + if (theDefinition.isEmpty()) { + theContext.addError("Empty definition for " + theName, null); + return false; + } + + return true; + } + + private boolean checkDefinition(String theName, + List theDefinition, + CheckContext theContext) { + if (theDefinition == null) { + theContext.addError("Missing definition for " + theName, null); + return false; + } + + if (theDefinition.isEmpty()) { + theContext.addError("Empty definition for " + theName, null); + return false; + } + + return true; + } + + /* plenty of one-entry maps around */ + private Map.Entry mapEntry(Object theMap) { + return (Map.Entry) ((Map) theMap).entrySet().iterator().next(); + } + + /** + * Given that we remembered the canonical forms that were needed during + * validation to replace the short forms we can apply them to the target + * yaml. + * We take advantage here of the fact that the context path maintained + * during validation is compatible with (j)xpath, with the exception of + * sequence/array indexing .. 
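+ * e.g. (hypothetical path) a canonical form recorded at '/topology_template/inputs/0' needs its 0-based + * index segment rewritten by patchIndexes below into the 1-based '[1]' predicate that jxpath expects.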
+ */ + + private String patchIndexes(CharSequence thePath) { + Matcher m = indexPattern.matcher(thePath); + StringBuffer path = new StringBuffer(); + while (m.find()) { + String index = m.group(); + index = "[" + (Integer.valueOf(index.substring(1)).intValue() + 1) + "]"; + m.appendReplacement(path, Matcher.quoteReplacement(index)); + } + m.appendTail(path); + return path.toString(); + } + + private String patchWhitespaces(String thePath) { + String[] elems = thePath.split("/"); + StringBuffer path = new StringBuffer(); + for (int i = 0; i < elems.length; i++) { + if (spacePattern.matcher(elems[i]).find()) { + path.append("[@name='") + .append(elems[i]) + .append("']"); + } else { + path.append("/") + .append(elems[i]); + } + } + return path.toString(); + } + + private void applyCanonicals(Object theTarget, + Map<String, Object> theCanonicals) { + if (theCanonicals.isEmpty()) { + return; + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "applying canonicals: {}", theCanonicals); + applyCanonicals(theTarget, theCanonicals, "/", false); + } + + /* + * applies canonicals selectively + */ + private void applyCanonicals(Object theTarget, + Map<String, Object> theCanonicals, + String thePrefix, + boolean doRemove) { + + JXPathContext jxPath = JXPathContext.newContext(theTarget); + for (Iterator<Map.Entry<String, Object>> ces = + theCanonicals.entrySet().iterator(); + ces.hasNext(); ) { + Map.Entry<String, Object> ce = ces.next(); + //should we check prefix before or after normalization ?? + String path = ce.getKey(); + if (path.startsWith(thePrefix)) { + path = patchWhitespaces( + patchIndexes(path)); + try { + jxPath.setValue(path, ce.getValue()); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Applied canonical form (prefix '{}') at: {}", thePrefix, path); + + if (doRemove) { + ces.remove(); + } + } catch (JXPathException jxpx) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Failed to apply canonical to {} {}", theTarget, jxpx); + } + } + } + } + + /* + * commons are built-in and supposed to be bulletproof so any error in here + * goes out loud. + */ + private static Catalog commonsCatalog() { + + synchronized (Catalog.class) { + + if (commonsCatalogInstance != null) { + return commonsCatalogInstance; + } + + //if other templates are going to be part of the common type system + //add them to this list. order is relevant. + final String[] commons = new String[]{ + "tosca/tosca-common-types.yaml"}; + + Checker commonsChecker; + try { + commonsChecker = new Checker(); + + for (String common : commons) { + commonsChecker.check(common, buildCatalog(false)); + Report commonsReport = commonsChecker.targets().iterator().next().getReport(); + + if (commonsReport.hasErrors()) { + throw new RuntimeException("Failed to process commons:\n" + + commonsReport); + } + } + } catch (CheckerException cx) { + throw new RuntimeException("Failed to process commons", cx); + } + commonsCatalogInstance = commonsChecker.catalog; + return commonsCatalogInstance; + } + } + + public static Catalog buildCatalog() { + return buildCatalog(true); + } + + private static Catalog buildCatalog(boolean doCommons) { + + Catalog catalog = new Catalog(doCommons ? commonsCatalog() : null); + if (!doCommons) { + //add core TOSCA types + for (Data.CoreType type : Data.CoreType.class.getEnumConstants()) { + catalog.addType(Construct.Data, type.toString(), Collections.emptyMap()); + } + } + return catalog; + } + + private boolean invokeHook(String theHookName, + Class[] theArgTypes, + Object... 
theArgs) { + + Invokable hookHandler = null; + try { + Method m = Checker.class.getDeclaredMethod( + theHookName, theArgTypes); + m.setAccessible(true); + hookHandler = Invokable.from(m); + } catch (NoSuchMethodException nsmx) { + //that's ok, not every rule has to have a handler + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "That's ok, not every rule has to have a handler. Method name = {}", theHookName); + } + + if (hookHandler != null) { + try { + hookHandler.invoke(this, theArgs); + } catch (InvocationTargetException | IllegalAccessException itx) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invocation failed for hook handler {} {}", theHookName, itx); + } catch (Exception x) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Hook handler failed {} {}", theHookName, x); + } + } + + return hookHandler != null; + } + + private void validationHook(String theTiming, + Object theTarget, + Rule theRule, + Validator.ValidationContext theContext) { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "looking up validation handler for {}, {} {}", theRule.getName(), theTiming, theContext.getPath()); + if (!invokeHook(theRule.getName() + "_" + theTiming + "_validation_handler", + validationHookArgTypes, + theTarget, theRule, theContext)) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "no validation handler for {}", theRule.getName() + "_" + theTiming); + } + } + + private void checks(String theName, + Object theTarget, + CheckContext theContext) { + Map<Method, Object> handlers = checks.row(/*theName*/theContext.getPath(theName)); + if (handlers != null) { + for (Map.Entry<Method, Object> handler : handlers.entrySet()) { + try { + handler.getKey().invoke(handler.getValue(), new Object[]{theTarget, theContext}); + } catch (Exception x) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Check {} with {} failed {}", theName, handler.getKey(), x); + } + } + } else { + boolean hasHook = false; + for (Class[] argTypes : checkHookArgTypes) { + hasHook |= invokeHook("check_" + theName, + argTypes, + theTarget, theContext); + //shouldn't we stop as soon as hasHook is true?? + } + + if (!hasHook) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "no check handler for {}", theName); + } + } + } + + private void catalogs(String theName, + Object theTarget, + CheckContext theContext) { + + Map<Method, Object> handlers = catalogs.row(/*theName*/theContext.getPath(theName)); + if (handlers != null) { + for (Map.Entry<Method, Object> handler : handlers.entrySet()) { + try { + handler.getKey().invoke(handler.getValue(), new Object[]{theTarget, theContext}); + } catch (Exception x) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Cataloging {} with {} failed {}", theName, handler.getKey(), x); + } + } + } + } + + private class TOSCAValidator extends Validator { + + //what we're validating + private Target target; + + /* Some of the TOSCA entries accept a 'short form/notation' instead of the canonical map representation. + * kwalify cannot easily express these alternatives and as such we handle them here. In the pre-validation phase we detect the presence of a short notation +and compute the canonical form and validate it. In the post-validation phase we +substitute the canonical form for the short form so that checking does not have to deal with it. 
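+e.g. (illustrative) a requirement assignment given in short form as 'host: some_node' is expanded +to the canonical 'host: {node: some_node}' so the checking phase only ever sees the map form.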
+ */ + + private Map<String, Object> canonicals = new TreeMap<>(); + + TOSCAValidator(Target theTarget, Object theSchema) { + super(theSchema); + this.target = theTarget; + } + + public Target getTarget() { + return this.target; + } + + /* hook method called by Validator#validate() + */ + @Override + protected boolean preValidationHook(Object value, Rule rule, ValidationContext context) { + + validationHook("pre", value, rule, context); + //short form handling + String hint = rule.getShort(); + if (value != null && + hint != null) { + + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Attempting canonical at {}, rule {}", context.getPath(), rule.getName()); + + Object canonical = null; + //if the canonical form requires a collection + if (Types.isCollectionType(rule.getType())) { + //and the actual value isn't one + if (!(value instanceof Map || value instanceof List)) { + //used to use singleton map/list here (was good for catching errors) + //but there is the possibility of short forms within short forms so + //the created canonicals need to accommodate other values. + if (Types.isMapType(rule.getType())) { + canonical = new HashMap(); + ((Map) canonical).put(hint, value); + } else { + //the hint is irrelevant here but we should impose a value when the target is a list + canonical = new LinkedList(); + ((List) canonical).add(value); + } + } else { + //we can accommodate: + // map to list of map transformation + if (!Types.isMapType(rule.getType()) /* a seq */ && + value instanceof Map) { + canonical = new LinkedList(); + ((List) canonical).add(value); + } else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Grammar for rule {} (at {}) would require unsupported short form transformation: {} to {}", rule.getName(), context.getPath(), value.getClass(), rule.getType()); + return false; + } + } + + int errc = context.errorCount(); + validateRule(canonical, rule, context); + if (errc != context.errorCount()) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Short notation for {} through {} at {} failed validation", rule.getName(), hint, context.getPath()); + } else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Short notation for {} through {} at {} passed validation. Canonical form is {}", rule.getName(), hint, context.getPath(), canonical); + //replace the short notation with the canonical one so we don't + //have to deal with it again during checking + this.canonicals.put(context.getPath(), canonical); + return true; + } + } else { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Grammar for rule {} (at {}) would require unsupported short form transformation: {} to {}", rule.getName(), context.getPath(), value.getClass(), rule.getType()); + } + } + + //perform default validation process + return false; + } + + /* + * Only gets invoked once the value was successfully verified against the syntax indicated by the given rule. + */ + @Override + protected void postValidationHook(Object value, + Rule rule, + ValidationContext context) { + validationHook("post", value, rule, context); + } + + } + + /** + * Maintains state across the checking process. 
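+ * e.g. (illustrative) nested enter() calls made while checking a node template property yield a + * path such as 'topology_template/node_templates/my_server/properties' in reported errors.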
+ */ + public class CheckContext { + + private Target target; + private ArrayList<String> elems = new ArrayList<>(10); + private ArrayList<Construct> constructs = new ArrayList<>(10); + + CheckContext(Target theTarget) { + this.target = theTarget; + } + + public CheckContext enter(String theName) { + return enter(theName, null); + } + + public CheckContext enter(String theName, Construct theConstruct) { + this.elems.add(theName); + this.constructs.add(theConstruct); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering check {} {}", theName, getPath()); + return this; + } + + public CheckContext exit() { + String path = getPath(); + String name = this.elems.remove(this.elems.size() - 1); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "exiting check {} {}", name, path); + this.constructs.remove(this.constructs.size() - 1); + return this; + } + + public String getPath() { + return buildPath(null); + } + + String getPath(String theNextElem) { + return buildPath(theNextElem); + } + + String buildPath(String theElem) { + StringBuilder sb = new StringBuilder(); + for (String e : this.elems) { + sb.append(e) + .append("/"); + } + if (theElem != null) { + sb.append(theElem) + .append("/"); + } + + return sb.substring(0, sb.length() - 1); + } + + public String enclosingConstruct(Construct theConstruct) { + for (int i = this.constructs.size() - 1; i > 0; i--) { + Construct c = this.constructs.get(i); + if (c != null && c.equals(theConstruct)) { + return this.elems.get(i); + } + } + return null; + } + + public CheckContext addError(String theMessage, Throwable theCause) { + this.target.report(new TargetError("", getPath(), theMessage, theCause)); + return this; + } + + public Checker checker() { + return Checker.this; + } + + public Catalog catalog() { + return Checker.this.catalog; + } + + public Target target() { + return this.target; + } + + public String toString() { + return "CheckContext(" + this.target.getLocation() + "," + getPath() + ")"; + } + } + + // -------------------------------------------------------------------------------------------------- // + + private String errorReport(List<Throwable> theErrors) { + StringBuilder sb = new StringBuilder(theErrors.size() + " errors"); + for (Throwable x : theErrors) { + sb.append("\n"); + if (x instanceof ValidationException) { + ValidationException vx = (ValidationException) x; + // .append("at ") + // .append(error.getLineNumber()) + // .append(" : ") + sb.append("[").append(vx.getPath()).append("] "); + } else if (x instanceof TargetError) { + TargetError tx = (TargetError) x; + sb.append("[").append(tx.getLocation()).append("] "); + } + sb.append(x.getMessage()); + if (x.getCause() != null) { + sb.append("\n\tCaused by:\n").append(x.getCause()); + } + } + sb.append("\n"); + return sb.toString(); + } + + protected void range_definition_post_validation_handler(Object theValue, Rule theRule, + Validator.ValidationContext theContext) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering range_definition {}", + theContext.getPath()); + + assert theRule.getType().equals("seq"); + List bounds = (List) theValue; + + if (bounds.size() != 2) { + theContext.addError("Invalid bounds specification: exactly 2 values expected", theRule, theValue, null); + return; + } + + try { + Double.parseDouble(bounds.get(0).toString()); + } catch (NumberFormatException nfe) { + theContext.addError("Lower bound not a number", theRule, theValue, null); + } + + try { + Double.parseDouble(bounds.get(1).toString()); + } 
catch (NumberFormatException nfe) { + if (!"UNBOUNDED".equals(bounds.get(1).toString())) { + theContext.addError("Upper bound not a number or 'UNBOUNDED'", theRule, theValue, null); + } + } + + } + + /* + * early processing (validation time) of the imports allows us to catalog + * their types before those declared in the main document. + */ + protected void imports_post_validation_handler(Object theValue, Rule theRule, + Validator.ValidationContext theContext) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering imports {}", theContext.getPath()); + assert theRule.getType().equals("seq"); + + Target tgt = ((TOSCAValidator) theContext.getValidator()).getTarget(); + + applyCanonicals(tgt.getTarget(), ((TOSCAValidator) theContext.getValidator()).canonicals, "/imports", true); + + for (ListIterator li = ((List) theValue).listIterator(); li.hasNext();) { + + Map.Entry importEntry = mapEntry(li.next()); + + Map def = (Map) importEntry.getValue(); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Processing import {}", def); + + String tfile = (String) def.get("file"); + Target tgti = this.locator.resolve(tfile); + if (tgti == null) { + theContext.addError("Failure to resolve import '" + def + "', imported from " + tgt, theRule, null, + null); + continue; + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Import {} located at {}", def, + tgti.getLocation()); + + if (this.catalog.addTarget(tgti, tgt)) { + // we've never seen this import (location) before + try { + + List<Target> tgtis = parseTarget(tgti); + if (tgtis.isEmpty()) + continue; + + if (tgtis.size() > 1) { + theContext.addError( + "Import '" + tgti + "', imported from " + tgt + ", contains multiple yaml documents", + theRule, null, null); + continue; + } + + tgti = tgtis.get(0); + + // tgti = parseTarget(tgti); + if (tgt.getReport().hasErrors()) { + theContext.addError("Failure parsing import '" + tgti + "', imported from " + tgt, theRule, null, + null); + continue; + } + + validateTarget(tgti); + if (tgt.getReport().hasErrors()) { + theContext.addError("Failure validating import '" + tgti + "', imported from " + tgt, theRule, + null, null); + continue; + } + } catch (CheckerException cx) { + theContext.addError("Failure validating import '" + tgti + "', imported from " + tgt, theRule, cx, + null); + } + } + + // replace with the actual location (also because this is what they + // get + // indexed by .. 
bad, this exposes catalog inner workings) + + def.put("file", tgti.getLocation()); + } + } + + protected void node_templates_post_validation_handler(Object theValue, Rule theRule, + Validator.ValidationContext theContext) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering node_templates_post_validation_handler {}", + theContext.getPath()); + assert theRule.getType().equals("map"); + Map<String, Map> nodeTemplates = (Map<String, Map>) theValue; + for (Iterator<Map.Entry<String, Map>> i = nodeTemplates.entrySet().iterator(); i.hasNext();) { + Map.Entry<String, Map> node = i.next(); + try { + catalog.addTemplate(((TOSCAValidator) theContext.getValidator()).getTarget(), Construct.Node, + node.getKey(), node.getValue()); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Node template {} has been cataloged", + node.getKey()); + } catch (CatalogException cx) { + theContext.addError(cx.toString(), theRule, node, null); + } + } + } + + protected void inputs_post_validation_handler(Object theValue, Rule theRule, + Validator.ValidationContext theContext) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering inputs_post_validation_handler {}", + theContext.getPath()); + assert theRule.getType().equals("map"); + + // we'll repeat this test during checking but because we index inputs + // early + // we need it here too + if (theValue == null) { + return; + } + + Map<String, Map> inputs = (Map<String, Map>) theValue; + for (Iterator<Map.Entry<String, Map>> i = inputs.entrySet().iterator(); i.hasNext();) { + Map.Entry<String, Map> input = i.next(); + try { + catalog.addTemplate(((TOSCAValidator) theContext.getValidator()).getTarget(), Construct.Data, + input.getKey(), input.getValue()); + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Input {} has been cataloged", + input.getKey()); + } catch (CatalogException cx) { + theContext.addError(cx.toString(), theRule, input, null); + } + } + } + + private void process(String theProcessorSpec) throws CheckerException { + + String[] spec = theProcessorSpec.split(" "); + if (spec.length == 0) + throw new IllegalArgumentException("Incomplete processor specification"); + + Class processorClass = null; + try { + processorClass = Class.forName(spec[0]); + } catch (ClassNotFoundException cnfx) { + throw new CheckerException("Cannot find processor implementation", cnfx); + } + + Processor proc = null; + try { + proc = (Processor) ConstructorUtils.invokeConstructor(processorClass, + Arrays.copyOfRange(spec, 1, spec.length)); + } catch (Exception x) { + throw new CheckerException("Cannot instantiate processor", x); + } + + process(proc); + } + + protected void check_artifact_definition(String theName, Map theDef, CheckContext theContext) { + theContext.enter(theName, Construct.Artifact); + + try { + if (!checkDefinition(theName, theDef, theContext)) { + return; + } + // check artifact type + if (!checkType(Construct.Artifact, theDef, theContext)) + return; + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_policy_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Policy); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map<String, Map>) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Policy, theName, theDefinition, Facet.properties, theContext); + } + + // the targets can be 
known node types or group types + List<String> targets = (List<String>) theDefinition.get("targets"); + if (targets != null) { + if (checkDefinition("targets", targets, theContext)) { + for (String target : targets) { + if (!(this.catalog.hasType(Construct.Node, target) + || this.catalog.hasType(Construct.Group, target))) { + theContext.addError( + "The 'targets' entry must contain a reference to a node type or group type, '" + + target + "' is none of those", + null); + } + } + } + } + + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_group_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Group); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map<String, Map>) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Group, theName, theDefinition, Facet.properties, theContext); + } + + if (theDefinition.containsKey("targets")) { + checkTypeReference(Construct.Node, theContext, + ((List<String>) theDefinition.get("targets")).toArray(EMPTY_STRING_ARRAY)); + } + + // interfaces + Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get("interfaces"); + if (interfaces != null) { + try { + theContext.enter("interfaces"); + for (Iterator<Map.Entry<String, Map>> i = interfaces.entrySet().iterator(); i.hasNext();) { + Map.Entry<String, Map> e = i.next(); + check_type_interface_definition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_node_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Node); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map<String, Map>) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Node, theName, theDefinition, Facet.properties, theContext); + } + + if (theDefinition.containsKey("attributes")) { + check_properties((Map<String, Map>) theDefinition.get("attributes"), theContext); + checkTypeConstructFacet(Construct.Node, theName, theDefinition, Facet.attributes, theContext); + } + + // requirements + if (theDefinition.containsKey("requirements")) { + check_requirements((List<Map>) theDefinition.get("requirements"), theContext); + } + + // capabilities + if (theDefinition.containsKey("capabilities")) { + check_capabilities((Map<String, Map>) theDefinition.get("capabilities"), theContext); + } + + // interfaces: + Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get("interfaces"); + if (interfaces != null) { + try { + theContext.enter("interfaces"); + for (Iterator<Map.Entry<String, Map>> i = interfaces.entrySet().iterator(); i.hasNext();) { + Map.Entry<String, Map> e = i.next(); + check_type_interface_definition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + // artifacts + + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_interface_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Interface); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + // not much else here: a list of operation_definitions, each with + // its + // implementation and 
inputs + + // check that common inputs are re-defined in a compatible manner + + // check that the interface operations are overwritten in a + // compatible manner + // for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet() + + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_artifact_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Artifact); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_relationship_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Relationship); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map<String, Map>) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, Facet.properties, theContext); + } + + if (theDefinition.containsKey("attributes")) { + check_properties((Map<String, Map>) theDefinition.get("attributes"), theContext); + checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, Facet.attributes, theContext); + } + + Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get("interfaces"); + if (interfaces != null) { + theContext.enter("interfaces"); + for (Iterator<Map.Entry<String, Map>> i = interfaces.entrySet().iterator(); i.hasNext();) { + Map.Entry<String, Map> e = i.next(); + check_type_interface_definition(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + if (theDefinition.containsKey("valid_target_types")) { + checkTypeReference(Construct.Capability, theContext, + ((List<String>) theDefinition.get("valid_target_types")).toArray(EMPTY_STRING_ARRAY)); + } + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_capability_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Capability); + + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map<String, Map>) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Capability, theName, theDefinition, Facet.properties, theContext); + } + + if (theDefinition.containsKey("attributes")) { + check_attributes((Map<String, Map>) theDefinition.get("attributes"), theContext); + checkTypeConstructFacet(Construct.Capability, theName, theDefinition, Facet.attributes, theContext); + } + + // valid_source_types: see capability_type_definition + // unclear: how is the valid_source_types list definition evolving + // across + // the type hierarchy: additive, overwriting, ?? 
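+ // e.g. (illustrative) valid_source_types: [tosca.nodes.SoftwareComponent] restricts which node + // types may act as the source of this capability; each listed name is verified below to be a + // known node type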
+ if (theDefinition.containsKey("valid_source_types")) { + checkTypeReference(Construct.Node, theContext, + ((List<String>) theDefinition.get("valid_source_types")).toArray(EMPTY_STRING_ARRAY)); + } + } finally { + theContext.exit(); + } + } + + /* */ + protected void check_data_type_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName, Construct.Data); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + + if (theDefinition.containsKey("properties")) { + check_properties((Map<String, Map>) theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Data, theName, theDefinition, Facet.properties, theContext); + } + } finally { + theContext.exit(); + } + } + + /* + * top level rule, we collected the whole information set. this is where + * checking starts + */ + protected void check_service_template_definition(Map<String, Object> theDef, CheckContext theContext) { + theContext.enter(""); + + if (theDef == null) { + theContext.addError("Empty template", null); + return; + } + + // !!! imports need to be processed first now that catalogging takes + // place at check time!! + + // first catalog whatever it is there to be cataloged so that the checks + // can perform cross-checking + for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator(); ri.hasNext();) { + Map.Entry<String, Object> e = ri.next(); + catalogs(e.getKey(), e.getValue(), theContext); + } + + for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator(); ri.hasNext();) { + Map.Entry<String, Object> e = ri.next(); + checks(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + protected void check_attribute_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + if (!checkDataType(theDefinition, theContext)) { + return; + } + } finally { + theContext.exit(); + } + } + + public void check_attributes(Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter("attributes"); + try { + if (!checkDefinition("attributes", theDefinitions, theContext)) + return; + + for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext();) { + Map.Entry<String, Map> e = i.next(); + check_attribute_definition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + + protected void check_property_definition(String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName); + if (!checkDefinition(theName, theDefinition, theContext)) { + return; + } + // check the type + if (!checkDataType(theDefinition, theContext)) { + return; + } + // check default value is compatible with type + Object defaultValue = theDefinition.get("default"); + if (defaultValue != null) { + checkDataValuation(defaultValue, theDefinition, theContext); + } + + theContext.exit(); + } + + public void check_properties(Map<String, Map> theDefinitions, CheckContext theContext) { + theContext.enter("properties"); + try { + if (!checkDefinition("properties", theDefinitions, theContext)) + return; + + for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext();) { + Map.Entry<String, Map> e = i.next(); + check_property_definition(e.getKey(), e.getValue(), theContext); + } + } finally { + theContext.exit(); + } + } + +} + diff --git 
a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CheckerException.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CheckerException.java new file mode 100644 index 0000000..1963c28 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CheckerException.java @@ -0,0 +1,18 @@ +package org.onap.sdc.dcae.checker; + + +/** + * A checker exception represents an error that stops the checker from + * completing its task. + */ +public class CheckerException extends Exception { + + public CheckerException(String theMsg, Throwable theCause) { + super(theMsg, theCause); + } + + public CheckerException(String theMsg) { + super(theMsg); + } + +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java new file mode 100644 index 0000000..295a1f2 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/CommonLocator.java @@ -0,0 +1,144 @@ +package org.onap.sdc.dcae.checker; + +import java.io.InputStream; +import java.io.IOException; + +import java.net.URL; +import java.net.URI; +import java.net.URISyntaxException; + +import java.nio.file.Paths; + +import java.util.Set; +import java.util.LinkedHashSet; + +import com.google.common.collect.Iterables; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; + + +public class CommonLocator implements TargetLocator { + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private Set<URI> searchPaths = new LinkedHashSet(); + + /* will create a locator with 2 default search paths: the file directory + * from where the app was launched and the jar from which this checker (actually this + * class) was loaded */ + public CommonLocator() { + addSearchPath( + Paths.get(".").toAbsolutePath().normalize().toUri()); + } + + public CommonLocator(String... theSearchPaths) { + for (String path: theSearchPaths) { + addSearchPath(path); + } + } + + public boolean addSearchPath(URI theURI) { + + if (!theURI.isAbsolute()) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Search paths must be absolute uris: {}", theURI); + return false; + } + + return searchPaths.add(theURI); + } + + public boolean addSearchPath(String thePath) { + URI suri = null; + try { + suri = new URI(thePath); + } + catch(URISyntaxException urisx) { + errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid search path: {} {}", thePath, urisx); + return false; + } + + return addSearchPath(suri); + } + + public Iterable<URI> searchPaths() { + return Iterables.unmodifiableIterable(this.searchPaths); + } + + /** + * Takes the given path, first resolves it as a URI and then attempts to open + * it (a way of verifying its existence) against each search path and stops + * at the first successful test. 
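+ * Resolution order, as coded below: classpath resource first, then an absolute URI, then each + * registered search path in insertion order; null is returned when every attempt fails.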
+     */
+    public Target resolve(String theName) {
+        URI puri = null;
+        InputStream pis = null;
+
+        //try classpath
+        URL purl = getClass().getClassLoader().getResource(theName);
+        if (purl != null) {
+            try {
+                return new Target(theName, purl.toURI());
+            }
+            catch (URISyntaxException urisx) {
+                errLogger.log(LogLevel.ERROR, this.getClass().getName(), "Classpath resource {} yielded an invalid URI {}", theName, urisx);
+            }
+        }
+
+        //try absolute
+        try {
+            puri = new URI(theName);
+            if (puri.isAbsolute()) {
+                try {
+                    pis = puri.toURL().openStream();
+                }
+                catch (IOException iox) {
+                    errLogger.log(LogLevel.WARN, this.getClass().getName(), "The path {} is an absolute uri but it cannot be opened {}", theName, iox);
+                    return null;
+                }
+                finally {
+                    if (pis != null) {
+                        try {
+                            pis.close();
+                        }
+                        catch (IOException iox) {
+                        }
+                    }
+                }
+                //the absolute uri could be opened: resolve to it
+                return new Target(theName, puri.normalize());
+            }
+        }
+        catch(URISyntaxException urisx) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "TargetResolver failed attempting {} {}", puri, urisx);
+            //keep it silent but what are the chances ..
+        }
+
+        //try relative to the search paths
+        for (URI suri: searchPaths) {
+            try {
+                puri = suri.resolve(theName);
+                debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "TargetResolver trying {}", puri);
+                pis = puri.toURL().openStream();
+                return new Target(theName, puri.normalize());
+            }
+            catch (Exception x) {
+                debugLogger.log(LogLevel.ERROR, this.getClass().getName(), "TargetResolver failed attempting {} {}", puri, x);
+                continue;
+            }
+            finally {
+                if (pis != null) {
+                    try {
+                        pis.close();
+                    }
+                    catch (IOException iox) {
+                    }
+                }
+            }
+        }
+
+        return null;
+    }
+
+    public String toString() {
+        return "CommonLocator(" + this.searchPaths + ")";
+    }
+
+
+    public static void main(String[] theArgs) {
+        TargetLocator tl = new CommonLocator();
+        tl.addSearchPath(java.nio.file.Paths.get("").toUri());
+        tl.addSearchPath("file:///");
+        debugLogger.log(LogLevel.DEBUG, CommonLocator.class.getName(), tl.resolve(theArgs[0]).toString());
+    }
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java
new file mode 100644
index 0000000..b05cff9
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Construct.java
@@ -0,0 +1,22 @@
+package org.onap.sdc.dcae.checker;
+
+/*
+ * What exactly is allowed to go in here is a subject of meditation :) I would have said 'elements with a type' but
+ * that would not cover Requirement and Workflow; 'topology template top-level elements' would not cover others either ..
+ *
+ * Properties/Attributes/Inputs/Outputs are just Data constructs under a particular name.
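+ * For example (illustrative): a node_types entry is checked as a Construct.Node
+ * type, while each of a data type's properties is checked as a named
+ * Construct.Data occurrence.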
+ */
+public enum Construct {
+    Data,
+    Requirement,
+    Capability,
+    Relationship,
+    Artifact,
+    Interface,
+    Node,
+    Group,
+    Policy,
+    Workflow
+}
+
+
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java
new file mode 100644
index 0000000..70552bb
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Data.java
@@ -0,0 +1,895 @@
+package org.onap.sdc.dcae.checker;
+
+import java.util.Collection;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.EnumSet;
+
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+
+import com.google.common.collect.Table;
+import com.google.common.collect.HashBasedTable;
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+
+/*
+ * String -- 'primitive tosca type' converters, used in verifying valuations
+ */
+public class Data {
+    private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+    private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+    private Data() {
+    }
+
+    /*
+     */
+    @FunctionalInterface
+    public static interface Evaluator {
+
+        public boolean eval(Object theExpr, Map theDef, Checker.CheckContext theCtx);
+    }
+
+
+    /* data type processing */
+
+    private static Map<String,Type> typesByName = new HashMap<String,Type>();
+    static {
+        //CoreType.String.toString();
+        //CoreFunction.concat.toString();
+        //Constraint.equal.toString();
+    }
+
+
+    public static Data.Type typeByName(String theName) {
+        return typesByName.getOrDefault(theName, userType);
+    }
+/*
+    public static Evaluator getTypeEvaluator(Type theType) {
+    }
+*/
+
+    /* Needs a better name ?? RValue??
+     * This is not an rvalue (C def) per se but the construct whose instances
+     * yield rvalues. It is a construct that yields data, not the data (yield)
+     * itself.
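+     * For example (illustrative): typeByName("string") yields CoreType.String,
+     * whose evaluator accepts "abc"; an unknown type name falls back to the
+     * generic userType placeholder.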
+     */
+    public static interface Type {
+
+        public String name();
+
+        public Evaluator evaluator();
+
+        public Evaluator constraintsEvaluator();
+    }
+
+    /* generic placeholder
+     */
+    private static Type userType = new Type() {
+
+        public String name() {
+            return null;
+        }
+
+        public Evaluator evaluator() {
+            return Data::evalUser;
+        }
+
+        public Evaluator constraintsEvaluator() {
+            return Data::evalUserConstraints;
+        }
+    };
+
+
+    public static enum CoreType implements Type {
+
+        String("string",
+            (expr,def,ctx) -> expr != null && expr instanceof String,
+            Data::evalScalarConstraints),
+        Integer("integer",
+            (expr,def,ctx) -> Data.valueOf(ctx, expr, Integer.class),
+            Data::evalScalarConstraints),
+        Float("float",
+            (expr,def,ctx) -> Data.valueOf(ctx, expr, Double.class, Integer.class),
+            Data::evalScalarConstraints),
+        Boolean("boolean",
+            (expr,def,ctx) -> Data.valueOf(ctx, expr, Boolean.class),
+            Data::evalScalarConstraints),
+        Null("null",
+            (expr,def,ctx) -> expr.equals("null"),
+            null),
+        Timestamp("timestamp",
+            (expr,def,ctx) -> timestampRegex.matcher(expr.toString()).matches(),
+            null),
+        List("list", Data::evalList, Data::evalListConstraints),
+        Map("map", Data::evalMap, Data::evalMapConstraints),
+        Version("version",
+            (expr,def,ctx) -> versionRegex.matcher(expr.toString()).matches(),
+            null),
+        /* use a scanner and check that the upper bound is indeed greater than
+         * the lower bound */
+        Range("range",
+            (expr,def,ctx) -> rangeRegex.matcher(expr.toString()).matches(),
+            null),
+        Size("scalar-unit.size",
+            (expr,def,ctx) -> sizeRegex.matcher(expr.toString()).matches(),
+            null),
+        Time("scalar-unit.time",
+            (expr,def,ctx) -> timeRegex.matcher(expr.toString()).matches(),
+            null),
+        Frequency("scalar-unit.frequency",
+            (expr,def,ctx) -> frequencyRegex.matcher(expr.toString()).matches(),
+            null);
+
+
+        private String toscaName;
+        private Evaluator valueEvaluator,
+                          constraintsEvaluator;
+
+        private CoreType(String theName, Evaluator theValueEvaluator, Evaluator theConstraintsEvaluator) {
+            this.toscaName = theName;
+            this.valueEvaluator = theValueEvaluator;
+            this.constraintsEvaluator = theConstraintsEvaluator;
+
+            if (typesByName == null)
+                throw new RuntimeException("No type index available!");
+
+            typesByName.put(this.toscaName, this);
+        }
+
+        public String toString() {
+            return this.toscaName;
+        }
+
+        public Evaluator evaluator() {
+            return this.valueEvaluator;
+        }
+
+        public Evaluator constraintsEvaluator() {
+            return this.constraintsEvaluator;
+        }
+    }
+
+    private static Pattern timestampRegex = null,
+                           versionRegex = null,
+                           rangeRegex = null,
+                           sizeRegex = null,
+                           timeRegex = null,
+                           frequencyRegex = null;
+
+    static {
+        try {
+            timestampRegex = Pattern.compile(
+                    "\\p{Digit}+"); //?? where to find the definition
+
+            //<major_version>.<minor_version>[.<fix_version>[.<qualifier>[-<build_version>]]]
+            versionRegex = Pattern.compile(
+                    "\\p{Digit}+\\.\\p{Digit}+?(\\.\\p{Digit}+(\\.\\p{Alpha}+(\\-\\p{Digit}+))*)*");
+
+            rangeRegex = Pattern.compile(
+                    "\\[[ ]*\\p{Digit}+(\\.\\p{Digit}+)?[ ]*\\,[ ]*(\\p{Digit}+(\\.\\p{Digit}+)?|UNBOUNDED)[ ]*\\]");
+
+            sizeRegex = Pattern.compile(
+                    "\\p{Digit}+(\\.\\p{Digit}+)?[ ]*(B|kB|KiB|MB|MiB|GB|GiB|TB|TiB)");
+
+            timeRegex = Pattern.compile(
+                    "\\p{Digit}+(\\.\\p{Digit}+)?[ ]*(d|h|m|s|ms|us|ns)");
+
+            frequencyRegex = Pattern.compile(
+                    "\\p{Digit}+(\\.\\p{Digit}+)?[ ]*(Hz|kHz|MHz|GHz)");
+        }
+        catch (PatternSyntaxException psx) {
+            throw new RuntimeException("Bad patterns", psx);
+        }
+    }
+
+    /* */
+    public static boolean evalScalarConstraints(Object theVal,
+                                                Map theDef,
+                                                Checker.CheckContext theCtx) {
+        Data.Type type = typeByName((String)theDef.get("type"));
+        List<Map<String,Object>> constraints =
+                (List<Map<String,Object>>)theDef.get("constraints");
+        if (constraints == null) {
+            return true;
+        }
+
+        //check value against constraints
+        boolean res = true;
+        for (Map<String,Object> constraintDef: constraints) {
+            Map.Entry<String,Object> constraintEntry =
+                    constraintDef.entrySet().iterator().next();
+            Data.Constraint constraint = constraintByName(constraintEntry.getKey());
+
+//the def passed here includes all constraints, which is not necessary: we
+//could pass simply constraintEntry.getValue()
+            Evaluator constraintEvaluator = getTypeConstraintEvaluator(type, constraint);
+            if (constraintEvaluator == null) {
+                debugLogger.log(LogLevel.DEBUG, Data.class.getName(), "No constraint evaluator available for {}/{}", type, constraint);
+                continue;
+            }
+
+            if (!constraintEvaluator.eval(theVal, theDef, theCtx)) {
+                theCtx.addError("Value " + theVal + " failed constraint " + constraintEntry, null);
+                res = false;
+            }
+        }
+        return res;
+    }
+
+    /*
+     * It assumes the specification is complete, i.e. it contains a valid
+     * entry_schema section.
+     * TODO: check constraints, i.e. entrySchema.get("constraints")
+     */
+    public static boolean evalList(Object theVal,
+                                   Map theDef,
+                                   Checker.CheckContext theCtx) {
+        try {
+            return evalCollection((List)theVal, theDef, theCtx);
+        }
+        catch (ClassCastException ccx) {
+            theCtx.addError("Value " + theVal + " not a list", null);
+            return false;
+        }
+    }
+
+    public static boolean evalMap(Object theVal,
+                                  Map theDef,
+                                  Checker.CheckContext theCtx) {
+        try {
+            return evalCollection(((Map)theVal).values(), theDef, theCtx);
+        }
+        catch (ClassCastException ccx) {
+            theCtx.addError("Value " + theVal + " not a map", null);
+            return false;
+        }
+    }
+
+
+    /**
+     * The elements of a collection can be of a core type or user defined type.
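+     * For example (illustrative): with entry_schema {type: string}, the list
+     * ["a", "b"] passes while [1, 2] fails element evaluation.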
+     */
+    private static boolean evalCollection(Collection theVals,
+                                          Map theDef,
+                                          Checker.CheckContext theCtx) {
+        Data.Type entryType = null;
+        Map entryTypeDef = (Map)theDef.get("entry_schema");
+        if (null != entryTypeDef)
+            entryType = typeByName((String)entryTypeDef.get("type"));
+
+        boolean res = true;
+        for (Object val: theVals) {
+            //check if the value is not a function call
+            Data.Function f = Data.function(val);
+            if (f != null &&
+                !f.evaluator().eval(val, entryTypeDef, theCtx)) {
+                res = false;
+            }
+            else if (entryType != null &&
+                !entryType.evaluator().eval(val, entryTypeDef, theCtx)) {
+                res = false;
+                //the error should have been reported by the particular evaluator
+                //theCtx.addError("Value " + val + " failed evaluation", null);
+            }
+        }
+        return res;
+    }
+
+    public static boolean evalListConstraints(Object theVal,
+                                              Map theDef,
+                                              Checker.CheckContext theCtx) {
+        return evalCollectionConstraints((List)theVal, theDef, theCtx);
+    }
+
+    public static boolean evalMapConstraints(Object theVal,
+                                             Map theDef,
+                                             Checker.CheckContext theCtx) {
+        return evalCollectionConstraints(((Map)theVal).values(), theDef, theCtx);
+    }
+
+    private static boolean evalCollectionConstraints(Collection theVals,
+                                                     Map theDef,
+                                                     Checker.CheckContext theCtx) {
+        //should check overall constraints
+
+        if (theVals == null)
+            return true;
+
+        Map entryTypeDef = (Map)theDef.get("entry_schema");
+        if (null == entryTypeDef)
+            return true;
+
+        String entryTypeName = (String)entryTypeDef.get("type");
+        Data.Type entryType = typeByName(entryTypeName);
+
+        boolean res = true;
+        for (Object val: theVals) {
+            Evaluator entryEvaluator = entryType.constraintsEvaluator();
+            if (entryEvaluator != null &&
+                !entryEvaluator.eval(val, entryTypeDef, theCtx)) {
+                res = false;
+                //the constraints evaluator should have already added an error, but it also adds some context
+                //theCtx.addError("Value " + val + " failed evaluation", null);
+            }
+        }
+        return res;
+    }
+
+    /*
+     * All required properties across the hierarchical definition must be present.
+     * TODO: the expr cannot contain any entry not specified in the type definition
+     */
+    public static boolean evalUser(Object theVal,
+                                   Map theDef,
+                                   Checker.CheckContext theCtx) {
+
+        boolean res = true;
+        Map val = (Map)theVal;
+        //must be done with respect to the super-type(s) definition
+        Iterator<Map.Entry> props = theCtx.catalog()
+                .facets(Construct.Data,
+                        Facet.properties,
+                        (String)theDef.get("type"));
+        while (props.hasNext()) {
+            Map.Entry propEntry = props.next();
+            Map propDef = (Map)propEntry.getValue();
+            Object propVal = val.get(propEntry.getKey());
+
+            if (propVal != null) {
+                Data.Type propType = typeByName((String)propDef.get("type"));
+
+                if (!propType.evaluator().eval(propVal, propDef, theCtx)) {
+                    res = false;
+                    //the evaluator should have already added an error
+                    //theCtx.addError("Property " + propEntry.getKey() + " failed evaluation for " + propVal, null);
+                }
+            }
+        }
+        return res;
+    }
+
+    public static boolean evalUserConstraints(Object theVal,
+                                              Map theDef,
+                                              Checker.CheckContext theCtx) {
+        boolean res = true;
+        Map val = (Map)theVal;
+        Iterator<Map.Entry> props = theCtx.catalog()
+                .facets(Construct.Data,
+                        Facet.properties,
+                        (String)theDef.get("type"));
+        while (props.hasNext()) {
+            Map.Entry propEntry = props.next();
+            Map propDef = (Map)propEntry.getValue();
+            Object propVal = val.get(propEntry.getKey());
+
+            if (propVal != null) {
+                Data.Type propType = typeByName((String)propDef.get("type"));
+
+                if (propType.constraintsEvaluator() != null &&
+                    !propType.constraintsEvaluator().eval(propVal, propDef, theCtx)) {
+                    res = false;
+                    //the constraints evaluator should have already added an error
+                    //theCtx.addError("Property " + propEntry.getKey() + " failed evaluation for " + propVal, null);
+                }
+            }
+            else {
+                if (Boolean.TRUE.equals(propDef.getOrDefault("required", Boolean.FALSE)) &&
+                        !propDef.containsKey("default")) {
+                    theCtx.addError("Property " + propEntry.getKey() + " failed 'required' constraint; definition is " + propDef, null);
+                    res = false;
+                }
+            }
+        }
+        return res;
+    }
+
+    private static boolean valueOf(Checker.CheckContext theCtx,
+                                   Object theExpr,
+                                   Class ... theTypes) {
+        for (Class type: theTypes) {
+            if (type.isAssignableFrom(theExpr.getClass())) {
+                return true;
+            }
+        }
+
+        theCtx.addError("Expression " + theExpr + " as " + theExpr.getClass().getName() + " is not compatible with any of required types: " + Arrays.toString(theTypes), null);
+        return false;
+    }
+
+/*
+    private static boolean valueOf(Class theTarget,
+                                   String theExpr,
+                                   Checker.CheckContext theCtx) {
+        try {
+            theTarget.getMethod("valueOf", new Class[] {String.class})
+                .invoke(null, theExpr);
+            return true;
+        }
+        catch (InvocationTargetException itx) {
+            theCtx.addError("Failed to parse " + theExpr + " as a " + theTarget.getName(), itx.getCause());
+            return false;
+        }
+        catch (Exception x) {
+            theCtx.addError("Failed to valueOf " + theExpr + " as a " + theTarget.getName(), x);
+            return false;
+        }
+    }
+*/
+
+    /*
+     * Function e(valuation)
+     * ?
+     * note to self: is there a more efficient way of retrieving a map's
+     * single entry? (without knowing the key)
+     *
+     * ! Function evaluators have to handle a null definition (i.e. perform
+     * argument checking) so that we can use them in the context of collections
+     * with or without entry_schemas
+     */
+
+    //just as Type but is it worth expressing this 'commonality'??
+
+    public static interface Function {
+
+        public String name();
+
+        public Evaluator evaluator();
+    }
+
+    /*
+     * This is a heuristic induced from the tosca specification .. it answers the
+     * question of whether the given expression is a function
+     */
+    public static Function function(Object theExpr) {
+        if (theExpr instanceof Map &&
+                ((Map)theExpr).size() == 1) {
+            try {
+                return Enum.valueOf(CoreFunction.class, functionName(theExpr));
+            }
+            catch (IllegalArgumentException iax) {
+                //no such function but we cannot really record an error as we only guessed the expression as being a function ..
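+                //e.g. (illustrative): {get_input: cpu_count} resolves to CoreFunction.get_input,
+                //while a one-entry data map such as {vendor: acme} lands here and is treated as plain data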
+                debugLogger.log(LogLevel.DEBUG, Data.class.getName(), "Failed attempt to interpret {} as a function call", theExpr);
+            }
+        }
+
+        return null;
+    }
+
+    /*
+     */
+    public static String functionName(Object theExpr) {
+        return (String)
+                ((Map.Entry)
+                        ((Map)theExpr).entrySet().iterator().next())
+                        .getKey();
+    }
+
+    /*
+     */
+    public static Data.Function functionByName(String theName) {
+        return Enum.valueOf(CoreFunction.class, theName);
+    }
+
+    /*
+     */
+    public static enum CoreFunction implements Function {
+
+        concat(Data::evalConcat),
+        token(Data::evalToken),
+        get_input(Data::evalGetInput),
+        get_property(Data::evalGetProperty),
+        get_attribute(Data::evalGetAttribute),
+        get_operation_output((expr,def,ctx) -> true),
+        get_nodes_of_type(Data::evalGetNodesOfType),
+        get_artifact((expr,def,ctx) -> true);
+
+        private Evaluator evaluator;
+
+        private CoreFunction(Evaluator theEval) {
+            this.evaluator = theEval;
+        }
+
+        public Evaluator evaluator() {
+            return this.evaluator;
+        }
+    }
+
+    private static boolean evalConcat(
+            Object theVal, Map theDef, Checker.CheckContext theCtx) {
+        return true;
+    }
+
+    private static boolean evalToken(
+            Object theVal, Map theDef, Checker.CheckContext theCtx) {
+        return true;
+    }
+
+    private static boolean evalGetInput(
+            Object theVal, Map theDef, Checker.CheckContext theCtx) {
+        Map val = (Map)theVal;
+        Map.Entry entry = (Map.Entry)val.entrySet().iterator().next();
+
+        if (!(entry.getValue() instanceof String)) {
+            theCtx.addError("get_input: argument must be a String", null);
+            return false;
+        }
+
+        //check that an input with the given name exists and has a compatible type
+        Map inputDef = theCtx.catalog()
+                .getTemplate(theCtx.target(), Construct.Data, (String)entry.getValue());
+        if (inputDef == null) {
+            theCtx.addError("get_input: no such input " + entry.getValue(), null);
+            return false;
+        }
+
+        if (theDef == null)
+            return true;
+
+        //the output must be type compatible with the input
+        String targetType = (String)theDef.get("type");
+        if (targetType != null) {
+            String inputType = (String)inputDef.get("type");
+
+            if (!theCtx.catalog()
+                    .isDerivedFrom(Construct.Data, inputType, targetType)) {
+                theCtx.addError("get_input: input type " + inputType + " is incompatible with the target type " + targetType, null);
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+    /*
+     * Who's the smarty that decided to define optional arguments in between
+     * required ones ?!
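+     * (for reference, the spec's shape is roughly:
+     *  get_property: [ <entity_name>, <optional_cap_or_req_name>, <property_name>, <nested_name_or_index>* ])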
+     * (factors out the evaluation of get_attribute and get_property)
+     */
+    private static boolean evalGetData(
+            Object theVal, Map theDef,
+            EnumSet<Facet> theFacets, Checker.CheckContext theCtx) {
+
+        Map val = (Map)theVal;
+        Map.Entry entry = (Map.Entry)val.entrySet().iterator().next();
+
+        if (!(entry.getValue() instanceof List)) {
+            theCtx.addError("get_property: argument must be a List", null);
+            return false;
+        }
+
+        List args = (List)entry.getValue();
+        if (args.size() < 2) {
+            theCtx.addError("'get_property' requires at least 2 arguments", null);
+            return false;
+        }
+
+        //the first argument is a node or relationship template
+        String tmpl = (String)args.get(0);
+        Construct tmplConstruct = null;
+        Map tmplSpec = null;
+
+        if ("SELF".equals(tmpl)) {
+            tmpl = theCtx.enclosingConstruct(Construct.Node);
+            if (tmpl == null) {
+                tmpl = theCtx.enclosingConstruct(Construct.Relationship);
+                if (tmpl == null) {
+                    theCtx.addError("'get_property' invalid SELF reference: no node or relationship template in scope at " + theCtx.getPath(), null);
+                    return false;
+                }
+                else {
+                    tmplConstruct = Construct.Relationship;
+                }
+            }
+            else {
+                tmplConstruct = Construct.Node;
+            }
+            tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), tmplConstruct, tmpl);
+        }
+        else if ("SOURCE".equals(tmpl)) {
+            //we are in the scope of a relationship template and this is the source node template.
+            tmpl = theCtx.enclosingConstruct(Construct.Relationship);
+            if (tmpl == null) {
+                theCtx.addError("'get_property' invalid SOURCE reference: no relationship template in scope at " + theCtx.getPath(), null);
+                return false;
+            }
+
+            return true;
+        }
+        else if ("TARGET".equals(tmpl)) {
+            //we are in the scope of a relationship template and this is the target node template.
+            tmpl = theCtx.enclosingConstruct(Construct.Relationship);
+            if (tmpl == null) {
+                theCtx.addError("'get_property' invalid TARGET reference: no relationship template in scope at " + theCtx.getPath(), null);
+                return false;
+            }
+
+            return true;
+        }
+        else if ("HOST".equals(tmpl)) {
+            tmpl = theCtx.enclosingConstruct(Construct.Node);
+            if (tmpl == null) {
+                theCtx.addError("'get_property' invalid HOST reference: no node template in scope at " + theCtx.getPath(), null);
+                return false;
+            }
+
+            return true;
+        }
+        else {
+            //try node template first
+            tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), Construct.Node, tmpl);
+            if (tmplSpec == null) {
+                //try relationship
+                tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), Construct.Relationship, tmpl);
+                if (tmplSpec == null) {
+                    theCtx.addError("'get_data' invalid template reference '" + tmpl + "': no node or relationship template with this name", null);
+                    return false;
+                }
+                else {
+                    tmplConstruct = Construct.Relationship;
+                }
+            }
+            else {
+                tmplConstruct = Construct.Node;
+            }
+        }
+
+        int facetNameIndex = 1;
+        Construct facetConstruct = tmplConstruct; //the construct the facet is supposed to belong to
+        Map facetConstructSpec = null;
+        String facetConstructType = null;
+
+        if (tmplConstruct.equals(Construct.Node) &&
+                args.size() > 2) {
+            //the second arg might be a capability or requirement name. If it is a
+            //capability then the third argument becomes a property of the
+            //corresponding capability type. If it is a requirement then the
+            //requirement definition indicates a capability whose type has a
+            //property with the name indicated in the third argument ..
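+            //e.g. (illustrative): { get_property: [ SELF, host, num_cpus ] } reads
+            //property 'num_cpus' from the 'host' capability (or from the capability
+            //bound to a 'host' requirement)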
+            //
+            //while the spec does not make it explicit, this can only take place
+            //if the first argument turned out to be a node template (as relationship
+            //templates/types do not have capabilities/requirements)
+            String secondArg = (String)args.get(1);
+            if ((facetConstructSpec = theCtx.catalog().getFacetDefinition(
+                    tmplConstruct,
+                    (String)tmplSpec.get("type"),
+                    Facet.capabilities,
+                    secondArg)) != null) {
+                facetNameIndex = 2;
+                facetConstruct = Construct.Capability;
+                facetConstructType = (String)facetConstructSpec.get("type");
+            }
+            else if ((facetConstructSpec = theCtx.catalog().getRequirementDefinition(
+                    tmplConstruct,
+                    (String)tmplSpec.get("type"),
+                    secondArg)) != null) {
+                facetNameIndex = 2;
+                facetConstruct = Construct.Capability;
+
+                //find the spec of the capability this requirement points to
+                //TODO: check, can the capability reference be anything else but a capability type?
+                facetConstructType = (String)facetConstructSpec.get("capability");
+            }
+        }
+        else {
+            //we'll attempt to handle it as a property of the node template
+            facetConstruct = Construct.Node;
+            facetConstructSpec = tmplSpec;
+            facetConstructType = (String)facetConstructSpec.get("type");
+        }
+
+        //validate the facet name
+        Map facetSpec = null;
+        {
+            String facetName = (String)args.get(facetNameIndex);
+            for (Facet facet: theFacets) {
+                facetSpec = theCtx.catalog()
+                        .getFacetDefinition(
+                                facetConstruct,
+                                facetConstructType,
+                                facet,
+                                facetName);
+                if (facetSpec != null)
+                    break;
+            }
+
+            if (facetSpec == null) {
+//TODO: not the greatest message if the call started with a requirement ..
+                theCtx.addError("'get_data' invalid reference, '" + facetConstruct + "' " + facetConstructType + " has no " + theFacets + " with name " + facetName, null);
+                return false;
+            }
+        }
+
+        //the rest of the arguments have to resolve to a field of the property's
+        //data type; the propertySpec contains the type specification
+        //TODO: nested field resolution is not implemented yet; the loop below is a stub
+        for (int i = facetNameIndex + 1; i < args.size(); i++) {
+        }
+
+        return true;
+    }
+
+    /**/
+    private static boolean evalGetProperty(
+            Object theVal, Map theDef, Checker.CheckContext theCtx) {
+        return evalGetData(theVal, theDef, EnumSet.of(Facet.properties), theCtx);
+    }
+
+    /*
+     * get_property and get_attribute are identical, just operating on different
+     * facets, with one exception: there is an intrinsic attribute for every
+     * declared property.
+     */
+    private static boolean evalGetAttribute(
+            Object theVal, Map theDef, Checker.CheckContext theCtx) {
+        return evalGetData(theVal, theDef, EnumSet.of(Facet.attributes, Facet.properties), theCtx);
+    }
+
+    private static boolean evalGetNodesOfType(
+            Object theVal, Map theDef, Checker.CheckContext theCtx) {
+
+        Map val = (Map)theVal;
+        Map.Entry entry = (Map.Entry)val.entrySet().iterator().next();
+
+        if (!(entry.getValue() instanceof String)) {
+            theCtx.addError("get_nodes_of_type: argument must be a String", null);
+            return false;
+        }
+
+        String arg = (String)entry.getValue();
+
+        if (null == theCtx.catalog().getTypeDefinition(Construct.Node, arg)) {
+            theCtx.addError("get_nodes_of_type: no such node type " + arg, null);
+            return false;
+        }
+        else {
+            return true;
+        }
+    }
+
+    /* */
+    public static Constraint constraintByName(String theName) {
+        return Enum.valueOf(Constraint.class, theName);
+    }
+
+    /* */
+    public static Constraint constraint(Object theExpr) {
+        if (theExpr instanceof Map &&
+                ((Map)theExpr).size() == 1) {
+            return constraintByName(constraintName(theExpr));
+        }
+
+        return null;
+    }
+
+    /* */
+    public static String constraintName(Object theExpr) {
+        return (String)
+                ((Map.Entry)
+                        ((Map)theExpr).entrySet().iterator().next())
+                        .getKey();
+    }
+
+    private static Object getConstraintValue(Map theDef,
+                                             Constraint theConstraint) {
+        List<Map> constraints = (List<Map>)theDef.get("constraints");
+        if (null == constraints)
+            return null;
+
+        for (Map constraint: constraints) {
+            Object val = constraint.get(theConstraint.toString());
+            if (val != null)
+                return val;
+        }
+        return null;
+    }
+
+    public static enum Constraint {
+        equal,
+        greater_than,
+        greater_or_equal,
+        less_than,
+        less_or_equal,
+        in_range,
+        valid_values,
+        length,
+        min_length,
+        max_length,
+        pattern;
+    }
+
+
+    /* holds the constraint evaluators for pairs of type/constraint.
+     * If a pair is not present then the given constraint does not apply
+     * to the type.
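+     * For example (illustrative): (String, max_length) maps to a length check,
+     * while (Boolean, max_length) has no entry and is accepted by the
+     * catch-all evaluator returned at the end of getTypeConstraintEvaluator.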
+     */
+    private static Table<Type,Constraint,Evaluator> typeConstraintEvaluator = null;
+
+    public static Evaluator
+            getTypeConstraintEvaluator(Type theType, Constraint theConstraint) {
+        if (typeConstraintEvaluator == null) {
+            typeConstraintEvaluator = HashBasedTable.create();
+
+            typeConstraintEvaluator.put(CoreType.String, Constraint.equal,
+                (val,def,ctx) -> val.equals(getConstraintValue(def,Constraint.equal)));
+            typeConstraintEvaluator.put(CoreType.String, Constraint.valid_values,
+                (val,def,ctx) -> ((List)getConstraintValue(def,Constraint.valid_values)).contains(val));
+            typeConstraintEvaluator.put(CoreType.String, Constraint.length,
+                (val,def,ctx) -> ((String)val).length() == ((Number)getConstraintValue(def,Constraint.length)).intValue());
+            typeConstraintEvaluator.put(CoreType.String, Constraint.min_length,
+                (val,def,ctx) -> ((String)val).length() >= ((Number)getConstraintValue(def,Constraint.min_length)).intValue());
+            typeConstraintEvaluator.put(CoreType.String, Constraint.max_length,
+                (val,def,ctx) -> ((String)val).length() <= ((Number)getConstraintValue(def,Constraint.max_length)).intValue());
+            typeConstraintEvaluator.put(CoreType.String, Constraint.pattern,
+                (val,def,ctx) -> Pattern.compile((String)getConstraintValue(def,Constraint.pattern))
+                        .matcher((String)val)
+                        .matches());
+
+            typeConstraintEvaluator.put(CoreType.Integer, Constraint.equal,
+                (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.equal)) == 0);
+            typeConstraintEvaluator.put(CoreType.Integer, Constraint.greater_than,
+                (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.greater_than)) > 0);
+            typeConstraintEvaluator.put(CoreType.Integer, Constraint.greater_or_equal,
+                (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.greater_or_equal)) >= 0);
+            typeConstraintEvaluator.put(CoreType.Integer, Constraint.less_than,
+                (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.less_than)) < 0);
+            typeConstraintEvaluator.put(CoreType.Integer, Constraint.less_or_equal,
+                (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.less_or_equal)) <= 0);
+            typeConstraintEvaluator.put(CoreType.Integer, Constraint.in_range,
+                (val,def,ctx) -> { List<Integer> range = (List<Integer>)getConstraintValue(def, Constraint.in_range);
+                    return ((Integer)val).compareTo(range.get(0)) >= 0 &&
+                           ((Integer)val).compareTo(range.get(1)) <= 0;
+                });
+            typeConstraintEvaluator.put(CoreType.Integer, Constraint.valid_values,
+                (val,def,ctx) -> ((List<Integer>)getConstraintValue(def, Constraint.valid_values)).contains((Integer)val));
+
+//the yaml parser represents yaml floats as java Doubles; we are even more tolerant,
+//as many double values get written as ints and the parser will then substitute an Integer
+            typeConstraintEvaluator.put(CoreType.Float, Constraint.equal,
+                (val,def,ctx) -> ((Number)val).doubleValue() == ((Number)getConstraintValue(def,Constraint.equal)).doubleValue());
+            typeConstraintEvaluator.put(CoreType.Float, Constraint.greater_than,
+                (val,def,ctx) -> ((Number)val).doubleValue() > ((Number)getConstraintValue(def,Constraint.greater_than)).doubleValue());
+            typeConstraintEvaluator.put(CoreType.Float, Constraint.greater_or_equal,
+                (val,def,ctx) -> ((Number)val).doubleValue() >= ((Number)getConstraintValue(def,Constraint.greater_or_equal)).doubleValue());
+            typeConstraintEvaluator.put(CoreType.Float, Constraint.less_than,
+                (val,def,ctx) ->
+                    ((Number)val).doubleValue() < ((Number)getConstraintValue(def,Constraint.less_than)).doubleValue());
+            typeConstraintEvaluator.put(CoreType.Float, Constraint.less_or_equal,
+                (val,def,ctx) -> ((Number)val).doubleValue() <= ((Number)getConstraintValue(def,Constraint.less_or_equal)).doubleValue());
+            typeConstraintEvaluator.put(CoreType.Float, Constraint.in_range,
+                (val,def,ctx) -> { List<Number> range = (List<Number>)getConstraintValue(def, Constraint.in_range);
+                    return ((Number)val).doubleValue() >= range.get(0).doubleValue() &&
+                           ((Number)val).doubleValue() <= range.get(1).doubleValue();
+                });
+            typeConstraintEvaluator.put(CoreType.Float, Constraint.valid_values,
+                (val,def,ctx) -> ((List<Number>)getConstraintValue(def, Constraint.valid_values)).contains((Number)val));
+        }
+
+        Evaluator eval = typeConstraintEvaluator.get(theType, theConstraint);
+
+        return eval == null ? (expr,def,ctx) -> true
+                            : eval;
+    }
+
+
+    private static boolean stringValidValues(String theVal,
+                                             List<String> theValidValues,
+                                             Checker.CheckContext theCtx) {
+        if (!theValidValues.contains(theVal)) {
+            theCtx.addError("not a valid value: " + theVal + " not part of " + theValidValues, null);
+            return false;
+        }
+
+        return true;
+    }
+
+    public static void main(String[] theArgs) {
+        Data.CoreType dt = Enum.valueOf(Data.CoreType.class, theArgs[0]);
+        debugLogger.log(LogLevel.DEBUG, Data.class.getName(), "{} > {}", theArgs[1], dt.evaluator().eval(theArgs[1], null, null));
+    }
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Facet.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Facet.java
new file mode 100644
index 0000000..3dfd140
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Facet.java
@@ -0,0 +1,37 @@
+package org.onap.sdc.dcae.checker;
+
+/*
+ * Oddballs:
+ * - requirements (a requirement does not have a type, i.e. it is not based
+ * on a Construct) and can target a node, a capability or both .. When present
+ * as a facet of another Construct it is also the only one represented as a
+ * sequence, so it will need special handling anyway.
+ */
+public enum Facet {
+
+    inputs(Construct.Data),
+    outputs(Construct.Data),
+    properties(Construct.Data),
+    attributes(Construct.Data),
+    capabilities(Construct.Capability),
+    //requirements(Construct.Capability),//??
+    artifacts(Construct.Artifact),
+    interfaces(Construct.Interface);
+    /*
+      Node
+      Relationship
+      they can be considered as facets of the topology template ...
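+      e.g. (illustrative) the 'properties' facet of a Data type holds named
+      Construct.Data definitions; Data.evalUser walks exactly that via
+      catalog().facets(Construct.Data, Facet.properties, ...)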
+     */
+
+    private Construct construct;
+
+    private Facet(Construct theConstruct) {
+        this.construct = theConstruct;
+    }
+
+    public Construct construct() {
+        return this.construct;
+    }
+}
+
+
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/JSP.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/JSP.java
new file mode 100644
index 0000000..797b4e2
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/JSP.java
@@ -0,0 +1,624 @@
+package org.onap.sdc.dcae.checker;
+
+import java.io.IOException;
+import java.io.File;
+
+import java.net.URI;
+
+import java.util.Set;
+import java.util.Map;
+import java.util.List;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.stream.Collectors;
+import java.util.function.Consumer;
+import java.util.function.BiFunction;
+
+import javax.script.Compilable;
+import javax.script.CompiledScript;
+import javax.script.Bindings;
+import javax.script.ScriptContext;
+import javax.script.SimpleScriptContext;
+import javax.script.ScriptEngine;
+import javax.script.ScriptEngineManager;
+import javax.script.ScriptException;
+
+import jdk.nashorn.api.scripting.JSObject;
+import jdk.nashorn.api.scripting.AbstractJSObject;
+
+import org.apache.commons.jxpath.JXPathContext;
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+import org.onap.sdc.common.onaplog.Enums.LogLevel;
+
+
+/**
+ * Java Script Processor
+ * Each script is represented by a Target and the JSP processor maintains a collection of Targets, i.e. scripts.
+ * A collection of targets can be used with only one JSP processor at a time (as the processor stores
+ * processor-specific compiled versions within the target).
+ */
+public class JSP implements Processor<JSP> {
+
+    private ScriptEngine engine;
+    private Collection<? extends Target> targets;
+
+    public JSP(String[] theScripts) {
+        this(Arrays.stream(theScripts)
+                .map(s -> new Target(s, new File(s).toURI()))
+                .collect(Collectors.toList()));
+    }
+
+    public JSP(File[] theScripts) {
+        this(Arrays.stream(theScripts)
+                .map(s -> new Target(s.getName(), s.toURI()))
+                .collect(Collectors.toList()));
+    }
+
+    public JSP(URI[] theScripts) {
+        this(Arrays.stream(theScripts)
+                .map(s -> new Target(s.toString(), s))
+                .collect(Collectors.toList()));
+    }
+
+    /**
+     * The given collection is allowed to change while used by the JSP engine but access to it needs to be synchronized.
+     * The engine uses the target field of each Target to store a compiled version of each script. An external reset of
+     * this field (maybe in order to indicate some change in the Target) will cause a re-compilation of the Target.
+     */
+    public JSP(Collection<? extends Target> theTargets) {
+        this.targets = theTargets;
+        ScriptEngineManager engineManager = new ScriptEngineManager();
+        this.engine = engineManager.getEngineByName("nashorn");
+    }
+
+    public Collection<? extends Target> targets() {
+        return this.targets;
+    }
+
+    /* pre-compiles all known targets
+     */
+    protected void compile() throws ProcessorException {
+        synchronized (this.targets) {
+            for (Target t: this.targets)
+                compile(t);
+        }
+    }
+
+    protected CompiledScript compile(Target theTarget) throws ProcessorException {
+
+        CompiledScript cs = null;
+
+        synchronized(theTarget) {
+            try {
+                cs = (CompiledScript)theTarget.getTarget();
+            }
+            catch(ClassCastException ccx) {
+                throw new ProcessorException(theTarget, "Unexpected target content");
+            }
+
+            if (cs == null) {
+                try {
+                    cs = ((Compilable)this.engine).compile(theTarget.open());
+                    theTarget.setTarget(cs);
+                }
+                catch (IOException iox) {
+                    throw new ProcessorException(theTarget, "Failed to read script", iox);
+                }
+                catch (ScriptException sx) {
+                    throw new ProcessorException(theTarget, "Failed to compile script", sx);
+                }
+            }
+        }
+
+        return cs;
+    }
+
+    public ContextBuilder process(Catalog theCatalog) {
+        return new ContextBuilder(
+                this.engine.createBindings())
+                //new DelegateBindings(this.engine.getBindings(ScriptContext.ENGINE_SCOPE)))
+                .with("catalog", new JSCatalog(theCatalog));
+    }
+
+    /**
+     */
+    public class ContextBuilder implements ProcessBuilder<JSP> {
+
+        private ScriptContext context;
+
+        protected ContextBuilder(Bindings theBindings) {
+            this.context = new SimpleScriptContext();
+            this.context.setBindings(theBindings, Process.PROCESS_SCOPE /*ScriptContext.ENGINE_SCOPE*/);
+        }
+
+        public ContextBuilder withPreprocessing(BiFunction<Target, ScriptContext, Boolean> thePreprocessing) {
+            this.context.setAttribute("preprocessor", thePreprocessing, Process.PROCESS_SCOPE);
+            return this;
+        }
+
+        public ContextBuilder withPostprocessing(BiFunction<Target, ScriptContext, Boolean> thePostprocessing) {
+            this.context.setAttribute("postprocessor", thePostprocessing, Process.PROCESS_SCOPE);
+            return this;
+        }
+
+        public ContextBuilder with(String theName, Object theValue) {
+            this.context.getBindings(Process.PROCESS_SCOPE).put(theName, theValue);
+            return this;
+        }
+
+        public ContextBuilder withOpt(String theName, Object theValue) {
+            if (theValue != null)
+                this.context.getBindings(Process.PROCESS_SCOPE).put(theName, theValue);
+            return this;
+        }
+
+        public JSProcess process() {
+            return new JSProcess(this.context);
+        }
+
+    }
+
+    /**
+     */
+    public class JSProcess implements Process<JSP> {
+
+        private Report report = new Report();
+        private Iterator<? extends Target> scripts;
+        private JScriptInfo scriptInfo = new JScriptInfo();
+        private Target script; //script currently being evaluated
+        private boolean stopped = false;
+        private ScriptContext context;
+
+        private JSProcess(ScriptContext theContext) {
+
+            this.context = theContext;
+            this.context.getBindings(Process.PROCESS_SCOPE)
+                .put("stop", new Consumer<String>() {
+                    public void accept(String theMsg) {
+                        JSProcess.this.stopped = true;
+                        //log the message??
+                    }
+                });
+            this.context.getBindings(Process.PROCESS_SCOPE)
+                .put("report", new Consumer<String>() {
+                    public void accept(String theMsg) {
+                        JSProcess.this.report.add(new ProcessorException(script, theMsg));
+                    }
+                });
+            this.context.getBindings(Process.PROCESS_SCOPE)
+                .put("reportOnce", new Consumer<String>() {
+                    public void accept(String theMsg) {
+                        JSProcess.this.report.addOnce(new ProcessorException(script, theMsg));
+                    }
+                });
+            this.scripts = JSP.this.targets.iterator();
+        }
+
+        protected String infoName(Target theTarget) {
+            String name = theTarget.getName();
+            return name.substring(0, name.indexOf(".")) + "_info";
+        }
+
+        public JSP processor() {
+            return JSP.this;
+        }
+
+        public boolean hasNext() {
+            return !this.stopped && this.scripts.hasNext();
+        }
+
+        protected Target next() {
+            if (hasNext())
+                return this.script = this.scripts.next();
+            else
+                throw new RuntimeException("Process is completed");
+        }
+
+        protected boolean runProcessor(String theName) throws ProcessorException {
+            BiFunction<Target, ScriptContext, Boolean> proc = (BiFunction<Target, ScriptContext, Boolean>)
+                    this.context.getAttribute(theName, Process.PROCESS_SCOPE);
+            if (proc != null) {
+                try {
+                    return proc.apply(this.script, this.context).booleanValue();
+                }
+                catch (Exception x) {
+                    throw new ProcessorException(this.script, theName + " failed", x);
+                }
+            }
+
+            return true;
+        }
+
+        public Process runNext() throws ProcessorException {
+            Target target = next();
+            synchronized(target) {
+                try {
+                    if (runProcessor("preprocessor")) {
+                        compile(target).eval(this.context);
+                        runProcessor("postprocessor");
+                    }
+                }
+                catch (ScriptException sx) {
+                    throw new ProcessorException(target, "Failed to execute validation script", sx);
+                }
+            }
+
+            return this;
+        }
+
+        public Process runNextSilently() {
+            try {
+                return runNext();
+            }
+            catch (ProcessorException px) {
+                this.report.add(px);
+            }
+            return this;
+        }
+
+        public Report run() {
+            while (hasNext())
+                runNextSilently();
+            return this.report;
+        }
+
+        public void stop() {
+            this.stopped = true;
+        }
+
+        public Report report() {
+            return this.report;
+        }
+    }
+
+    private static class JScriptInfo implements TargetInfo {
+
+        private JSObject info;
+
+        protected JScriptInfo() {
+        }
+
+        protected JScriptInfo setInfo(JSObject theInfo) {
+            this.info = theInfo;
+            return this;
+        }
+
+        public Set<String> entryNames() {
+            return this.info == null ? Collections.emptySet() : this.info.keySet();
+        }
+
+        public boolean hasEntry(String theName) {
+            return this.info == null ? false : this.info.hasMember(theName);
+        }
+
+        public Object getEntry(String theName) {
+            return this.info == null ? null :
+                    this.info.hasMember(theName) ? this.info.getMember(theName) : null;
+        }
+    }
+
+
+    /* Exposes the catalog information in a more JavaScript friendly manner.
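+     * A validation script might do something like (illustrative):
+     *
+     *   var nodes = catalog.topTargets()[0].getNodes();
+     *   if (nodes.length == 0)
+     *       report("topology has no node templates");
+     *
+     * where 'catalog' and 'report' are the bindings set up in process() and
+     * JSProcess above.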
+     */
+    public static class JSCatalog {
+
+        private Catalog catalog;
+
+        private JSCatalog(Catalog theCatalog) {
+            this.catalog = theCatalog;
+        }
+
+        /** */
+        public JSTarget[] targets() {
+            return
+                this.catalog.targets()
+                    .stream()
+                    .map(t -> { return new JSTarget(t); })
+                    .toArray(size -> new JSTarget[size]); //or toArray(JSTarget[]::new)
+        }
+
+        public JSTarget[] topTargets() {
+            return
+                this.catalog.topTargets()
+                    .stream()
+                    .map(t -> { return new JSTarget(t); })
+                    .toArray(size -> new JSTarget[size]); //or toArray(JSTarget[]::new)
+        }
+
+        /** */
+        public String[] types(String theConstruct) {
+            Set<String> names =
+                this.catalog.getConstructTypes(Enum.valueOf(Construct.class,theConstruct)).keySet();
+            return names.toArray(new String[names.size()]);
+        }
+
+        /** */
+        public boolean isDerivedFrom(String theConstruct, String theType, String theSuperType) {
+            return this.catalog.isDerivedFrom(Enum.valueOf(Construct.class,theConstruct), theType, theSuperType);
+        }
+
+        /** */
+        public JSObject facetDefinition(String theConstruct, String theType, String theFacet, String theName) {
+            return new JSElement(theName,
+                    this.catalog.getFacetDefinition(
+                            Enum.valueOf(Construct.class, theConstruct), theType,
+                            Enum.valueOf(Facet.class, theFacet), theName));
+        }
+
+
+        /** */
+/*
+        public JSElement[] targetNodes(Target theTarget) {
+            return
+                this.catalog.getTargetTemplates(theTarget, Construct.Node)
+                    .entrySet()
+                    .stream()
+                    .map(e -> { return new JSElement(e.getKey(),e.getValue()); })
+                    .toArray(size -> new JSElement[size]); //or toArray(JSElement[]::new)
+        }
+*/
+
+        public class JSTarget {
+
+            private Target tgt;
+            private JXPathContext jxPath;
+
+            private JSTarget(Target theTarget) {
+                this.tgt = theTarget;
+                this.jxPath = JXPathContext.newContext(this.tgt.getTarget());
+                this.jxPath.setLenient(true);
+            }
+
+            public String getName() { return this.tgt.getName(); }
+
+            public JSElement resolve(String thePath) {
+                Object res = jxPath.getValue(thePath);
+                if (res instanceof Map) {
+                    return new JSElement(thePath, (Map)res);
+                }
+                //??
+                return null;
+            }
+
+            public JSElement[] getInputs() {
+
+                Map<String,Map> inputs = (Map<String,Map>)jxPath.getValue("/topology_template/inputs");
+                return (inputs == null) ?
+                        new JSElement[0]
+                        : inputs.entrySet()
+                            .stream()
+                            .map(e -> { return new JSElement(e.getKey(),e.getValue()); })
+                            .toArray(size -> new JSElement[size]);
+            }
+
+//            public JSElement[] getOutputs() {
+//            }
+
+            public JSElement getMetadata() {
+                return new JSElement("metadata", (Map)jxPath.getValue("/metadata"));
+            }
+
+            public JSElement[] getNodes() {
+                return
+                    JSCatalog.this.catalog.getTargetTemplates(this.tgt, Construct.Node)
+                        .entrySet()
+                        .stream()
+                        .map(e -> { return new JSElement(e.getKey(),e.getValue()); })
+                        .toArray(size -> new JSElement[size]); //or toArray(JSElement[]::new)
+            }
+
+//            public JSElement[] getPolicies() {
+//            }
+
+        }
+
+
+        /*
+         */
+        public class JSElement extends AbstractJSObject {
+
+
+            private String name;
+            private Map def;
+
+            private JSElement(String theName, Object theDef) {
+                this.name = theName;
+                this.def = theDef == null ? Collections.emptyMap()
+                        : (theDef instanceof Map) ?
+                        (Map)theDef
+                        : Collections.singletonMap("value",theDef);
+            }
+
+            public String getName() { return this.name; }
+
+            public boolean hasMember(String theMember) {
+                return this.def.containsKey(theMember);
+            }
+
+            public Object getMember(final String theMember) {
+                Object val = this.def.get(theMember);
+                if (val != null) {
+                    if (val instanceof Map) {
+                        return new JSElement(theMember, val);
+                        /*
+                        return ((Map<String,?>)obj).entrySet()
+                            .stream()
+                            .map((Map.Entry<String,?> e) -> { return new JSElement(e.getKey(),e.getValue()); })
+                            .toArray(size -> new JSElement[size]);
+                        */
+                    }
+
+                    if (val instanceof List) {
+                        //a property value can be a list of: primitive types or maps (for a user defined type)
+                        //requirements are exposed as a list of maps
+                        List lval = (List)val;
+                        if (!lval.isEmpty() && lval.get(0) instanceof Map) {
+                            return lval
+                                .stream()
+                                .map((e) -> new JSElement(theMember, e))
+                                .toArray(size -> new JSElement[size]);
+
+                            /*
+                            return val
+                                .stream()
+                                .map((e) -> {
+                                    Map.Entry<String,?> re = ((Map<String,?>)e).entrySet().iterator().next();
+                                    return new JSElement(re.getKey(), re.getValue());
+                                })
+                                .toArray(size -> new JSElement[size]);
+                            */
+                        }
+                    }
+
+                    return val;
+                }
+                else {
+                    if ("name".equals(theMember))
+                        return this.name;
+                    if ("toString".equals(theMember))
+                        return _toString;
+                    if ("hasOwnProperty".equals(theMember))
+                        return _hasOwnProperty;
+                    return super.getMember(theMember);
+                }
+            }
+            /* TODO: we do not expose 'name' in here */
+            public Set<String> keySet() {
+                return this.def.keySet();
+            }
+
+        }
+
+
+        static final JSObject _toString =
+            new TracerJSObject("_toString") {
+                public Object call(Object thiz, Object... args) {
+                    return ((JSElement)thiz).def.toString();
+                }
+
+                public boolean isFunction() { return true; }
+            };
+
+        static final JSObject _hasOwnProperty =
+            new TracerJSObject("_hasOwnProperty") {
+                public Object call(Object thiz, Object... args) {
+                    return ((JSElement)thiz).def.containsKey(args[0]);
+                }
+
+                public boolean isFunction() { return true; }
+            };
+
+    }//JSCatalog
+
+
+
+    private static class TracerJSObject extends AbstractJSObject {
+
+        private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+        private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+        private String mark;
+
+        TracerJSObject(String theMark) {
+            this.mark = theMark;
+        }
+
+        public Object call(Object thiz, Object... args) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:call", this.mark);
+            return super.call(thiz, args);
+        }
+
+        public Object newObject(Object... args) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:newObject", this.mark);
+            return super.newObject(args);
+        }
+
+        public Object eval(String s) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:eval", this.mark);
+            return super.eval(s);
+        }
+
+        public Object getMember(String name) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:getMember", this.mark);
+            return super.getMember(name);
+        }
+
+        public Object getSlot(int index) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:getSlot", this.mark);
+            return super.getSlot(index);
+        }
+
+        public boolean hasMember(String name) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:hasMember", this.mark);
+            return super.hasMember(name);
+        }
+
+        public boolean hasSlot(int slot) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:hasSlot", this.mark);
+            return super.hasSlot(slot);
+        }
+
+        public void removeMember(String name) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:removeMember", this.mark);
+            super.removeMember(name);
+        }
+
+        public void setMember(String name, Object value) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:setMember", this.mark);
+            super.setMember(name,value);
+        }
+
+        public void setSlot(int index, Object value) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:setSlot", this.mark);
+            super.setSlot(index,value);
+        }
+
+        public Set<String> keySet() {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:JSObject:keySet", this.mark);
+            return super.keySet();
+        }
+
+        public Collection<Object> values() {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:values", this.mark);
+            return super.values();
+        }
+
+        public boolean isInstance(Object instance) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isInstance", this.mark);
+            return super.isInstance(instance);
+        }
+
+        public boolean isInstanceOf(Object clazz) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isInstanceOf", this.mark);
+            return super.isInstanceOf(clazz);
+        }
+
+        public String getClassName() {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:getClassName", this.mark);
+            return super.getClassName();
+        }
+
+        public boolean isFunction() {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isFunction", this.mark);
+            return super.isFunction();
+        }
+
+        public boolean isStrictFunction() {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isStrictFunction", this.mark);
+            return super.isStrictFunction();
+        }
+
+        public boolean isArray() {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:isArray", this.mark);
+            return super.isArray();
+        }
+
+        public Object getDefaultValue(Class<?> hint) {
+            debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{}:getDefaultValue({})", this.mark, hint);
+            return super.getDefaultValue(hint);
+        }
+    }
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Process.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Process.java
new file mode 100644
index 0000000..0f529af
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Process.java
@@ -0,0 +1,29 @@
+package org.onap.sdc.dcae.checker;
+
+/**
+ *
+ */
+public interface Process<T extends Processor> {
+
+    public static final int PROCESS_SCOPE = 100;
+
+    /**
+     * the processor running this process
+     */
+    public T processor();
+
+    /* */
+    public boolean hasNext();
+
+    /* */
+    public Process runNext() throws ProcessorException;
+
+    /* execute all steps to completion
+     */
+    public Report run();
+
+    /* execution report
+     */
+    public Report report();
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessBuilder.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessBuilder.java
new file mode 100644
index 0000000..8295055
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessBuilder.java
@@ -0,0 +1,24 @@
+package org.onap.sdc.dcae.checker;
+
+
+/**
+ * Just in case you might want to do something with a template (set) once it has been checked
+ */
+public interface ProcessBuilder<T extends Processor> {
+
+    /* */
+    public ProcessBuilder<T> with(String theName, Object theValue);
+
+    /* */
+    public ProcessBuilder<T> withOpt(String theName, Object theValue);
+
+    /* */
+    public Process<T> process();
+
+    /* */
+    default public Report run() {
+        return process()
+                .run();
+    }
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Processor.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Processor.java
new file mode 100644
index 0000000..7f29d23
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Processor.java
@@ -0,0 +1,11 @@
+package org.onap.sdc.dcae.checker;
+
+
+/**
+ * Just in case you might want to do something with a template (set) once it has been checked
+ */
+public interface Processor<T extends Processor<T>> {
+
+    /* */
+    public ProcessBuilder<T> process(Catalog theCatalog);
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessorException.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessorException.java
new file mode 100644
index 0000000..d4c5571
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/ProcessorException.java
@@ -0,0 +1,28 @@
+package org.onap.sdc.dcae.checker;
+
+
+/**
+ */
+public class ProcessorException extends CheckerException {
+
+    private Target target;
+
+    public ProcessorException(Target theTarget, String theMsg, Throwable theCause) {
+        super(theMsg, theCause);
+        this.target = theTarget;
+    }
+
+    public ProcessorException(Target theTarget, String theMsg) {
+        super(theMsg);
+        this.target = theTarget;
+    }
+
+    public Target getTarget() {
+        return this.target;
+    }
+
+    @Override
+    public String getMessage() {
"" : ("(" + getCause() + ")")); + } +} diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Report.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Report.java new file mode 100644 index 0000000..0f1b7c3 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Report.java @@ -0,0 +1,102 @@ +package org.onap.sdc.dcae.checker; + +import java.io.IOException; + +import java.util.LinkedList; +import java.util.Collections; + +import org.yaml.snakeyaml.error.MarkedYAMLException; +import kwalify.ValidationException; + +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; + +/** + * Represents a collection of errors that occured during one of the stages + * of the checker: yaml parsing, yaml validation (tosca syntax), tosca checking + */ +/* + * This needs some re-thinking: while it is useful to have all original errors introducing + * the custom json conversion (just to help the service) is not great either. + * I was torn between this approach or creating a custom deserializer and object mapper (which + * would have kept all the customized serialization in the service but then the error analysis + * would be duplicated there too ..). + */ +@JsonSerialize(contentUsing=org.onap.sdc.dcae.checker.Report.ReportEntrySerializer.class) +public class Report<T extends Throwable> extends LinkedList<T> { + + public Report() { + } + + public Report(T[] theErrors) { + Collections.addAll(this, theErrors); + } + + public boolean hasErrors() { + return !this.isEmpty(); + } + + public boolean addOnce(T theError) { + for (T e: this) { + if (e.getMessage().equals(theError.getMessage())) + return false; + } + return add(theError); + } + + public String toString() { + StringBuilder sb = new StringBuilder(this.size() + " errors"); + for (Throwable x: this) { + sb.append("\n") + .append("[") + .append(location(x)) + .append("] ") + .append(x.getMessage()); + if (x.getCause() != null) { + sb.append("\n\tCaused by:\n") + .append(x.getCause()); + } + } + sb.append("\n"); + return sb.toString(); + } + + private static String location(Throwable theError) { + if (theError instanceof MarkedYAMLException) { + MarkedYAMLException mx = (MarkedYAMLException)theError; + return "line " + mx.getProblemMark().getLine() + ", column " + mx.getProblemMark().getColumn(); + } + if (theError instanceof ValidationException) { + ValidationException vx = (ValidationException)theError; + return vx.getPath(); + } + if (theError instanceof TargetError) { + TargetError tx = (TargetError)theError; + return tx.getLocation(); + } + return "unknown"; + } + + + public static class ReportEntrySerializer extends StdSerializer<Throwable> { + + public ReportEntrySerializer() { + super(Throwable.class); + } + + @Override + public void serialize(Throwable theError, JsonGenerator theGenerator, SerializerProvider theProvider) + throws IOException, JsonProcessingException { + theGenerator.writeStartObject(); + theGenerator.writeStringField("location", location(theError)); + theGenerator.writeStringField("message", theError.getMessage()); + if (theError.getCause() != null) + theGenerator.writeStringField("cause", theError.getCause().toString()); + theGenerator.writeEndObject(); + } + } +} + diff --git 
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Repository.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Repository.java
new file mode 100644
index 0000000..9cb853b
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Repository.java
@@ -0,0 +1,50 @@
+package org.onap.sdc.dcae.checker;
+
+
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+
+import java.net.URI;
+import java.net.URL;
+import java.net.MalformedURLException;
+
+import java.util.Map;
+
+/**
+ * Represents a 'container' of (yaml) TOSCA documents
+ */
+public abstract class Repository {
+
+    protected OnapLoggerError errLogger = OnapLoggerError.getInstance();
+    protected OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+    private String name,
+                   description;
+    protected URI rootURI;
+    protected Map credential; //TOSCA type tosca.datatype.Credential
+
+    public Repository(String theName, URI theRoot) {
+        this.name = theName;
+        this.rootURI = theRoot;
+    }
+
+    public String getName() {
+        return this.name;
+    }
+
+    public URI getRoot() {
+        return this.rootURI;
+    }
+
+    /** optional */
+    public abstract Iterable<Target> targets();
+
+    /** */
+    public abstract Target resolve(URI theURI);
+
+    @Override
+    public String toString() {
+        return "Repository " + this.name + " at " + this.rootURI;
+    }
+}
+
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java
new file mode 100644
index 0000000..b630564
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Target.java
@@ -0,0 +1,80 @@
+package org.onap.sdc.dcae.checker;
+
+import org.onap.sdc.common.onaplog.OnapLoggerDebug;
+import org.onap.sdc.common.onaplog.OnapLoggerError;
+
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.io.BufferedReader;
+import java.io.IOException;
+
+import java.net.URI;
+import java.net.URL;
+import java.net.MalformedURLException;
+
+/**
+ * Represents a yaml document to be parsed/validated/checked
+ */
+public class Target {
+
+    private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
+    private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
+
+
+    private String name; //maintained mainly for logging
+    private URI location;
+    private Object target; //this is the parsed form of the target
+
+    private Report report = new Report(); //collects the errors related to this target
+
+    public Target(String theName, URI theLocation) {
+        this.name = theName;
+        this.location = theLocation;
+    }
+
+    public String getName() {
+        return this.name;
+    }
+
+    public URI getLocation() {
+        return this.location;
+    }
+
+    public Report getReport() {
+        return this.report;
+    }
+
+    public void report(Throwable theError) {
+        this.report.add(theError);
+    }
+
+    public void report(String theErrMsg) {
+        this.report.add(new Exception(theErrMsg));
+    }
+
+    public void setTarget(Object theTarget) {
+        this.target = theTarget;
+    }
+
+    public Object getTarget() {
+        return this.target;
+    }
+
+    /*
+     * @return a reader for the source; an IOException is raised if the
+     * location cannot be opened
+     */
+    public Reader open() throws IOException {
+
+        return new BufferedReader(
+                new InputStreamReader(
+                        this.location.toURL().openStream()));
+    }
+
+    public String toString() {
+        //return String.format("Target %s (%.20s ...)", this.location, this.target == null ? "" : this.target.toString());
"" : this.target.toString()); + return String.format("Target %s at %s", this.name, this.location); + + } +} + diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetError.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetError.java new file mode 100644 index 0000000..0764a56 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetError.java @@ -0,0 +1,43 @@ +package org.onap.sdc.dcae.checker; + + +/** + * A target error represents an error in target the resource being checked. + * We only represent it as a Throwable because the libraries that perform parsing and syntax validation + * represent their errors as such .. + */ +public class TargetError extends Throwable { + + /* + public static enum Level { + error, + warning + } + */ + + private String location; //we might need an more detailed representation + //here: it could be a YAML document jpath or + //document location (line). + private String target; + + public TargetError(String theTarget, String theLocation, String theMessage, Throwable theCause) { + super(theMessage, theCause); + this.target = theTarget; + this.location = theLocation; + } + + public TargetError(String theTarget, String theLocation, String theMessage) { + this(theTarget, theLocation, theMessage, null); + } + + public String getTarget() { + return this.target; + } + + public String getLocation() { + return this.location; + } + + +} + diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetInfo.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetInfo.java new file mode 100644 index 0000000..480b6a8 --- /dev/null +++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetInfo.java @@ -0,0 +1,20 @@ +package org.onap.sdc.dcae.checker; + +import java.util.Set; + + +/** + * Exposes target properties. How they are obtained/calculated not of importance here. 
+ */
+public interface TargetInfo {
+
+    /** @return the set of entry names exposed by this target */
+    public Set<String> entryNames();
+
+    /** @return true if the target exposes an entry with the given name */
+    public boolean hasEntry(String theName);
+
+    /** @return the value of the named entry, or null if no such entry exists */
+    public Object getEntry(String theName);
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetLocator.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetLocator.java
new file mode 100644
index 0000000..9b82f16
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/TargetLocator.java
@@ -0,0 +1,20 @@
+package org.onap.sdc.dcae.checker;
+
+import java.net.URI;
+
+
+public interface TargetLocator {
+
+    /** Registers an additional URI to be searched when resolving target names. */
+    public boolean addSearchPath(URI theURI);
+
+    /** Registers an additional path to be searched when resolving target names. */
+    public boolean addSearchPath(String thePath);
+
+    /** @return the search paths currently in use, in resolution order */
+    public Iterable<URI> searchPaths();
+
+    /** Resolves a target name to a Target against the registered search paths. */
+    public Target resolve(String theName);
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Workflows.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Workflows.java
new file mode 100644
index 0000000..88eb192
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/Workflows.java
@@ -0,0 +1,120 @@
+package org.onap.sdc.dcae.checker;
+
+import java.util.Map;
+
+import org.onap.sdc.dcae.checker.annotations.Checks;
+
+import java.util.List;
+import java.util.Iterator;
+
+@Checks
+public class Workflows {
+
+    @Checks(path="/topology_template/workflows")
+    public void check_workflows(Map theDefinition, Checker.CheckContext theContext) {
+
+        theContext.enter("workflows");
+
+        try {
+            if(!theContext.checker().checkDefinition("workflows", theDefinition, theContext))
+                return;
+
+            for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
+                Map.Entry<String,Map> e = i.next();
+                check_workflow_definition(e.getKey(), e.getValue(), theContext);
+            }
+        }
+        finally {
+            theContext.exit();
+        }
+    }
+
+
+    public void check_workflow_definition(String theName, Map theDef, Checker.CheckContext theContext) {
+
+        theContext.enter("workflow", Construct.Workflow);
+
+        try {
+            if (theDef.containsKey("inputs")) {
+                theContext
+                    .checker()
+                    .checkProperties((Map<String,Map>)theDef.get("inputs"), theContext);
+            }
+
+            if (theDef.containsKey("preconditions")) {
+                check_workflow_preconditions_definition((List<Map>)theDef.get("preconditions"), theContext);
+            }
+
+            if (theDef.containsKey("steps")) {
+                check_workflow_steps_definition((Map<String, Map>)theDef.get("steps"), theContext);
+            }
+        }
+        finally {
+            theContext.exit();
+        }
+    }
+
+
+    public void check_workflow_steps_definition(Map theSteps, Checker.CheckContext theContext) {
+
+        theContext.enter("steps");
+
+        try {
+            for (Iterator<Map.Entry<String,Map>> i = theSteps.entrySet().iterator(); i.hasNext(); ) {
+                Map.Entry<String,Map> e = i.next();
+                check_workflow_step_definition(e.getKey(), e.getValue(), theContext);
+            }
+        }
+        finally {
+            theContext.exit();
+        }
+
+    }
+
+    public void check_workflow_step_definition(String theName, Map theDef, Checker.CheckContext theContext) {
+
+        theContext.enter(theName);
+        try {
+            //required entry: must reference a node or group template
+            String target = (String)theDef.get("target");
+            Construct targetConstruct = null;
+
+            if (theContext.catalog().hasTemplate(theContext.target(), Construct.Group, target)) {
+                targetConstruct = Construct.Group;
+            }
+            else if (theContext.catalog().hasTemplate(theContext.target(), Construct.Node, target)) {
+                targetConstruct = Construct.Node;
+            }
+            else {
+                theContext.addError("The 'target' entry must contain a reference to a node template or group template, '" + target + "' is none of those", null);
+            }
+
+            String targetRelationship = (String)theDef.get("target_relationship");
+            //null-safe comparison: targetConstruct stays null when the target was not resolved above
+            if (Construct.Node.equals(targetConstruct)) {
+                if (targetRelationship != null) {
+                    //must be a requirement of the target Node
+                }
+            }
+
+
+        }
+        finally {
+            theContext.exit();
+        }
+    }
+
+    public void check_workflow_preconditions_definition(List<Map> thePreconditions, Checker.CheckContext theContext) {
+
+        theContext.enter("preconditions");
+
+        try {
+            for (Map precondition: thePreconditions) {
+                check_workflow_precondition_definition(precondition, theContext);
+            }
+        }
+        finally {
+            theContext.exit();
+        }
+    }
+
+    public void check_workflow_precondition_definition(Map theDef, Checker.CheckContext theContext) {
+        //no precondition-level checks implemented yet
+    }
+
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/.Validates.java.swp b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/.Validates.java.swp
Binary files differ
new file mode 100644
index 0000000..dae35da
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/.Validates.java.swp
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Catalogs.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Catalogs.java
new file mode 100644
index 0000000..8dbe275
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Catalogs.java
@@ -0,0 +1,14 @@
+package org.onap.sdc.dcae.checker.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+
+/** Marks a handler to be invoked while cataloging the document section at the given path. */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.METHOD})
+public @interface Catalogs {
+    String path() default "/";
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Checks.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Checks.java
new file mode 100644
index 0000000..96349d7
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Checks.java
@@ -0,0 +1,19 @@
+package org.onap.sdc.dcae.checker.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+
+/** Marks a type that groups check handlers, or an individual check handler method. */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.METHOD})
+/* The iffy part: as a type annotation we do not need a path or a version specification;
+   as a method annotation it is mandatory (cannot be the default).
+   We could foresee that a version indication at type level would cover all check handlers within the type.
+ */
+public @interface Checks {
+    String path() default "/";
+    String[] version() default { "1.0", "1.0.0", "1.1", "1.1.0" };
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Validates.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Validates.java
new file mode 100644
index 0000000..29e080d
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/annotations/Validates.java
@@ -0,0 +1,15 @@
+package org.onap.sdc.dcae.checker.annotations;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+
+/** Marks a custom validation handler for a grammar rule, applied at the given timing (e.g. "post"). */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.METHOD})
+public @interface Validates {
+    String rule() default "/";
+    String[] timing() default { "post" };
+}
diff --git a/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/package-info.java b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/package-info.java
new file mode 100644
index 0000000..da2c5ba
--- /dev/null
+++ b/dcaedt_validator/checker/src/main/java/org/onap/sdc/dcae/checker/package-info.java
@@ -0,0 +1,101 @@
+/**
+ * The checker provides an API/tool for the verification of TOSCA yaml files
+ * as specified in the OASIS specification found at:
+ * http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/TOSCA-Simple-Profile-YAML-v1.0.pdf
+ *
+ * It provides a three stage processing of a tosca yaml file:
+ * - yaml verification: is the document a valid yaml document, as per yaml.org/spec?
+ * In particular we're using the snakeyaml library for parsing the yaml document to
+ * a nested structure of java objects.
+ * - tosca yaml grammar validation: is the document a valid tosca yaml
+ * document, as per the TOSCA simple profile for yaml? We use a modified
+ * version of the kwalify library for this task. The grammar for TOSCA yaml
+ * is itself a yaml document (found in the package in
+ * resources/tosca-schema.yaml). There are certain limitations on how far
+ * this grammar can go.
+ * - consistency verification: we check the type hierarchies for all TOSCA
+ * constructs (data types, capability types, node types, etc), the definition
+ * of all facets of a construct (properties, attributes, etc) across the type
+ * hierarchies, the conformity of construct templates (node templates, ..) with
+ * their types, and data valuations (input assignments, constants, function calls).
+ *
+ * Each stage is blocking, i.e. a stage will be performed only if the previous
+ * one completed successfully.
+ *
+ * The verification is done across all the imported documents. The common TOSCA
+ * types are by default made available to all documents being processed (the
+ * specification is in resources/tosca-common-types.yaml). Networking related
+ * types can be made available by importing resources/tosca-network-types.yaml
+ * while the tosca nfv profile definitions are available at
+ * resources/tosca-nfv-types.yaml.
+ *
+ * Besides snakeyaml and kwalify this package also has dependencies on Google's
+ * guava library and Apache's jxpath.
+ *
+ * The three java interfaces exposed by the package are the Checker, Target
+ * and Report. A Target represents a document processed by the Checker. While
+ * the Checker starts with a top Target, through import statements it can end up
+ * processing a number of Targets. The results of processing a Target are made
+ * available through a Report which currently is nothing more than a list of
+ * recorded errors.
+ *
+ * <div>
+ * {@code
+ *     Checker checker = new Checker();
+ *     checker.check("tests/example.yaml");
+ *
+ *     for (Target t: checker.targets())
+ *         System.out.println(t.getLocation() + "\n" + t.getReport());
+ * }
+ * </div>
+ *
+ * The errors are recorded as instances of Exception, mostly due to the fact that
+ * snakeyaml and kwalify do report errors as exceptions. As such there are 3
+ * basic types of errors to be expected in a report: YAMLException (from
+ * snakeyaml, related to parsing), ValidationException (from kwalify, tosca
+ * grammar validation), TargetError (from the checker itself). This might
+ * change as we're looking to unify the way errors are reported. A Report
+ * object has a user-friendly toString function.
+ *
+ * A CheckerException thrown during the checking process is an indication of a
+ * malfunction in the checker itself.
+ *
+ * The checker handles targets as URIs. The resolution of a target consists in
+ * going from a string representing some path/uri to the absolute URI. URIs can
+ * be of any java recognizable schema: file, http, etc. A TargetResolver (not
+ * currently exposed through the API) attempts, in order:
+ * - if the String is an absolute URI, keep it as such
+ * - if the String is a relative URI, attempt to resolve it as relative to
+ * known search paths (pre-configured absolute URIs: current directory and the
+ * root of the main target's URI). The option of adding custom search paths will
+ * be added.
+ * - attempt to resolve as a classpath resource (a jar:file: URI)
+ *
+ * At this time there are no options for the checker (please provide
+ * requirements to be considered).
+ *
+ *
+ *
+ * Other:
+ * - the checker performs during tosca grammar validation a 'normalization'
+ * process, as the tosca yaml profile allows for short forms in the
+ * specification of a number of its constructs (see spec). The checker changes
+ * the actual structure of the parsed document such that only normalized
+ * (complete) forms of specification are present before the checking phase.
+ * (The kwalify library was extended in order to be able to specify these
+ * short forms in the grammar itself and process/tolerate them at validation
+ * time.)
+ *
+ * - the checker contains an internal catalog where the types and templates
+ * of different constructs are aggregated and indexed across all targets in
+ * order to facilitate the checking phase. Catalogs can be 'linked' and the
+ * resolution process delegated (the checker maintains a basic catalog with
+ * the core and common types and there is always a second catalog maintaining
+ * the information related to the current targets).
+ * The catalog is currently not exposed by the library.
+ *
+ * - imports processing: the import statements present in a target are first
+ * 'detected' during the tosca yaml grammar validation phase. At that stage all
+ * imports are (recursively) parsed and validated (first 2 phases). Checking
+ * of all imports (recursively) is done during stage 3.
+ *
+ */
+package org.onap.sdc.dcae.checker;
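
A slightly expanded variant of the javadoc snippet above, as a runnable sketch: it checks
a document and then prints reports only for targets that actually accumulated errors.
CheckerDemo is a hypothetical class name, and the exception handling is kept generic since
the exact checked exceptions of Checker's constructor and check(String) are not pinned down
by the javadoc sample:

    import org.onap.sdc.dcae.checker.Checker;
    import org.onap.sdc.dcae.checker.Report;
    import org.onap.sdc.dcae.checker.Target;

    public class CheckerDemo {
        public static void main(String[] args) throws Exception {
            Checker checker = new Checker();
            checker.check(args[0]); // e.g. "tests/example.yaml"
            // The top target plus everything pulled in through imports:
            for (Target t : checker.targets()) {
                Report report = t.getReport();
                if (report.hasErrors()) {
                    System.out.println(t.getLocation() + "\n" + report);
                }
            }
        }
    }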
\ No newline at end of file diff --git a/dcaedt_validator/checker/src/main/resources/tosca/tosca-common-types.yaml b/dcaedt_validator/checker/src/main/resources/tosca/tosca-common-types.yaml new file mode 100644 index 0000000..c26c6e8 --- /dev/null +++ b/dcaedt_validator/checker/src/main/resources/tosca/tosca-common-types.yaml @@ -0,0 +1,665 @@ +tosca_definitions_version: tosca_simple_yaml_1_1_0 +description: > + TOSCA simple profile common types. To be included by default in all templates. + + +data_types: + +#see 5.3.1 + tosca.datatypes.Root: + description: The TOSCA root Data Type all other TOSCA base Data Types derive from + +#from 5.3.2 + tosca.datatypes.Credential: + derived_from: tosca.datatypes.Root + properties: + protocol: + type: string + required: false + token_type: + type: string + default: password + token: + type: string + keys: + type: map + required: false + entry_schema: string + user: + type: string + required: false + +#from 5.3.3 + tosca.datatypes.TimeInterval: + derived_from: tosca.datatypes.Root + properties: + start_time: + type: timestamp + required: true + end_time: + type: timestamp + required: true + +#from 5.3.4 + tosca.datatypes.network.NetworkInfo: + derived_from: tosca.datatypes.Root + properties: + network_name: + type: string + network_id: + type: string + addresses: + type: list + entry_schema: string + +#from 5.3.5 + tosca.datatypes.network.PortInfo: + derived_from: tosca.datatypes.Root + properties: + port_name: + type: string + port_id: + type: string + network_id: + type: string + mac_address: + type: string + addresses: + type: list + entry_schema: string + +#from 5.3.6 + tosca.datatypes.network.PortDef: + derived_from: integer + constraints: + - in_range: [ 1, 65535 ] + +#from 5.3.7 + tosca.datatypes.network.PortSpec: + derived_from: tosca.datatypes.Root + properties: + protocol: + type: string + required: true + default: tcp + constraints: + - valid_values: [ udp, tcp, igmp ] + target: +#I think the intent was (same for source): +#type: tosca.datatypes.network.PortDef + type: integer + entry_schema: tosca.datatypes.network.PortDef + target_range: + type: range + constraints: + - in_range: [ 1, 65535 ] + source: + type: integer + entry_schema: tosca.datatypes.network.PortDef + source_range: + type: range + constraints: + - in_range: [ 1, 65535 ] + +capability_types: + +#from 5.5.1 + tosca.capabilities.Root: + description: The TOSCA root Capability Type all other TOSCA base Capability Types derive from + +#from 5.5.2 + tosca.capabilities.Node: + derived_from: tosca.capabilities.Root + +#from 5.5.3 + tosca.capabilities.Compute: + derived_from: tosca.capabilities.Root + properties: + name: + type: string + required: false + num_cpus: + type: integer + required: false + constraints: + - greater_or_equal: 1 + cpu_frequency: + type: scalar-unit.frequency + required: false + constraints: + - greater_or_equal: 0.1 GHz + disk_size: + type: scalar-unit.size + required: false + constraints: + - greater_or_equal: 0 MB + mem_size: + type: scalar-unit.size + required: false + constraints: + - greater_or_equal: 0 MB + +#from 5.5.4 + tosca.capabilities.Network: + derived_from: tosca.capabilities.Root + properties: + name: + type: string + required: false + +#from 5.5.5 + tosca.capabilities.Storage: + derived_from: tosca.capabilities.Root + properties: + name: + type: string + required: false + +#from 5.5.6 + tosca.capabilities.compute.Container: + derived_from: tosca.capabilities.Compute + +#from 5.5.7 + tosca.capabilities.Endpoint: + derived_from: 
tosca.capabilities.Root + properties: + protocol: + type: string + default: tcp + port: + type: tosca.datatypes.network.PortDef + required: false + secure: + type: boolean + default: false + url_path: + type: string + required: false + port_name: + type: string + required: false + network_name: + type: string + required: false + default: PRIVATE + initiator: + type: string + default: source + constraints: + - valid_values: [ source, target, peer ] + ports: + type: map + required: false + constraints: + - min_length: 1 + entry_schema: tosca.datatypes.network.PortSpec + attributes: + ip_address: + type: string + +#from 5.5.8 + tosca.capabilities.Endpoint.Public: + derived_from: tosca.capabilities.Endpoint + properties: + # Change the default network_name to use the first public network found + network_name: + type: string + default: PUBLIC + floating: + description: > + indicates that the public address should be allocated from a pool of floating IPs that are associated with the network. + type: boolean + default: false + status: experimental + dns_name: + description: The optional name to register with DNS + type: string + required: false + status: experimental + +#from 5.5.9 + tosca.capabilities.Endpoint.Admin: + derived_from: tosca.capabilities.Endpoint + # Change Endpoint secure indicator to true from its default of false + properties: + secure: + type: boolean + default: true + constraints: + - equal: true + +#from 5.5.10 + tosca.capabilities.Endpoint.Database: + derived_from: tosca.capabilities.Endpoint + +#from 5.5.11 + tosca.capabilities.Attachment: + derived_from: tosca.capabilities.Root + +#from 5.5.12 + tosca.capabilities.OperatingSystem: + derived_from: tosca.capabilities.Root + properties: + architecture: + type: string + required: false + type: + type: string + required: false + distribution: + type: string + required: false + version: + type: version + required: false + +#from 5.5.13 + tosca.capabilities.Scalable: + derived_from: tosca.capabilities.Root + properties: + min_instances: + type: integer + default: 1 + max_instances: + type: integer + default: 1 + default_instances: + type: integer + +#from C.3.11 + tosca.capabilities.network.Bindable: + derived_from: tosca.capabilities.Node + + +relationship_types: + +#from 5.7.1 + tosca.relationships.Root: + description: The TOSCA root Relationship Type all other TOSCA base Relationship Types derive from + attributes: + tosca_id: + type: string + tosca_name: + type: string + interfaces: + Configure: + type: tosca.interfaces.relationship.Configure + +#from 5.7.2 + tosca.relationships.DependsOn: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Node ] + +#from 5.7.3 + tosca.relationships.HostedOn: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.compute.Container ] + +#from 5.7.4 + tosca.relationships.ConnectsTo: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Endpoint ] + properties: + credential: + type: tosca.datatypes.Credential + required: false + +#from 5.7.5 + tosca.relationships.AttachesTo: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Attachment ] + properties: + location: + type: string + constraints: + - min_length: 1 + device: + type: string + required: false + +#from 5.7.6 + tosca.relationships.RoutesTo: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.Endpoint ] + + +interface_types: + +#from 5.8.3 + tosca.interfaces.Root: +# 
derived_from: tosca.entity.Root + description: The TOSCA root Interface Type all other TOSCA base Interface Types derive from + +#from 5.8.4 + tosca.interfaces.node.lifecycle.Standard: + derived_from: tosca.interfaces.Root + create: + description: Standard lifecycle create operation. + configure: + description: Standard lifecycle configure operation. + start: + description: Standard lifecycle start operation. + stop: + description: Standard lifecycle stop operation. + delete: + description: Standard lifecycle delete operation. + +#from 5.8.5 + tosca.interfaces.relationship.Configure: + derived_from: tosca.interfaces.Root + pre_configure_source: + description: Operation to pre-configure the source endpoint. + pre_configure_target: + description: Operation to pre-configure the target endpoint. + post_configure_source: + description: Operation to post-configure the source endpoint. + post_configure_target: + description: Operation to post-configure the target endpoint. + add_target: + description: Operation to notify the source node of a target node being added via a relationship. + add_source: + description: Operation to notify the target node of a source node which is now available via a relationship. + target_changed: + description: Operation to notify source some property or attribute of the target changed + remove_target: + description: Operation to remove a target node. + + +node_types: + +#from 5.9.1 + tosca.nodes.Root: + description: The TOSCA Node Type all other TOSCA base Node Types derive from + attributes: + tosca_id: + type: string + tosca_name: + type: string + state: + type: string + capabilities: + feature: + type: tosca.capabilities.Node + requirements: + - dependency: + capability: tosca.capabilities.Node + node: tosca.nodes.Root + relationship: tosca.relationships.DependsOn + occurrences: [ 0, UNBOUNDED ] + interfaces: + Standard: + type: tosca.interfaces.node.lifecycle.Standard + +#from 5.9.2 + tosca.nodes.Compute: + derived_from: tosca.nodes.Root + attributes: + private_address: + type: string + public_address: + type: string + networks: + type: map +#entry schema for attribute has a string value as per A.5.9 .. +#the standard document defines it as a map similar to the property definition .. 
+ entry_schema: tosca.datatypes.network.NetworkInfo + ports: + type: map + entry_schema: tosca.datatypes.network.PortInfo + requirements: + - local_storage: + capability: tosca.capabilities.Attachment + node: tosca.nodes.BlockStorage + relationship: tosca.relationships.AttachesTo + occurrences: [0, UNBOUNDED] + capabilities: + host: + type: tosca.capabilities.compute.Container + valid_source_types: [tosca.nodes.SoftwareComponent] + endpoint: + type: tosca.capabilities.Endpoint.Admin + os: + type: tosca.capabilities.OperatingSystem + scalable: + type: tosca.capabilities.Scalable + binding: + type: tosca.capabilities.network.Bindable + +#from 5.9.3 + tosca.nodes.SoftwareComponent: + derived_from: tosca.nodes.Root + properties: + # domain-specific software component version + component_version: + type: version + required: false + admin_credential: + type: tosca.datatypes.Credential + required: false + requirements: + - host: + capability: tosca.capabilities.compute.Container + node: tosca.nodes.Compute + relationship: tosca.relationships.HostedOn + +#from 5.9.4 + tosca.nodes.WebServer: + derived_from: tosca.nodes.SoftwareComponent + capabilities: + # Private, layer 4 endpoints + data_endpoint: tosca.capabilities.Endpoint + admin_endpoint: tosca.capabilities.Endpoint.Admin + host: + type: tosca.capabilities.compute.Container + valid_source_types: [ tosca.nodes.WebApplication ] + +#from 5.9.5 + tosca.nodes.WebApplication: + derived_from: tosca.nodes.Root + properties: + context_root: + type: string + capabilities: + app_endpoint: + type: tosca.capabilities.Endpoint + requirements: + - host: + capability: tosca.capabilities.compute.Container + node: tosca.nodes.WebServer + relationship: tosca.relationships.HostedOn + +#from 5.9.6 + tosca.nodes.DBMS: + derived_from: tosca.nodes.SoftwareComponent + properties: + root_password: + type: string + required: false + description: the optional root password for the DBMS service + port: + type: integer + required: false + description: the port the DBMS service will listen to for data and requests + capabilities: + host: + type: tosca.capabilities.compute.Container + valid_source_types: [ tosca.nodes.Database ] + +#from 5.9.7 + tosca.nodes.Database: + derived_from: tosca.nodes.Root + properties: + name: + type: string + description: the logical name of the database + port: + type: integer + description: the port the underlying database service will listen to for data + user: + type: string + description: the optional user account name for DB administration + required: false + password: + type: string + description: the optional password for the DB user account + required: false + requirements: + - host: + capability: tosca.capabilities.compute.Container + node: tosca.nodes.DBMS + relationship: tosca.relationships.HostedOn + capabilities: + database_endpoint: + type: tosca.capabilities.Endpoint.Database + +#from 5.9.8 + tosca.nodes.ObjectStorage: + derived_from: tosca.nodes.Root + properties: + name: + type: string + size: + type: scalar-unit.size + constraints: + - greater_or_equal: 0 GB + maxsize: + type: scalar-unit.size + constraints: + - greater_or_equal: 0 GB + capabilities: + storage_endpoint: + type: tosca.capabilities.Endpoint + +#from 5.9.9 + tosca.nodes.BlockStorage: + derived_from: tosca.nodes.Root + properties: + size: + type: scalar-unit.size + constraints: + - greater_or_equal: 1 MB + volume_id: + type: string + required: false + snapshot_id: + type: string + required: false + capabilities: + attachment: + type: tosca.capabilities.Attachment 
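+#editor's sketch (non-normative, not part of the profile): how a template might
+#pair Compute with BlockStorage using the types above; the node names
+#'my_server'/'my_disk' and the property values are illustrative only:
+#
+# topology_template:
+#   node_templates:
+#     my_server:
+#       type: tosca.nodes.Compute
+#       requirements:
+#         - local_storage:
+#             node: my_disk
+#             relationship:
+#               type: tosca.relationships.AttachesTo
+#               properties:
+#                 location: /dev/vdb
+#     my_disk:
+#       type: tosca.nodes.BlockStorage
+#       properties:
+#         size: 10 GB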
+ +#from 5.9.10 + tosca.nodes.Container.Runtime: + derived_from: tosca.nodes.SoftwareComponent + capabilities: + host: + type: tosca.capabilities.compute.Container + scalable: + type: tosca.capabilities.Scalable + +#from 5.9.11 + tosca.nodes.Container.Application: + derived_from: tosca.nodes.Root + requirements: + - host: + capability: tosca.capabilities.compute.Container + # node: tosca.nodes.Container !invalid node reference! + relationship: tosca.relationships.HostedOn + +#from 5.9.12 + tosca.nodes.LoadBalancer: + derived_from: tosca.nodes.Root + properties: + # TBD + algorithm: + type: string + required: false + status: experimental + capabilities: + client: + type: tosca.capabilities.Endpoint.Public + occurrences: [0, UNBOUNDED] + description: the Floating (IP) clients on the public network can connect to + requirements: + - application: + capability: tosca.capabilities.Endpoint + relationship: tosca.relationships.RoutesTo + occurrences: [0, UNBOUNDED] +# correction by jora: requirement defintion does not allow for a description entry +# description: Connection to one or more load balanced applications + +artifact_types: + +#from 5.4.1 + tosca.artifacts.Root: + description: The TOSCA Artifact Type all other TOSCA Artifact Types derive from + +#from 5.4.2 + tosca.artifacts.File: + derived_from: tosca.artifacts.Root + +#from 5.4.3 + tosca.artifacts.Deployment: + derived_from: tosca.artifacts.Root + description: TOSCA base type for deployment artifacts + +#from 5.4.3.3 + tosca.artifacts.Deployment.Image: + derived_from: tosca.artifacts.Deployment + +#from 5.4.3.4 + tosca.artifacts.Deployment.Image.VM: + derived_from: tosca.artifacts.Deployment.Image + description: Virtual Machine (VM) Image + +#from 5.4.4 + tosca.artifacts.Implementation: + derived_from: tosca.artifacts.Root + description: TOSCA base type for implementation artifacts + +#from 5.4.4.3 + tosca.artifacts.Implementation.Bash: + derived_from: tosca.artifacts.Implementation + description: Script artifact for the Unix Bash shell + mime_type: application/x-sh + file_ext: [ sh ] + +#from 5.4.4.4 + tosca.artifacts.Implementation.Python: + derived_from: tosca.artifacts.Implementation + description: Artifact for the interpreted Python language + mime_type: application/x-python + file_ext: [ py ] + + +#from 5.9 +group_types: + + tosca.groups.Root: + description: The TOSCA Group Type all other TOSCA Group Types derive from + interfaces: + Standard: + type: tosca.interfaces.node.lifecycle.Standard + +#from 5.10 +policy_types: + + tosca.policies.Root: + description: The TOSCA Policy Type all other TOSCA Policy Types derive from + + tosca.policies.Placement: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to govern placement of TOSCA nodes or groups of nodes. + + tosca.policies.Scaling: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to govern scaling of TOSCA nodes or groups of nodes. + + tosca.policies.Update: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to govern update of TOSCA nodes or groups of nodes. + + tosca.policies.Performance: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to declare performance requirements for TOSCA nodes or groups of nodes. 
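+#editor's note (sketch): per the checker's package documentation the common types
+#in this file are available to all templates by default, while the networking and
+#nfv profiles below must be imported explicitly, e.g. (path is illustrative and
+#subject to the checker's target resolution rules):
+#
+# imports:
+#   - tosca/tosca-network-types.yaml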
+ diff --git a/dcaedt_validator/checker/src/main/resources/tosca/tosca-examples-types.yaml b/dcaedt_validator/checker/src/main/resources/tosca/tosca-examples-types.yaml new file mode 100644 index 0000000..5eee538 --- /dev/null +++ b/dcaedt_validator/checker/src/main/resources/tosca/tosca-examples-types.yaml @@ -0,0 +1,117 @@ +tosca_definitions_version: tosca_simple_yaml_1_0_0 +description: > + Non-normative type definitions, as per section 8 of TOSCA simple profile. + + +artifact_types: + + tosca.artifacts.Deployment.Image.Container.Docker: + derived_from: tosca.artifacts.Deployment.Image + description: Docker Container Image + + tosca.artifacts.Deployment.Image.VM.ISO: + derived_from: tosca.artifacts.Deployment.Image.VM + description: Virtual Machine (VM) image in ISO disk format + mime_type: application/octet-stream + file_ext: [ iso ] + + tosca.artifacts.Deployment.Image.VM.QCOW2: + derived_from: tosca.artifacts.Deployment.Image.VM + description: Virtual Machine (VM) image in QCOW v2 standard disk format + mime_type: application/octet-stream + file_ext: [ qcow2 ] + + +capability_types: + + tosca.capabilities.Container.Docker: + derived_from: tosca.capabilities.Container + properties: + version: + type: list + required: false + entry_schema: version + publish_all: + type: boolean + default: false + required: false + publish_ports: + type: list + entry_schema: tosca.datatypes.network.PortSpec + required: false + expose_ports: + type: list + entry_schema: tosca.datatypes.network.PortSpec + required: false + volumes: + type: list + entry_schema: string + required: false + + +node_types: + + tosca.nodes.Database.MySQL: + derived_from: tosca.nodes.Database + requirements: + - host: + capability: tosca.capabilities.Container + node: tosca.nodes.DBMS.MySQL + + tosca.nodes.DBMS.MySQL: + derived_from: tosca.nodes.DBMS + properties: + port: + type: integer + description: reflect the default MySQL server port + default: 3306 + root_password: + type: string + # MySQL requires a root_password for configuration + # Override parent DBMS definition to make this property required + required: true + capabilities: + # Further constrain the ‘host’ capability to only allow MySQL databases + host: + type: tosca.capabilities.Container + valid_source_types: [ tosca.nodes.Database.MySQL ] + + tosca.nodes.WebServer.Apache: + derived_from: tosca.nodes.WebServer + + tosca.nodes.WebApplication.WordPress: + derived_from: tosca.nodes.WebApplication + properties: + admin_user: + type: string + admin_password: + type: string + db_host: + type: string + requirements: + - database_endpoint: + capability: tosca.capabilities.Endpoint.Database + node: tosca.nodes.Database + relationship: tosca.relationships.ConnectsTo + + tosca.nodes.WebServer.Nodejs: + derived_from: tosca.nodes.WebServer + properties: + # Property to supply the desired implementation in the Github repository + github_url: + required: no + type: string + description: location of the application on the github. + default: https://github.com/mmm/testnode.git + interfaces: + Standard: + type: tosca.interfaces.node.lifecycle.Standard + inputs: + github_url: + type: string + + tosca.nodes.Container.Application.Docker: + derived_from: tosca.nodes.Container.Application + requirements: + - host: + capability: tosca.capabilities.Container.Docker
\ No newline at end of file diff --git a/dcaedt_validator/checker/src/main/resources/tosca/tosca-network-types.yaml b/dcaedt_validator/checker/src/main/resources/tosca/tosca-network-types.yaml new file mode 100644 index 0000000..e4930e0 --- /dev/null +++ b/dcaedt_validator/checker/src/main/resources/tosca/tosca-network-types.yaml @@ -0,0 +1,103 @@ +tosca_definitions_version: tosca_simple_yaml_1_0_0 +description: > + TOSCA simple profile for networking. + +metadata: + template_name: tosca_simple_networking + template_version: "1.0" + +#imports: +# - tosca-common-types.yaml + +node_types: + + tosca.nodes.network.Network: + derived_from: tosca.nodes.Root + properties: + ip_version: + type: integer + required: false + default: 4 + constraints: + - valid_values: [ 4, 6 ] + cidr: + type: string + required: false + start_ip: + type: string + required: false + end_ip: + type: string + required: false + gateway_ip: + type: string + required: false + network_name: + type: string + required: false + network_id: + type: string + required: false + segmentation_id: + type: string + required: false + network_type: + type: string + required: false + physical_network: + type: string + required: false + capabilities: + link: + type: tosca.capabilities.network.Linkable + + tosca.nodes.network.Port: + derived_from: tosca.nodes.Root + properties: + ip_address: + type: string + required: false + order: + type: integer + required: true + default: 0 + constraints: + - greater_or_equal: 0 + is_default: + type: boolean + required: false + default: false + ip_range_start: + type: string + required: false + ip_range_end: + type: string + required: false + requirements: + - link: + capability: tosca.capabilities.network.Linkable + relationship: tosca.relationships.network.LinksTo + - binding: + capability: tosca.capabilities.network.Bindable + relationship: tosca.relationships.network.BindsTo + + +capability_types: + + tosca.capabilities.network.Linkable: + derived_from: tosca.capabilities.Node + + # also part of common types - used in Compute node type + tosca.capabilities.network.Bindable: + derived_from: tosca.capabilities.Node + +relationship_types: + + tosca.relationships.network.LinksTo: + derived_from: tosca.relationships.DependsOn + valid_target_types: [ tosca.capabilities.network.Linkable ] + + tosca.relationships.network.BindsTo: + derived_from: tosca.relationships.DependsOn + valid_target_types: [ tosca.capabilities.network.Bindable ] + diff --git a/dcaedt_validator/checker/src/main/resources/tosca/tosca-nfv-types.yaml b/dcaedt_validator/checker/src/main/resources/tosca/tosca-nfv-types.yaml new file mode 100644 index 0000000..fd52f6b --- /dev/null +++ b/dcaedt_validator/checker/src/main/resources/tosca/tosca-nfv-types.yaml @@ -0,0 +1,143 @@ +tosca_definitions_version: tosca_simple_profile_for_nfv_1_0_0 +description: > + TOSCA simple profile for Network Function Virtualization (NFV). 
+ +metadata: + template_name: http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd01/tosca-nfv-v1.0-csd01.pdf + template_version: "1.0" + +#imports: +# - tosca-common-types.yaml + +capability_types: + +#from 6.3 + tosca.capabilities.nfv.VirtualLinkable: + derived_from: tosca.capabilities.Root + +#from 7.2.1 + tosca.capabilities.nfv.VirtualBindable: + derived_from: tosca.capabilities.Root +# breaks the virtualbinding requirement in the node type nfv.CP +# valid_source_types: [ tosca.nodes.nfv.VDU ] + +#from 7.2.2 + tosca.capabilities.nfv.HA: + derived_from: tosca.capabilities.Root + valid_source_types: [ tosca.nodes.nfv.VDU ] + +#from 7.2.3 + tosca.capabilities.nfv.HA.ActiveActive: + derived_from: tosca.capabilities.nfv.HA + +#from 7.2.4 + tosca.capabilities.nfv.HA.ActivePassive: + derived_from: tosca.capabilities.nfv.HA + +#from 7.2.5 + tosca.capabilities.nfv.Metric: + derived_from: tosca.capabilities.Root + + +relationship_types: + +#from 6.4 + tosca.relationships.nfv.VirtualLinksTo: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ] + +#from 7.3.1 + tosca.relationships.nfv.VirtualBindsTo: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.nfv.VirtualBindable] + +#from 7.3.2 + tosca.relationships.nfv.HA: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.nfv.HA] + +#from 7.3.3 + tosca.relationships.nfv.Monitor: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.nfv.Metric] + + +node_types: + +#from 7.4.1 + tosca.nodes.nfv.VNF: + derived_from: tosca.nodes.Root + properties: + id: + type: string + description: ID of this VNF + vendor: + type: string + description: name of the vendor who generate this VNF + version: + type: version + description: version of the software for this VNF + requirements: + - virtualLink: + capability: tosca.capabilities.nfv.VirtualLinkable + +#from 7.4.2 + tosca.nodes.nfv.VDU: + derived_from: tosca.nodes.SoftwareComponent + capabilities: + high_availability: + type: tosca.capabilities.nfv.HA + Virtualbinding: + type: tosca.capabilities.nfv.VirtualBindable + monitoring_parameter: + type: tosca.capabilities.nfv.Metric + requirements: + - high_availability: + capability: tosca.capabilities.nfv.HA + relationship: tosca.relationships.nfv.HA + occurrences: [ 0, 1 ] + - host: + capability: tosca.capabilities.Container + node: tosca.nodes.Compute + relationship: tosca.relationships.HostedOn + +#from 7.4.3 + tosca.nodes.nfv.CP: + derived_from: tosca.nodes.Root + properties: + type: + type: string + required: false + requirements: + - virtualLink: + capability: tosca.capabilities.nfv.VirtualLinkable + - virtualbinding: + capability: tosca.capabilities.nfv.VirtualBindable + attributes: + IP_address: + type: string +#!attributes do not take required .. 
required: false + +#from 8.1 + tosca.nodes.nfv.VL: + derived_from: tosca.nodes.Root + properties: + vendor: + type: string + required: true + description: name of the vendor who generate this VL + capabilities: + virtual_linkable: + type: tosca.capabilities.nfv.VirtualLinkable + +#from 8.2 + tosca.nodes.nfv.VL.ELine: + derived_from: tosca.nodes.nfv.VL + +#from 8.3 + tosca.nodes.nfv.VL.ELAN: + derived_from: tosca.nodes.nfv.VL + +#from + tosca.nodes.nfv.VL.ETree: + derived_from: tosca.nodes.nfv.VL diff --git a/dcaedt_validator/checker/src/main/resources/tosca/tosca_simple_yaml_1_0.grammar b/dcaedt_validator/checker/src/main/resources/tosca/tosca_simple_yaml_1_0.grammar new file mode 100644 index 0000000..9653086 --- /dev/null +++ b/dcaedt_validator/checker/src/main/resources/tosca/tosca_simple_yaml_1_0.grammar @@ -0,0 +1,1262 @@ +_status_values: &status_values + enum: + - supported + - unsupported + - experimental + - deprecated + +#I do not know that the lists and maps qualify as 'primitive' .. +_primitive_types: &primitive_types + enum: [string,integer,float,boolean,timestamp,list,map,version,range,scalar-unit.size,scalar_unit.frequency,scalar_unit.time] + +#needs custom validation as we have to make sure there are 2 elements and allow for the +#UNBOUNDED keyword as second element +_range_definition: &range_definition + type: seq + name: range_definition + sequence: + - type: scalar + +#see A.5.2 +#this is where the need of verifying the size of a collection (sequence/map) came from +#this is specified as a sequence where each entry is a map with one entry?? +_constraints_sequence: &constraints_sequence + name: constraints_sequence + short: 0 + type: seq + sequence: + - type: map +# length: 1 + mapping: + equal: + desc: "Constrains a property or parameter to a value equal to the value declared." + type: any + required: no + greater_than: + desc: "Constrains a property or parameter to a value greater than the value declared" + type: scalar + required: no + greater_or_equal: + desc: "Constrains a property or parameter to a value greater than or equal to the value declared." + type: scalar + required: no + less_than: + desc: "Constrains a property or parameter to a value less than the value declared" + type: scalar + required: no + less_or_equal: + desc: "Constrains a property or parameter to a value less than or equal to the value declared." + type: scalar + required: no + in_range: + desc: "Constrains a property or parameter to a value in range of (inclusive) the two values declared. +" + type: seq +# length: 2 + sequence: + - type: scalar + required: no + valid_values: + desc: "Constrains a property or parameter to a value that is in the list of declared values" + type: seq + sequence: + - type: scalar + required: no + length: + desc: "Constrains the property or parameter to a value of a given length." + type: int + required: no + min_length: + desc: "Constrains the property or parameter to a value to a minimum length" + type: scalar + required: no + max_length: + desc: "Constrains the property or parameter to a value to a maximum length" + type: scalar + required: no + pattern: + desc: "Constrains the property or parameter to a value that is allowed by the provided regular expression." + type: str + required: no + +# section A.5.3 property_filter_definition +# it is a constraints sequence that gets attached to a property .. 
+_property_filter_definition: &property_filter_definition + name: property_filter_definition + type: map + mapping: + =: + *constraints_sequence + +#section A.5.4 node_filter_definition +_node_filter_definition: &node_filter_definition + type: map + name: node_filter_definition + mapping: + properties: + desc: "property names to constraints to be applied to those properties" + required: no + type: seq + sequence: + - *property_filter_definition +# - type: map +# mapping: +# =: +# *constraints_sequence + capabilities: + desc: "" + required: no + type: seq + sequence: + - type: map + name: node_filter_capabilities_sequence + desc: "the key is a capability name or type" + mapping: + =: + name: node_filter_capabilities_entry + type: map + mapping: + properties: + desc: "the capability properties and their constraints" + name: node_filter_capabilities_properties + type: seq + sequence: + - type: map + name: node_filter_capabilities_property + mapping: + =: *constraints_sequence + +#used in property and attribute definitions +_entry_schema_definition: &entry_schema_definition + desc: "The optional key that is used to declare the name of the Datatype definition for entries of set types such as the TOSCA list or map" + name: entry_schema_definition + required: no + type: map + short: type + mapping: + "type": + desc: "collection element type" + required: yes + type: str + description: + required: no + type: str + constraints: + *constraints_sequence + +# see section A.5.5 +_artifact_definition: &artifact_definition + type: map + name: artifact_definition + short: implementation # assumes type can be inferred .. + mapping: + "type": + desc: "The required artifact type for the artifact definition" + required: yes + type: str + description: + desc: "The optional description for the artifact definition" + required: no + type: str + implementation: + desc: "The optional URI string (relative or absolute) which can be used to locate the artifacts file. +" + required: no + type: str + repository: + desc: "The optional name of the repository definition which contains the location of the external repository that contains the artifact" + required: no + type: str + deploy_path: + desc: "The file path the associated file would be deployed into within the target nodes container." + required: no + type: str + +# see section A.5.6 +_repository_definition: &repository_definition + type: map + name: repository_definition + short: url + mapping: + description: + desc: "The optional description for the repository." 
+ required: no + type: str + url: + desc: "The required URL or network address used to access the repository" + required: yes + type: str + credential: + desc: "The optional Credential used to authorize access to the repository" + required: no + type: str + +#see section 3.5.7 +_import_definition: &import_definition + type: map + name: import_definition + short: file + mapping: + file: + desc: "file URI" + required: yes + type: str + repository: + desc: "symbolic name of the repository definition where the imported file can be found" + required: no + type: str + namespace_uri: + desc: "namespace URI to that will be applied to type definitions found within the imported file" + required: no + type: str + namespace_prefix: + desc: "optional namespace prefix (alias) that will be used to indicate the namespace_uri when forming a qualified name (i.e., qname) when referencing type definitions from the imported" + required: no + type: str + +#see section A.5.7 +_property_definition: &property_definition + type: map + name: property_definition + mapping: + "type": + type: str + required: yes +#not as easy, it can be an user defined data type +# <<: *primitive_types + description: + type: str + required: no + constraints: + desc: "The optional list of sequenced constraint clauses for the Data Type." + required: no + <<: *constraints_sequence + default: + type: any + required: no + "required": + type: bool + required: no + status: + type: str + required: no + <<: *status_values + entry_schema: + <<: *entry_schema_definition +# desc: "used to declare the name of the Datatype definition for entries of set types such as the TOSCA list or map." +# type: str +# required: no + +#see section A.5.8 +#_property_assignment_definition: &property_assignment_definition + +#see A.5.9 +_attribute_definition: &attribute_definition + type: map + name: attribute_definition + mapping: + "type": + type: str + required: yes +# <<: *primitive_types + description: + type: str + required: no + default: + type: any + required: no + status: + desc: "The optional status of the attribute relative to the specification or implementation" + type: str + required: no + <<: *status_values + entry_schema: + <<: *entry_schema_definition + +#see section A.5.10 +#here again, we must support the short form which is the most common +_attribute_assignment_definition: &attribute_assignment_definition + type: map + name: attribute_assignment_definition + mapping: + description: + desc: "The optional description of the attribute." + required: no + type: str + value: +#actually 'value | value_expression' + desc: "represent the type-compatible value to assign to the named attribute. Attribute values may be provided as the result from the evaluation of an expression or a function" + required: yes + type: any + + +# see spec section A.5.11 + +# see spec section A.5.11.1: variant to be used in node or relationship type definitions +_type_operation_definition: &type_operation_definition + type: map + name: type_operation_definition + short: implementation + mapping: + description: + desc: "The optional description string for the associated named operation." 
+ required: no + type: str + implementation: + desc: "The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR file)" + required: no + type: str + inputs: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a input value" + name: property_assignment + type: any + +# from A.5.11.2 +_template_operation_definition: &template_operation_definition + type: map + name: template_operation_definition + short: implementation + mapping: + description: + desc: "The optional description string for the associated named operation." + required: no + type: str + implementation: + desc: "The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR file)" + name: template_operation_implementation_definition + required: no + short: primary + type: map + mapping: + primary: + desc: "The optional implementation artifact name (e.g., the primary script file name within a TOSCA CSAR file). " + required: no + type: str + dependencies: + desc: "The optional list of one or more dependent or secondary implementation artifact name which are referenced by the primary implementation artifact (e.g., a library the script installs or a secondary script)" + required: no + type: seq + sequence: + - type: str + inputs: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a input value" + name: property_assignment + type: any + + +# see section A.5.12, specifically A.5.12.2.1 : definition to be used in node or relationship type definition +_type_interface_definition: &type_interface_definition + type: map + name: type_interface_definition + mapping: + "type": + desc: "represents the required name of the Interface Type for the interface definition +" + required: yes + type: str + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + *property_definition + =: + *type_operation_definition + +# see section A.5.12.2.2, extended notation to be used in node or relationship template definitions +_template_interface_definition: &template_interface_definition + type: map + name: template_interface_definition + mapping: + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + =: + *template_operation_definition + + +# A.6 section: type specific definitions + +# see section A.6.1 +_capability_definition: &capability_definition + type: map + name: capability_definition + short: type + mapping: + "type": + desc: "The required name of the Capability Type the capability definition is based upon" + required: yes + type: str + description: + desc: "The optional description of the Capability definition" + required: no + type: str + properties: + desc: "" + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of property definitions for the Capability definition" + required: no + type: map + mapping: + =: + *attribute_definition + valid_source_types: + desc: "" + required: no + type: seq + sequence: + - type: str + occurrences: + desc: "The optional minimum and maximum occurrences for the capability." 
+ required: no + <<: *range_definition + +# see section A.6.2 +# +_requirement_definition: &requirement_definition + type: map + name: requirement_definition + short: capability #as per A.6.2.2.1 + mapping: + capability: + desc: "The required reserved keyname used that can be used to provide the name of a valid Capability Type that can fulfil the requirement" + required: yes + type: str + node: + desc: "The optional reserved keyname used to provide the name of a valid Node Type that contains the capability definition that can be used to fulfil the requirement. " + required: no + type: str + relationship: +# and from section A.6.2.1, this one is an oddball + desc: "The optional reserved keyname used to provide the name of a valid Relationship Type to construct when fulfilling the requirement." + required: no + name: requirement_relationship_definition + short: type + type: map + mapping: + type: + desc: "The optional reserved keyname used to provide the name of the Relationship Type for the requirement definitions relationship keyname. +" + required: yes + type: str + interfaces: + #not clear which interface definition is to be used here + desc: "allows augmentation (additional properties and operations) of the interfaces defined by the relationship type indicated above" + required: no + type: map + mapping: + =: + *type_interface_definition + occurrences: + desc: "The optional minimum and maximum occurrences for the requirement." + required: no + <<: *range_definition + +# see section A.6.3 +_artifact_type_definition: &artifact_type_definition + type: map + name: artifact_type_definition + mapping: + derived_from: + desc: "An optional parent Artifact Type name the Artifact Type derives from" + required: no + type: str + description: + desc: "An optional description for the Artifact Type." + required: no + type: str + mime_type: + desc: "The required mime type property for the Artifact Type." + required: no + type: str + file_ext: + desc: "The required file extension property for the Artifact Type" + required: no + type: seq + sequence: + - type: str + properties: + desc: "An optional list of property definitions for the Artifact Type" + required: no + type: map + mapping: + =: + *property_definition + +#see spec section #A.6.4 +_interface_type_definition: &interface_type_definition + type: map + name: interface_type_definition + mapping: + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + type: str + desc: "property_name to property_value(_expression) mapping" + =: + *type_operation_definition + +# A.6.5 +_data_type_definition: &data_type_definition + type: map + name: data_type_definition + mapping: + derived_from: + desc: "The optional key used when a datatype is derived from an existing TOSCA Data Type. +" + required: no + type: str + description: + desc: "The optional description for the Data Type. +" + required: no + type: str + constraints: + desc: "The optional list of sequenced constraint clauses for the Data Type." + <<: *constraints_sequence + properties: + desc: "The optional list property definitions that comprise the schema for a complex Data Type in TOSCA" + type: map + mapping: + =: + *property_definition + +# see section A.6.6 +_capability_type_definition: &capability_type_definition + type: map + name: capability_type_definition + mapping: + derived_from: + desc: "An optional parent capability type name this new Capability Type derives from." 
+ required: no + type: str + description: + desc: "An optional description for the Capability Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Capability Type." + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Capability Type" + required: no + type: map + mapping: + =: + *attribute_definition + valid_source_types: + desc: "An optional list of one or more valid names of Node Types that are supported as valid sources of any relationship established to the declared Capability Type" + required: no + type: seq + sequence: + - type: str + +# section A.6.7 requirement definition: TOSCA YAML profile relies on capability types to +# define requirements + +# see section A.6.9 +_relationship_type_definition: &relationship_type_definition + type: map + name: relationship_type_definition + mapping: + derived_from: + desc: "An optional parent Relationship Type name the Relationship Type derives from" + required: no + type: str + description: + desc: "An optional description for the Relationship Type." + required: no + type: str + properties: + desc: "An optional list of property definitions for the Relationship Type" + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Relationship Type" + required: no + type: map + mapping: + =: + *attribute_definition + interfaces: + desc: "An optional list of interface definitions interfaces supported by the Relationship Type" + required: no + type: map + mapping: + =: + *type_interface_definition + valid_target_types: + desc: "An optional list of one or more names of Capability Types that are valid targets for this relationship. " + required: no + type: seq + sequence: + - type: str + +#see section 3.6.10 +_group_type_definition: &group_type_definition + type: map + name: group_type_definition + mapping: + derived_from: + desc: "An optional parent Group Type name this new Group Type derives from" + required: no + type: str + version: + desc: "An optional version for the Group Type definition" + required: no + type: str + description: + desc: "An optional description for the Group Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Group Type." + required: no + type: map + mapping: + =: + *property_definition + targets: + desc: "An optional list of one or more names of Node Types that are valid +(allowed) as members of the Group Type." + required: no + type: seq + sequence: + - type: str + interfaces: + desc: "An optional list of interface definitions supported by the Group Type" + required: no + type: map + mapping: + =: + *type_interface_definition + +#see section 3.6.11 +_policy_type_definition: &policy_type_definition + type: map + name: policy_type_definition + mapping: + derived_from: + desc: "An optional parent Policy Type name this new Policy Type derives from" + required: no + type: str + version: + desc: "An optional version for the Policy Type definition" + required: no + type: str + description: + desc: "An optional description for the Policy Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Policy Type." 
+ required: no + type: map + mapping: + =: + *property_definition + targets: + desc: "An optional list of valid Node Types or Group Types the Policy Type +can be applied to" + required: no + type: seq + sequence: + - type: str + +# see section A.6.8 +_node_type_definition: &node_type_definition + type: map + name: node_type_definition + mapping: + derived_from: + desc: "An optional parent Node Type name this new Node Type derives from" + required: no + type: str + description: + desc: "An optional description for the Node Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Node Type." + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Node Type. +" + required: no + type: map + mapping: + =: + *attribute_definition + requirements: + desc: "An optional sequenced list of requirement definitions for the Node Type. +" + required: no + type: seq + sequence: + - type: map + mapping: + =: + *requirement_definition + capabilities: + desc: "An optional list of capability definitions for the Node Type" + required: no + type: map + mapping: + =: + *capability_definition + interfaces: + desc: "" + required: no + type: map + mapping: + =: + *type_interface_definition + artifacts: + desc: "An optional list of named artifact definitions for the Node Type" + required: no + type: map + mapping: + =: + *artifact_definition + +# A.7 Template specific definitions + +# see section A.7.1 +_capability_assignment_definition: &capability_assignment_definition + type: map + name: capability_assignment_definition + mapping: + properties: + # list of property assignments + desc: "An optional list of property definitions for the Capability definition" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + attributes: + # list of attribute assignments + desc: "An optional list of attribute definitions for the Capability definition" + required: no + type: map + mapping: + =: + desc: "" + name: attribute_assignment + type: any + +# see section A.7.2 +_requirement_assignment_definition: &requirement_assignment_definition + type: map + name: requirement_assignment_definition + short: node + mapping: + capability: + desc: " used to provide the name of either a: Capability definition within a target node template that can fulfill the requirement or Capability Type that the provider will use to select a type-compatible target node template to fulfill the requirement at runtime." + required: no + type: str + node: +#why is this a reference to a node type and not to a node template?? + desc: "used to identify the target node of a relationship: Node Template name that can fulfil the target node requirement or Node Type name that the provider will use to select a type-compatible node template to fulfil the requirement at runtime" + required: no + type: str + relationship: + desc: "" + required: no +#fins a better name name: relationship_definition + type: map + short: type + mapping: + "type": + desc: "The optional reserved keyname used to provide the name of the Relationship Type for the requirement assignments relationship keyname" + required: no + type: str + properties: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + interfaces: + desc: "from A.5.12.2.2, right?" 
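+# Requirement assignments usually rely on the short form declared above
+# ('short: node'); the two illustrative spellings below are equivalent:
+#   requirements:
+#     - host: my_server
+#     - host:
+#         node: my_server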
+ required: no + type: map + mapping: + =: + *template_interface_definition + node_filter: + desc: "The optional filter definition that TOSCA orchestrators or providers would use to select a type-compatible target node that can fulfill the associated abstract requirement at runtime." + required: no + <<: *node_filter_definition + +# see section A.7.3 +_node_template_definition: &node_template_definition + type: map + name: node_template_definition + mapping: + "type": + desc: "The required name of the Node Type the Node Template is based upon" + required: yes + type: str + description: + desc: "An optional description for the Node Template" + required: no + type: str + directives: + desc: "An optional list of directive values to provide processing instructions to orchestrators and tooling." + required: no + type: seq + sequence: + - type: str + properties: +#custom check needs to be added: the value or expression providing the property value +#needs to be compatible with the property definition + desc: "An optional list of property value assignments for the Node Template." + required: no + type: map + mapping: + =: + type: any + name: property_assignment + desc: "a property value or an expression providing a property value" + attributes: + desc: "An optional list of attribute value assignments for the Node Template" + required: no + type: map + mapping: + =: + *attribute_assignment_definition + requirements: + desc: "An optional sequenced list of requirement assignments for the Node Template." + required: no + type: seq + sequence: + - type: map + mapping: + =: + *requirement_assignment_definition + capabilities: + desc: "An optional list of capability assignments for the Node Template." + required: no + type: map + mapping: + =: + *capability_assignment_definition + interfaces: + desc: "An optional list of named interface definitions for the Node Template" + required: no + type: map + mapping: + =: + *template_interface_definition + artifacts: + desc: "An optional list of named artifact definitions for the Node Template. +" + required: no + type: map + mapping: + =: + *artifact_definition + node_filter: + desc: "The optional filter definition that TOSCA orchestrators would use to select the correct target node. This keyname is only valid if the directive has the value of 'selectable' set." + required: no + <<: *node_filter_definition + copy: + desc: "The optional (symbolic) name of another node template to copy into (all keynames and values) and use as a basis for this node template." + required: no + type: str + +# see section A.7.4 +_relationship_template_definition: &relationship_template_definition + type: map + name: relationship_template_definition + mapping: + "type": + desc: "The required name of the Relationship Type the Relationship Template is based upon" + required: yes + type: str + alias: + desc: "The optional name of a different Relationship Template definition whose values are (effectively) copied into the definition for this Relationship Template (prior to any other overrides)." + required: no + type: str + description: + desc: "An optional description for the Relationship Template" + required: no + type: str + properties: + desc: "An optional list of property assignments for the Relationship Template." 
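+# A minimal, illustrative relationship template of the kind this rule
+# validates (the template and script names are invented):
+#   my_connection:
+#     type: tosca.relationships.ConnectsTo
+#     interfaces:
+#       Configure:
+#         pre_configure_source: scripts/wire.sh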
+ required: no + name: properties_assignment_validation + type: map + mapping: + =: + type: any +#scalar + desc: "an expression providing a property value" + attributes: + desc: "An optional list of attribute value assignments for the Relationship Template" + required: no + name: attributes_assignment_validation + type: map + mapping: + =: + type: scalar + desc: "an expression providing an attribute value" + interfaces: + desc: "An optional list of named interface definitions for the Relationship Template ('augmentation' is allowed here)" + required: no + type: map + mapping: + =: + *template_interface_definition + copy: + desc: "The optional (symbolic) name of another relationship template to copy into (all keynames and values) and use as a basis for this relationship template." + required: no + type: str + + +# see section 3.7.5 +_group_definition: &group_definition + type: map + name: group_definition + mapping: + "type": + desc: "The required name of the group type the group definition is based upon" + required: yes + type: str + description: + desc: "The optional description for the group definition" + required: no + properties: + desc: " represents the optional list of property assignments for the group definition that provide values for properties defined in its declared Group Type" + required: no + type: map + mapping: + =: + type: any + name: property_assignment + targets: + desc: "contains the required list of one or more node template names (within the same topology template) that are members of this logical group" + required: yes + type: seq + sequence: + - type: str + interfaces: + desc: "represents the optional list of interface definitions for the group definition that augment those provided by its declared Group Type" + required: no + type: map + mapping: + =: + *template_interface_definition + +# see section 3.7.6 +_policy_template_definition: &policy_template_definition + type: map + name: policy_definition + mapping: + "type": + desc: "The required name of the policy type the policy definition is based upon" + required: yes + type: str + description: + desc: "The optional description for the policy definition" + required: no + properties: + desc: "represents the optional list of property assignments for the policy definition that provide values for properties defined in its declared Policy Type" + required: no + type: map + mapping: + =: + type: any + name: property_assignment + targets: + desc: "represents the optional list of names of node templates or groups that the policy is to applied to" + required: no + type: seq + sequence: + - type: str + +# see section 3.8 Topology Template definition: defines the topology template of a cloud application. 
+# described as a a reusable grammar as it can be a part of a service template definition +_topology_template_definition: &topology_template_definition + type: map + name: topology_template_definition + mapping: + description: + desc: "a description of the topology template" + required: no + type: str + inputs: + desc: "definition of input parameters for the topology template" + name: inputs + required: no + type: map + mapping: + =: + *property_definition + node_templates: + desc: "definition of the node templates of the topology" + name: node_templates + required: no + type: map + mapping: + =: + *node_template_definition + relationship_templates: + desc: "definition of the relationship templates of the topology" + required: no + name: relationship_templates + type: map + mapping: + =: + *relationship_template_definition + outputs: + desc: "definition of output parameters for the topology template" + name: outputs + required: no + type: map + mapping: + =: + *attribute_assignment_definition + groups: + desc: "An optional list of Group definitions whose members are node templates defined within this same Topology Template" + name: groups + required: no + type: map + mapping: + =: + *group_definition + policies: + # see 8.2.3, initially the list is not described as sequenced but then the grammar shows it as such !? + desc: "An optional sequenced?? list of Policy definitions for the Topology Template." + name: policies + required: no + type: seq + sequence: + - type: map + mapping: + =: + *policy_template_definition + substitution_mappings: +# one possible short-coming that is visible here is that the definition of the capability +# and requirements mappings are given in the spec only with the short/inline version of a +# YAML list/sequence, which cannot be enforced here .. + desc: " a description of the topology template" + name: substitution_mappings + required: no + type: map + mapping: + node_type: + desc: "node type name" + required: yes + type: str + capabilities: + desc: "map_of_capability_mappings_to_expose" + type: map + mapping: + =: + type: seq + sequence: + - type: str + requirements: + desc: "map_of_requirement_mapping_to_expose" + type: map + mapping: + =: + type: seq + sequence: + - type: str + + +# see A.9 Service Template definition: A TOSCA Service Template (YAML) document contains +# element definitions of building blocks for cloud application, or complete models of cloud applications. + +type: map +name: service_template_definition +mapping: + tosca_definitions_version: + desc: "Required TOSCA Definitions version string" + required: yes + type: str + enum: [tosca_simple_yaml_1_0_0] + + tosca_default_namespace: + desc: "Optional. 
default namespace (for type schema)" + required: no + type: str + + metadata: + desc: "Optional metadata keyname: value pairs" + name: metadata + required: no + type: map + mapping: + template_name: + desc: "Optional name of this service template" + required: no + type: str + template_author: + desc: "Optional author of this service template" + required: no + type: str + template_version: + desc: "Optional version of this service template" + required: no + type: str + =: + desc: "User defined entry" + required: no + type: str + +#to add, the spec says: "Optional list of domain or profile specific metadata keynames" + + description: + desc: "Optional description of the definitions inside the file" + required: no + type: str + + imports: + desc: "ordered list of import statements for importing other definitions files" + name: imports + required: no + type: seq + sequence: + - type: map + mapping: + =: + *import_definition + + dsl_definitions: + desc: "list of YAML alias anchors (or macros)" + name: dsl_definitions + required: no + type: map + mapping: + =: + desc: "some piece of valid yaml that makes the anchor/alias definition" + type: any + required: no + + repositories: + desc: "list of external repository definitions which host TOSCA artifacts" + name: repositories + required: no + type: map + mapping: + =: + *repository_definition + + data_types: + desc: "list of TOSCA datatype definitions" + name: data_types + required: no + type: map + mapping: + =: + *data_type_definition + + node_types: + desc: "list of node type definitions" + name: node_types + required: no + type: map + mapping: + =: + *node_type_definition + + capability_types: + desc: "list of capability type definitions" + name: capability_types + required: no + type: map + mapping: + =: + *capability_type_definition + + relationship_types: + desc: "list of relationship type definitions" + name: relationship_types + required: no + type: map + mapping: + =: + *relationship_type_definition + + artifact_types: + desc: "list of artifact type definitions" + name: artifact_types + required: no + type: map + mapping: + =: + *artifact_type_definition + + interface_types: + desc: "list of interface type definitions" + name: interface_types + required: no + type: map + mapping: + =: + *interface_type_definition + + group_types: + desc: "list of group type definitions" + name: group_types + required: no + type: map + mapping: + =: + *group_type_definition + + policy_types: + desc: "list of policy type definitions" + name: policy_types + required: no + type: map + mapping: + =: + *policy_type_definition + + topology_template: + desc: "topology template definition of the cloud application or service" + required: no + <<: *topology_template_definition diff --git a/dcaedt_validator/checker/src/main/resources/tosca/tosca_simple_yaml_1_1.grammar b/dcaedt_validator/checker/src/main/resources/tosca/tosca_simple_yaml_1_1.grammar new file mode 100644 index 0000000..e0645d5 --- /dev/null +++ b/dcaedt_validator/checker/src/main/resources/tosca/tosca_simple_yaml_1_1.grammar @@ -0,0 +1,1583 @@ +_status_values: &status_values + enum: + - supported + - unsupported + - experimental + - deprecated + +#I do not know that the lists and maps qualify as 'primitive' .. 
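+# (in templates, scalar-unit values would appear as strings such as
+#  "10 GB" or "2.4 GHz" -- mentioned here only to motivate the type names below)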
+_primitive_types: &primitive_types
+ enum: [string,integer,float,boolean,timestamp,list,map,version,range,scalar-unit.size,scalar-unit.frequency,scalar-unit.time]
+
+#needs custom validation as we have to make sure there are 2 elements and allow for the
+#UNBOUNDED keyword as second element
+_range_definition: &range_definition
+ desc: "A two-entry sequence declaring the (inclusive) lower and upper bounds of a range"
+ required: no
+ type: seq
+ name: range_definition
+ sequence:
+ - type: scalar
+
+_version_definition: &version_definition
+ desc: "An optional TOSCA version number"
+ required: no
+ type: str
+ name: version_definition
+# pattern: <major_version>.<minor_version>[.<fix_version>[.<qualifier>[-<build_version] ] ]
+
+#common metadata definition syntax
+_metadata_definition: &metadata_definition
+ desc: "Section used to declare additional metadata information"
+ required: no
+ type: map
+ mapping:
+ =:
+ type: str
+
+#see A.5.2
+#this is where the need of verifying the size of a collection (sequence/map) came from
+#this is specified as a sequence where each entry is a map with one entry??
+_constraints_sequence: &constraints_sequence
+ name: constraints_sequence
+ short: 0
+ type: seq
+ sequence:
+ - type: map
+# length: 1
+ mapping:
+ equal:
+ desc: "Constrains a property or parameter to a value equal to the value declared."
+ type: any
+ required: no
+ greater_than:
+ desc: "Constrains a property or parameter to a value greater than the value declared."
+ type: scalar
+ required: no
+ greater_or_equal:
+ desc: "Constrains a property or parameter to a value greater than or equal to the value declared."
+ type: scalar
+ required: no
+ less_than:
+ desc: "Constrains a property or parameter to a value less than the value declared."
+ type: scalar
+ required: no
+ less_or_equal:
+ desc: "Constrains a property or parameter to a value less than or equal to the value declared."
+ type: scalar
+ required: no
+ in_range:
+ desc: "Constrains a property or parameter to a value in range of (inclusive) the two values declared."
+ type: seq
+# length: 2
+ sequence:
+ - type: scalar
+ required: no
+ valid_values:
+ desc: "Constrains a property or parameter to a value that is in the list of declared values."
+ type: seq
+ sequence:
+ - type: scalar
+ required: no
+ length:
+ desc: "Constrains the property or parameter to a value of a given length."
+ type: int
+ required: no
+ min_length:
+ desc: "Constrains the property or parameter to a value of a minimum length."
+ type: scalar
+ required: no
+ max_length:
+ desc: "Constrains the property or parameter to a value of a maximum length."
+ type: scalar
+ required: no
+ pattern:
+ desc: "Constrains the property or parameter to a value that is allowed by the provided regular expression."
+ type: str
+ required: no
+
+# section A.5.3 property_filter_definition
+# it is a constraints sequence that gets attached to a property ..
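+# An illustrative, non-normative fragment of what such a filter accepts --
+# a property name mapped to its constraint clauses (num_cpus is just an
+# example name):
+#   num_cpus:
+#     - in_range: [ 2, 8 ]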
+_property_filter_definition: &property_filter_definition + name: property_filter_definition + type: map + mapping: + =: + *constraints_sequence + +#section A.5.4 node_filter_definition +_node_filter_definition: &node_filter_definition + type: map + name: node_filter_definition + mapping: + properties: + desc: "property names to constraints to be applied to those properties" + required: no + type: seq + sequence: + - *property_filter_definition +# - type: map +# mapping: +# =: +# *constraints_sequence + capabilities: + desc: "" + required: no + type: seq + sequence: + - type: map + name: node_filter_capabilities_sequence + desc: "the key is a capability name or type" + mapping: + =: + name: node_filter_capabilities_entry + type: map + mapping: + properties: + desc: "the capability properties and their constraints" + name: node_filter_capabilities_properties + type: seq + sequence: + - type: map + name: node_filter_capabilities_property + mapping: + =: *constraints_sequence + +#used in property and attribute definitions +_entry_schema_definition: &entry_schema_definition + desc: "The optional key that is used to declare the name of the Datatype definition for entries of set types such as the TOSCA list or map" + name: entry_schema_definition + required: no + type: map + short: type + mapping: + "type": + desc: "collection element type" + required: yes + type: str + description: + required: no + type: str + constraints: + *constraints_sequence + +# see section A.5.5 +_artifact_definition: &artifact_definition + type: map + name: artifact_definition + short: implementation # assumes type can be inferred .. + mapping: + "type": + desc: "The required artifact type for the artifact definition" + required: yes + type: str + description: + desc: "The optional description for the artifact definition" + required: no + type: str + file: + desc: "The optional URI string (relative or absolute) which can be used to locate the artifacts file." + required: no + type: str + repository: + desc: "The optional name of the repository definition which contains the location of the external repository that contains the artifact" + required: no + type: str + deploy_path: + desc: "The file path the associated file would be deployed into within the target nodes container." + required: no + type: str + +# see section 3.5.5 +_repository_definition: &repository_definition + type: map + name: repository_definition + short: url + mapping: + description: + desc: "The optional description for the repository." 
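+# e.g. (illustrative): with 'short: url' declared above, the one-line form
+#   my_repo: http://repo.example.com/tosca/
+# is shorthand for
+#   my_repo:
+#     url: http://repo.example.com/tosca/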
+ required: no + type: str + url: + desc: "The required URL or network address used to access the repository" + required: yes + type: str + credential: + desc: "The optional Credential used to authorize access to the repository" + required: no + type: str + +#see section 3.5.7 +_import_definition: &import_definition + type: map + name: import_definition + short: file + mapping: + file: + desc: "file URI" + required: yes + type: str + repository: + desc: "symbolic name of the repository definition where the imported file can be found" + required: no + type: str + namespace_uri: + desc: "namespace URI to that will be applied to type definitions found within the imported file" + required: no + type: str + namespace_prefix: + desc: "optional namespace prefix (alias) that will be used to indicate the namespace_uri when forming a qualified name (i.e., qname) when referencing type definitions from the imported" + required: no + type: str + +#see section 3.5.8 +_property_definition: &property_definition + type: map + name: property_definition + mapping: + "type": + type: str + required: yes +#not as easy, it can be an user defined data type +# <<: *primitive_types + description: + type: str + required: no + constraints: + desc: "The optional list of sequenced constraint clauses for the Data Type." + required: no + <<: *constraints_sequence + default: + type: any + required: no + "required": + type: bool + required: no + status: + type: str + required: no + <<: *status_values + entry_schema: + <<: *entry_schema_definition +# desc: "used to declare the name of the Datatype definition for entries of set types such as the TOSCA list or map." +# type: str +# required: no + +#see section A.5.8 +#_property_assignment_definition: &property_assignment_definition + +#see 3.5.10 +_attribute_definition: &attribute_definition + type: map + name: attribute_definition + mapping: + "type": + type: str + required: yes +# <<: *primitive_types + description: + type: str + required: no + default: + type: any + required: no + status: + desc: "The optional status of the attribute relative to the specification or implementation" + type: str + required: no + <<: *status_values + entry_schema: + <<: *entry_schema_definition + +#see section 3.5.11 +#here again, we must support the short form which is the most common +_attribute_assignment_definition: &attribute_assignment_definition + type: map + name: attribute_assignment_definition + mapping: + description: + desc: "The optional description of the attribute." + required: no + type: str + value: +#actually 'value | value_expression' + desc: "represent the type-compatible value to assign to the named attribute. Attribute values may be provided as the result from the evaluation of an expression or a function" + required: yes + type: any + + +#see 3.5.12 +_parameter_definition: ¶meter_definition + type: map + name: parameter_definition + mapping: + "type": + type: str + required: no +#not as easy, it can be an user defined data type +# <<: *primitive_types + description: + type: str + required: no + constraints: + desc: "The optional list of sequenced constraint clauses for the Data Type." + required: no + <<: *constraints_sequence + default: + type: any + required: no + "required": + type: bool + required: no + status: + type: str + required: no + <<: *status_values + value: + desc: "represent the type-compatible value to assign to the named parameter. 
Parameter values may be provided as the result from the evaluation of an expression or a function" + required: yes + type: any + entry_schema: + <<: *entry_schema_definition + + + + +# see spec section 3.5.13 + +# see spec section 3.5.13.2: variant to be used in node or relationship type definitions +_type_operation_definition: &type_operation_definition + type: map + name: type_operation_definition + short: implementation + mapping: + description: + desc: "The optional description string for the associated named operation." + required: no + type: str + implementation: + desc: "The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR file)" + required: no + type: str + inputs: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a input value" + name: property_assignment + type: any + +# from 3.5.13.2.3 +_template_operation_definition: &template_operation_definition + type: map + name: template_operation_definition + short: implementation + mapping: + description: + desc: "The optional description string for the associated named operation." + required: no + type: str + implementation: + desc: "The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR file)" + name: template_operation_implementation_definition + required: no + short: primary + type: map + mapping: + primary: + desc: "The optional implementation artifact name (e.g., the primary script file name within a TOSCA CSAR file). " + required: no + type: str + dependencies: + desc: "The optional list of one or more dependent or secondary implementation artifact name which are referenced by the primary implementation artifact (e.g., a library the script installs or a secondary script)" + required: no + type: seq + sequence: + - type: str + inputs: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a input value" + name: property_assignment + type: any + + +# see section 3.5.14, specifically 3.5.14.2.1 : definition to be used in node or relationship type definition +_type_interface_definition: &type_interface_definition + type: map + name: type_interface_definition + mapping: + "type": + desc: "represents the required name of the Interface Type for the interface definition" + required: yes + type: str + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + *property_definition + =: + *type_operation_definition + +# see section 3.5.14.2.2, extended notation to be used in node or relationship template definitions +_template_interface_definition: &template_interface_definition + type: map + name: template_interface_definition + mapping: + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + =: + *template_operation_definition + +# see section 3.5.15 +_event_filter_definition: &event_filter_definition + type: map + name: event_filter_definition + mapping: + node: + desc: "The required name of the node type or template that contains either the attribute to be monitored or contains the requirement that references the node that contains the attribute to be monitored" + required: yes + type: str + requirement: + desc: "The optional name of the requirement within the filter’s node that can be used to locate a 
referenced node that contains an attribute to monitor."
+ required: no
+ type: str
+ capability:
+ desc: "The optional name of a capability within the filter’s node or within the node referenced by its requirement that contains the attribute to monitor."
+ required: no
+ type: str
+
+# see section 3.5.16
+# to be revised, based on working version
+_trigger_definition: &trigger_definition
+ type: map
+ name: trigger_definition
+ mapping:
+ description:
+ desc: ""
+ required: no
+ type: str
+ event_type:
+ desc: "The required name of the event type that activates the trigger’s action."
+ required: yes
+ short: type
+ type: map
+ mapping:
+ type:
+ required: yes
+ type: str
+ schedule:
+ desc: ""
+ required: no
+ type: str
+ target_filter:
+ desc: ""
+ required: no
+ <<: *event_filter_definition
+#the section made up of condition/constraint/period/evaluations/method has a mismatching example in the spec doc ..
+ condition:
+ desc: ""
+ required: no
+ <<: *constraints_sequence
+ constraint:
+ desc: ""
+ required: no
+ <<: *constraints_sequence
+ period:
+ desc: "The optional period to use to evaluate the condition"
+ required: no
+ type: int
+ evaluations:
+ desc: ""
+ required: no
+ type: int
+ method:
+ desc: "The optional statistical method name to use to perform the evaluation of the condition"
+ required: no
+ type: str
+ actions:
+ desc: "The id of the workflow to be invoked when the event is triggered and the condition is met (i.e., evaluates to true) OR the required operation to invoke when the event is triggered and the condition is met (i.e., evaluates to true)."
+ required: yes
+ type: str # or operation definition?
+
+
+# see section 3.5.17
+# to be revised, based on working version
+# example on 3.5.17.4 shows this as a sequence of maps, each with one entry @!?
+_workflow_activity_definition: &workflow_activity_definition
+ type: map
+ name: workflow_activity_definition
+ mapping:
+ delegate:
+ desc: ""
+ required: no
+ type: str
+ set_state:
+ desc: ""
+ required: no
+ type: str
+ call_operation:
+ desc: ""
+ required: no
+ type: str
+ inline:
+ desc: ""
+ required: no
+ type: str
+
+
+# see section 3.5.18
+# to be revised, based on working version
+_workflow_assertion_definition: &workflow_assertion_definition
+ name: workflow_assertion_definition
+ type: map
+ mapping:
+ =:
+ *constraints_sequence
+
+
+# see section 3.5.19
+# to be revised, based on working version
+# it employs a recursive definition which yaml allows but the parsers we've tried complain about, i.e. the pattern
+# foo: &foo
+# bar: *foo
+# is theoretically allowed in YAML
+_workflow_condition_clause_definition: &workflow_condition_clause_definition
+ name: workflow_condition_clause_definition
+ type: seq
+ sequence:
+ - type: map
+ name: workflow_condition_clause_entry
+ # here too length=1 would be good as a map can have only one entry
+ mapping:
+ assert:
+ <<: *workflow_assertion_definition
+ and:
+ # recursive definition ! the yaml parser cannot deal with it
+ # <<: *workflow_condition_clause_definition
+ type: seq
+ sequence:
+ - type: map
+ or:
+ # recursive definition ! the yaml parser cannot deal with it
+ # <<: *workflow_condition_clause_definition
+ type: seq
+ sequence:
+ - type: map
+
+
+
+# see section 3.5.20
+# to be revised, based on working version
+# here too, not clear if this is a map or a sequence ..
there is no example +_workflow_precondition_definition: &workflow_precondition_definition + name: workflow_precondition_definition + type: map + mapping: + target: + required: yes + type: str + target_relationship: + required: no + type: str + condition: + desc: "list of condition clause definition .. !@#$%" + <<: *workflow_condition_clause_definition + + +# see section 3.5.21 +# to be revised, based on working version +_workflow_step_definition: &workflow_step_definition + name: workflow_step_definition + desc: "A workflow step allows to define one or multiple sequenced activities in a workflow and how they are connected to other steps in the workflow. They are the building blocks of a declarative workflow." + type: map + mapping: + target: + required: yes + type: str + desc: "The target of the step (this can be a node template name, a group name)" + target_relationship: + required: no + type: str + operation_host: + required: no + type: str + filter: + desc: "Filter is a map of attribute name, list of constraint clause that allows to provide a filtering logic" + type: map + mapping: + =: + *constraints_sequence + activities: + desc: "" + type: seq + sequence: + <<: *workflow_activity_definition + on_success: + desc: "The optional list of step names to be performed after this one has been completed with success (all activities has been correctly processed)." + type: seq + sequence: + - type: str + on_failure: + desc: "The optional list of step names to be called after this one in case one of the step activity failed." + type: seq + sequence: + - type: str + + + + +# 3.6 section: type specific definitions + +# see section 3.6.1 +_capability_definition: &capability_definition + type: map + name: capability_definition + short: type + mapping: + "type": + desc: "The required name of the Capability Type the capability definition is based upon" + required: yes + type: str + description: + desc: "The optional description of the Capability definition" + required: no + type: str + properties: + desc: "" + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of property definitions for the Capability definition" + required: no + type: map + mapping: + =: + *attribute_definition + valid_source_types: + desc: "" + required: no + type: seq + sequence: + - type: str + occurrences: + desc: "The optional minimum and maximum occurrences for the capability." + required: no + <<: *range_definition + + + + +# see section A.6.2 +# +_requirement_definition: &requirement_definition + type: map + name: requirement_definition + short: capability #as per A.6.2.2.1 + mapping: + capability: + desc: "The required reserved keyname used that can be used to provide the name of a valid Capability Type that can fulfil the requirement" + required: yes + type: str + node: + desc: "The optional reserved keyname used to provide the name of a valid Node Type that contains the capability definition that can be used to fulfil the requirement. " + required: no + type: str + relationship: +# and from section A.6.2.1, this one is an oddball + desc: "The optional reserved keyname used to provide the name of a valid Relationship Type to construct when fulfilling the requirement." + required: no + name: requirement_relationship_definition + short: type + type: map + mapping: + type: + desc: "The optional reserved keyname used to provide the name of the Relationship Type for the requirement definitions relationship keyname. 
+" + required: yes + type: str + interfaces: + #not clear which interface definition is to be used here + desc: "allows augmentation (additional properties and operations) of the interfaces defined by the relationship type indicated above" + required: no + type: map + mapping: + =: + *type_interface_definition + occurrences: + desc: "The optional minimum and maximum occurrences for the requirement." + required: no + <<: *range_definition + +# see section 3.6.4 +_artifact_type_definition: &artifact_type_definition + type: map + name: artifact_type_definition + mapping: + derived_from: + desc: "An optional parent Artifact Type name the Artifact Type derives from" + required: no + type: str + version: + <<: *version_definition + metadata: + <<: *metadata_definition + description: + desc: "An optional description for the Artifact Type." + required: no + type: str + mime_type: + desc: "The required mime type property for the Artifact Type." + required: no + type: str + file_ext: + desc: "The required file extension property for the Artifact Type" + required: no + type: seq + sequence: + - type: str + properties: + desc: "An optional list of property definitions for the Artifact Type" + required: no + type: map + mapping: + =: + *property_definition + +#see spec section #3.6.5 +_interface_type_definition: &interface_type_definition + type: map + name: interface_type_definition + mapping: + derived_from: + desc: "The name of the Interface Type this Interface Type definition derives from" + required: no + type: str + description: + desc: "The optional description for the Interface Type." + required: no + type: str + version: + <<: *version_definition + metadata: + <<: *metadata_definition + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + type: str + desc: "property_name to property_value(_expression) mapping" + =: + *type_operation_definition + +#see spec section #3.6.6 +_data_type_definition: &data_type_definition + type: map + name: data_type_definition + mapping: + derived_from: + desc: "The optional key used when a datatype is derived from an existing TOSCA Data Type." + required: no + type: str + description: + desc: "The optional description for the Data Type." + required: no + type: str + version: + <<: *version_definition + metadata: + <<: *metadata_definition + constraints: + desc: "The optional list of sequenced constraint clauses for the Data Type." + <<: *constraints_sequence + properties: + desc: "The optional list property definitions that comprise the schema for a complex Data Type in TOSCA" + type: map + mapping: + =: + *property_definition + +# see section A.6.6 +_capability_type_definition: &capability_type_definition + type: map + name: capability_type_definition + mapping: + derived_from: + desc: "An optional parent capability type name this new Capability Type derives from." + required: no + type: str + description: + desc: "An optional description for the Capability Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Capability Type." 
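+# e.g. an illustrative data type against the 1.1 data_type_definition above --
+# note the version/metadata keynames that the 1.0 grammar did not carry
+# (all names below are invented):
+#   mycompany.datatypes.PortSpec:
+#     derived_from: tosca.datatypes.Root
+#     version: 1.1.0
+#     properties:
+#       port:
+#         type: integer
+#         constraints:
+#           - in_range: [ 1, 65535 ]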
+ required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Capability Type" + required: no + type: map + mapping: + =: + *attribute_definition + valid_source_types: + desc: "An optional list of one or more valid names of Node Types that are supported as valid sources of any relationship established to the declared Capability Type" + required: no + type: seq + sequence: + - type: str + +# section A.6.7 requirement definition: TOSCA YAML profile relies on capability types to +# define requirements + +# see section #3.6.10 +_relationship_type_definition: &relationship_type_definition + type: map + name: relationship_type_definition + mapping: + derived_from: + desc: "An optional parent Relationship Type name the Relationship Type derives from" + required: no + type: str + description: + desc: "An optional description for the Relationship Type." + required: no + type: str + version: + <<: *version_definition + metadata: + <<: *metadata_definition + properties: + desc: "An optional list of property definitions for the Relationship Type" + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Relationship Type" + required: no + type: map + mapping: + =: + *attribute_definition + interfaces: + desc: "An optional list of interface definitions interfaces supported by the Relationship Type" + required: no + type: map + mapping: + =: + *type_interface_definition + valid_target_types: + desc: "An optional list of one or more names of Capability Types that are valid targets for this relationship. " + required: no + type: seq + sequence: + - type: str + +#see section 3.6.11 +_group_type_definition: &group_type_definition + type: map + name: group_type_definition + mapping: + derived_from: + desc: "An optional parent Group Type name this new Group Type derives from" + required: no + type: str + description: + desc: "An optional description for the Group Type" + required: no + type: str + version: + <<: *version_definition + metadata: + <<: *metadata_definition + properties: + desc: "An optional list of property definitions for the Group Type." + required: no + type: map + mapping: + =: + *property_definition + targets: + desc: "An optional list of one or more names of Node Types that are valid +(allowed) as members of the Group Type." + required: no + type: seq + sequence: + - type: str + interfaces: + desc: "An optional list of interface definitions supported by the Group Type" + required: no + type: map + mapping: + =: + *type_interface_definition +#TODO: group types have gained capabilities and requirements + + +#see section 3.6.12 +_policy_type_definition: &policy_type_definition + type: map + name: policy_type_definition + mapping: + derived_from: + desc: "An optional parent Policy Type name this new Policy Type derives from" + required: no + type: str + description: + desc: "An optional description for the Policy Type" + required: no + type: str + version: + <<: *version_definition + metadata: + <<: *metadata_definition + properties: + desc: "An optional list of property definitions for the Policy Type." 
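+# An illustrative group type per this grammar's keynames (note it uses
+# 'targets' for the member node types; names below are made up):
+#   mycompany.groups.WebFarm:
+#     derived_from: tosca.groups.Root
+#     targets: [ tosca.nodes.WebServer ]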
+ required: no + type: map + mapping: + =: + *property_definition + targets: + desc: "An optional list of valid Node Types or Group Types the Policy Type +can be applied to" + required: no + type: seq + sequence: + - type: str +#TODO: +# triggers: + +# see section #3.6.9 +_node_type_definition: &node_type_definition + type: map + name: node_type_definition + mapping: + derived_from: + desc: "An optional parent Node Type name this new Node Type derives from" + required: no + type: str + description: + desc: "An optional description for the Node Type" + required: no + type: str + version: + <<: *version_definition + metadata: + <<: *metadata_definition + properties: + desc: "An optional list of property definitions for the Node Type." + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Node Type. +" + required: no + type: map + mapping: + =: + *attribute_definition + requirements: + desc: "An optional sequenced list of requirement definitions for the Node Type. +" + required: no + type: seq + sequence: + - type: map + mapping: + =: + *requirement_definition + capabilities: + desc: "An optional list of capability definitions for the Node Type" + required: no + type: map + mapping: + =: + *capability_definition + interfaces: + desc: "" + required: no + type: map + mapping: + =: + *type_interface_definition + artifacts: + desc: "An optional list of named artifact definitions for the Node Type" + required: no + type: map + mapping: + =: + *artifact_definition + +# A.7 Template specific definitions + +# see section A.7.1 +_capability_assignment_definition: &capability_assignment_definition + type: map + name: capability_assignment_definition + mapping: + properties: + # list of property assignments + desc: "An optional list of property definitions for the Capability definition" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + attributes: + # list of attribute assignments + desc: "An optional list of attribute definitions for the Capability definition" + required: no + type: map + mapping: + =: + desc: "" + name: attribute_assignment + type: any + +# see section A.7.2 +_requirement_assignment_definition: &requirement_assignment_definition + type: map + name: requirement_assignment_definition + short: node + mapping: + capability: + desc: " used to provide the name of either a: Capability definition within a target node template that can fulfill the requirement or Capability Type that the provider will use to select a type-compatible target node template to fulfill the requirement at runtime." + required: no + type: str + node: +#why is this a reference to a node type and not to a node template?? 
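+# (in practice both are accepted: a template name, e.g. node: my_db, or a
+#  type name, e.g. node: tosca.nodes.Database, which the orchestrator then
+#  resolves to a compatible template -- example names are illustrative)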
+ desc: "used to identify the target node of a relationship: Node Template name that can fulfil the target node requirement or Node Type name that the provider will use to select a type-compatible node template to fulfil the requirement at runtime" + required: no + type: str + relationship: + desc: "" + required: no +#fins a better name name: relationship_definition + type: map + short: type + mapping: + "type": + desc: "The optional reserved keyname used to provide the name of the Relationship Type for the requirement assignments relationship keyname" + required: no + type: str + properties: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + interfaces: + desc: "from A.5.12.2.2, right?" + required: no + type: map + mapping: + =: + *template_interface_definition + node_filter: + desc: "The optional filter definition that TOSCA orchestrators or providers would use to select a type-compatible target node that can fulfill the associated abstract requirement at runtime." + required: no + <<: *node_filter_definition + +# see section 3.7.3 +_node_template_definition: &node_template_definition + type: map + name: node_template_definition + mapping: + "type": + desc: "The required name of the Node Type the Node Template is based upon" + required: yes + type: str + description: + desc: "An optional description for the Node Template" + required: no + type: str + metadata: + <<: *metadata_definition + directives: + desc: "An optional list of directive values to provide processing instructions to orchestrators and tooling." + required: no + type: seq + sequence: + - type: str + properties: +#custom check needs to be added: the value or expression providing the property value +#needs to be compatible with the property definition + desc: "An optional list of property value assignments for the Node Template." + required: no + type: map + mapping: + =: + type: any + name: property_assignment + desc: "a property value or an expression providing a property value" + attributes: + desc: "An optional list of attribute value assignments for the Node Template" + required: no + type: map + mapping: + =: + *attribute_assignment_definition + requirements: + desc: "An optional sequenced list of requirement assignments for the Node Template." + required: no + type: seq + sequence: + - type: map + mapping: + =: + *requirement_assignment_definition + capabilities: + desc: "An optional list of capability assignments for the Node Template." + required: no + type: map + mapping: + =: + *capability_assignment_definition + interfaces: + desc: "An optional list of named interface definitions for the Node Template" + required: no + type: map + mapping: + =: + *template_interface_definition + artifacts: + desc: "An optional list of named artifact definitions for the Node Template. +" + required: no + type: map + mapping: + =: + *artifact_definition + node_filter: + desc: "The optional filter definition that TOSCA orchestrators would use to select the correct target node. This keyname is only valid if the directive has the value of 'selectable' set." + required: no + <<: *node_filter_definition + copy: + desc: "The optional (symbolic) name of another node template to copy into (all keynames and values) and use as a basis for this node template." 
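+# (e.g. copy: my_base_server -- where my_base_server is the symbolic name
+#  of another node template in the same topology; the name is illustrative)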
+ required: no + type: str + +# see section 3.7.4 +_relationship_template_definition: &relationship_template_definition + type: map + name: relationship_template_definition + mapping: + "type": + desc: "The required name of the Relationship Type the Relationship Template is based upon" + required: yes + type: str + description: + desc: "An optional description for the Relationship Template" + required: no + type: str + metadata: + <<: *metadata_definition + properties: + desc: "An optional list of property assignments for the Relationship Template." + required: no + name: properties_assignment_validation + type: map + mapping: + =: + type: any +#scalar + desc: "an expression providing a property value" + attributes: + desc: "An optional list of attribute value assignments for the Relationship Template" + required: no + name: attributes_assignment_validation + type: map + mapping: + =: + type: scalar + desc: "an expression providing an attribute value" + interfaces: + desc: "An optional list of named interface definitions for the Relationship Template ('augmentation' is allowed here)" + required: no + type: map + mapping: + =: + *template_interface_definition + copy: + desc: "The optional (symbolic) name of another relationship template to copy into (all keynames and values) and use as a basis for this relationship template." + required: no + type: str + + +# see section 3.7.5 +_group_definition: &group_definition + type: map + name: group_definition + mapping: + "type": + desc: "The required name of the group type the group definition is based upon" + required: yes + type: str + description: + desc: "The optional description for the group definition" + required: no + metadata: + <<: *metadata_definition + properties: + desc: " represents the optional list of property assignments for the group definition that provide values for properties defined in its declared Group Type" + required: no + type: map + mapping: + =: + type: any + name: property_assignment + members: + desc: "contains the required list of one or more node template names (within the same topology template) that are members of this logical group" + required: yes + type: seq + sequence: + - type: str + interfaces: + desc: "represents the optional list of interface definitions for the group definition that augment those provided by its declared Group Type" + required: no + type: map + mapping: + =: + *template_interface_definition + +# see section 3.7.6 +_policy_template_definition: &policy_template_definition + type: map + name: policy_definition + mapping: + "type": + desc: "The required name of the policy type the policy definition is based upon" + required: yes + type: str + description: + desc: "The optional description for the policy definition" + required: no + metadata: + <<: *metadata_definition + properties: + desc: "represents the optional list of property assignments for the policy definition that provide values for properties defined in its declared Policy Type" + required: no + type: map + mapping: + =: + type: any + name: property_assignment + targets: + desc: "represents the optional list of names of node templates or groups that the policy is to applied to" + required: no + type: seq + sequence: + - type: str + +#see section 3.7.7, obviously incomplete spec +_workflow_definition: &workflow_definition + type: map + name: workflow_definition + mapping: + description: + desc: "The optional description for the workflow definition" + required: no + metadata: + <<: *metadata_definition + inputs: + desc: "optional list 
of input parameter definitions" + required: no + type: map + mapping: + =: + *property_definition + preconditions: + desc: "List of preconditions to be validated before the workflow can be processed." + required: no + type: any #incomplete spec + steps: + desc: "" + required: no + type: any #incomplete spec + + +# see section 3.8 Topology Template definition: defines the topology template of a cloud application. +# described as a a reusable grammar as it can be a part of a service template definition +_topology_template_definition: &topology_template_definition + type: map + name: topology_template_definition + mapping: + description: + desc: "a description of the topology template" + required: no + type: str + inputs: + desc: "definition of input parameters for the topology template" + name: inputs + required: no + type: map + mapping: + =: + *property_definition + node_templates: + desc: "definition of the node templates of the topology" + name: node_templates + required: no + type: map + mapping: + =: + *node_template_definition + relationship_templates: + desc: "definition of the relationship templates of the topology" + required: no + name: relationship_templates + type: map + mapping: + =: + *relationship_template_definition + outputs: + desc: "definition of output parameters for the topology template" + name: outputs + required: no + type: map + mapping: + =: + *attribute_assignment_definition + groups: + desc: "An optional list of Group definitions whose members are node templates defined within this same Topology Template" + name: groups + required: no + type: map + mapping: + =: + *group_definition + policies: + # see 8.2.3, initially the list is not described as sequenced but then the grammar shows it as such !? + desc: "An optional sequenced?? list of Policy definitions for the Topology Template." + name: policies + required: no + type: seq + sequence: + - type: map + mapping: + =: + *policy_template_definition + workflows: + desc: "optional map of imperative workflow definition for the Topology Template" + name: workflows + required: no + type: map + mapping: + =: + *workflow_definition + substitution_mappings: +# one possible short-coming that is visible here is that the definition of the capability +# and requirements mappings are given in the spec only with the short/inline version of a +# YAML list/sequence, which cannot be enforced here .. + desc: " a description of the topology template" + name: substitution_mappings + required: no + type: map + mapping: + node_type: + desc: "node type name" + required: yes + type: str + capabilities: + desc: "map_of_capability_mappings_to_expose" + type: map + mapping: + =: + type: seq + sequence: + - type: str + requirements: + desc: "map_of_requirement_mapping_to_expose" + type: map + mapping: + =: + type: seq + sequence: + - type: str + + +# see A.9 Service Template definition: A TOSCA Service Template (YAML) document contains +# element definitions of building blocks for cloud application, or complete models of cloud applications. + +type: map +name: service_template_definition +mapping: + tosca_definitions_version: + desc: "Required TOSCA Definitions version string" + required: yes + type: str + enum: [tosca_simple_yaml_1_1, tosca_simple_yaml_1_1_0] + + tosca_default_namespace: + desc: "Optional. 
default namespace (for type schema)" + required: no + type: str + + metadata: + desc: "Optional metadata keyname: value pairs" + name: metadata + required: no + type: map + mapping: + template_name: + desc: "Optional name of this service template" + required: no + type: str + template_author: + desc: "Optional author of this service template" + required: no + type: str + template_version: + desc: "Optional version of this service template" + required: no + type: str + =: + desc: "User defined entry" + required: no + type: str + +#to add, the spec says: "Optional list of domain or profile specific metadata keynames" + + description: + desc: "Optional description of the definitions inside the file" + required: no + type: str + + imports: + desc: "ordered list of import statements for importing other definitions files" + name: imports + required: no + type: seq + sequence: + - type: map + mapping: + =: + *import_definition + + dsl_definitions: + desc: "list of YAML alias anchors (or macros)" + name: dsl_definitions + required: no + type: map + mapping: + =: + desc: "some piece of valid yaml that makes the anchor/alias definition" + type: any + required: no + + repositories: + desc: "list of external repository definitions which host TOSCA artifacts" + name: repositories + required: no + type: map + mapping: + =: + *repository_definition + + data_types: + desc: "list of TOSCA datatype definitions" + name: data_types + required: no + type: map + mapping: + =: + *data_type_definition + + node_types: + desc: "list of node type definitions" + name: node_types + required: no + type: map + mapping: + =: + *node_type_definition + + capability_types: + desc: "list of capability type definitions" + name: capability_types + required: no + type: map + mapping: + =: + *capability_type_definition + + relationship_types: + desc: "list of relationship type definitions" + name: relationship_types + required: no + type: map + mapping: + =: + *relationship_type_definition + + artifact_types: + desc: "list of artifact type definitions" + name: artifact_types + required: no + type: map + mapping: + =: + *artifact_type_definition + + interface_types: + desc: "list of interface type definitions" + name: interface_types + required: no + type: map + mapping: + =: + *interface_type_definition + + group_types: + desc: "list of group type definitions" + name: group_types + required: no + type: map + mapping: + =: + *group_type_definition + + policy_types: + desc: "list of policy type definitions" + name: policy_types + required: no + type: map + mapping: + =: + *policy_type_definition + + topology_template: + desc: "topology template definition of the cloud application or service" + required: no + <<: *topology_template_definition diff --git a/dcaedt_validator/configure-and-run.sh b/dcaedt_validator/configure-and-run.sh new file mode 100644 index 0000000..db6fb4b --- /dev/null +++ b/dcaedt_validator/configure-and-run.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +LOGFILE=/opt/app/validator/logs/validator-`date +%Y%m%d`.log +echo "`date`:<-------------------- Starting -------------------->" >> $LOGFILE +exec java -cp .:ASC-Validator.jar ${INTROSCOPE} ${INTRONAME} org.springframework.boot.loader.JarLauncher 2>&1 | tee -a $LOGFILE +#exec java -cp .:ASC-Validator-Service-0.0.1704-SNAPSHOT.jar org.springframework.boot.loader.JarLauncher 2>&1 | tee -a $LOGFILE + diff --git a/dcaedt_validator/kwalify/.gitignore b/dcaedt_validator/kwalify/.gitignore new file mode 100644 index 0000000..b83d222 --- /dev/null +++ 
b/dcaedt_validator/kwalify/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/dcaedt_validator/kwalify/pom.xml b/dcaedt_validator/kwalify/pom.xml new file mode 100644 index 0000000..fe68f17 --- /dev/null +++ b/dcaedt_validator/kwalify/pom.xml @@ -0,0 +1,80 @@ +<project + xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.onap.sdc.dcae</groupId> + <artifactId>DCAE-DT-Validator</artifactId> + <version>1806.0.1-SNAPSHOT</version> + </parent> + <artifactId>kwalify</artifactId> + <packaging>jar</packaging> + <name>kwalify</name> + <build> + <sourceDirectory>src/main/java</sourceDirectory> + <plugins> + <plugin> + <artifactId>maven-compiler-plugin</artifactId> + <version>3.1</version> + <configuration> + <source>1.8</source> + <target>1.8</target> + <encoding>${project.build.sourceEncoding}</encoding> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-dependency-plugin</artifactId> + <version>2.10</version> + <executions> + <execution> + <id>copy-dependencies</id> + <phase>package</phase> + <goals> + <goal>copy-dependencies</goal> + </goals> + <configuration> + <outputDirectory>${project.build.directory}/deps</outputDirectory> + <overWriteReleases>false</overWriteReleases> + <overWriteSnapshots>false</overWriteSnapshots> + <overWriteIfNewer>true</overWriteIfNewer> + </configuration> + </execution> + </executions> + </plugin> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>buildnumber-maven-plugin</artifactId> + <version>1.4</version> + <executions> + <execution> + <phase>validate</phase> + <goals> + <goal>create</goal> + </goals> + </execution> + </executions> + <configuration> + <doCheck>false</doCheck> + <doUpdate>false</doUpdate> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <version>2.1</version> + <configuration> + <archive> + <manifest> + <addDefaultImplementationEntries>true</addDefaultImplementationEntries> + </manifest> + <manifestEntries> + <Implementation-Build>${buildNumber}</Implementation-Build> + </manifestEntries> + </archive> + </configuration> + </plugin> + </plugins> + </build> + <dependencies></dependencies> +</project> diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/BaseException.java b/dcaedt_validator/kwalify/src/main/java/kwalify/BaseException.java new file mode 100644 index 0000000..a578ba4 --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/BaseException.java @@ -0,0 +1,32 @@ +/* + * @(#)BaseException.java $Rev: 3 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +public abstract class BaseException extends KwalifyRuntimeException { + + private final String yPath; + private final transient Object value; + private final transient Rule rule; + private int lineNum = -1; + + BaseException(String message, String ypath, Object value, Rule rule) { + super(message); + this.yPath = ypath; + this.value = value; + this.rule = rule; + } + + public String getPath() { return "".equals(yPath) ? 
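+ // (note added for clarity, not in the original source: an empty ypath means the error occurred at the document root, so "/" is returned)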
"/" : yPath; } + + public Object getValue() { return value; } + + public Rule getRule() { return rule; } + + public int getLineNumber() { return lineNum; } + + public void setLineNumber(int lineNum) { this.lineNum = lineNum; } +} diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/CommandOptionException.java b/dcaedt_validator/kwalify/src/main/java/kwalify/CommandOptionException.java new file mode 100644 index 0000000..e35be85 --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/CommandOptionException.java @@ -0,0 +1,33 @@ +/* + * @(#)CommandOptionException.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +/** + * exception class thrown if command-line option is wrong + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class CommandOptionException extends KwalifyException { + private static final long serialVersionUID = 6433387612335104714L; + + private String _error_symbol = null; + private char _option; + + public CommandOptionException(String message, char option, String error_symbol) { + super(message); + _option = option; + _error_symbol = error_symbol; + } + + public String getErrorSymbol() { return _error_symbol; } + public void setErrorSymbol(String error_symbol) { _error_symbol = error_symbol; } + + public char getOption() { return _option; } + public void setOption(char option) { _option = option; } + +} diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/Defaultable.java b/dcaedt_validator/kwalify/src/main/java/kwalify/Defaultable.java new file mode 100644 index 0000000..f1de3fc --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/Defaultable.java @@ -0,0 +1,13 @@ +/* + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +/** + * interface to have default value + */ +public interface Defaultable { + Object getDefault(); + void setDefault(Object value); +} diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/DefaultableHashMap.java b/dcaedt_validator/kwalify/src/main/java/kwalify/DefaultableHashMap.java new file mode 100644 index 0000000..c2c625c --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/DefaultableHashMap.java @@ -0,0 +1,31 @@ +/* + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +import java.io.Serializable; +import java.util.HashMap; + +/** + * hash map which can have default value + */ +public class DefaultableHashMap extends HashMap implements Defaultable { + + private static final long serialVersionUID = -5224819562023897380L; + + private Object defaultValue = null; + + public DefaultableHashMap() { + super(); + } + + public Object getDefault() { return defaultValue; } + + public void setDefault(Object value) { defaultValue = value; } + + @Override + public Object get(Object key) { + return containsKey(key) ? super.get(key) : defaultValue; + } +} diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/InvalidPathException.java b/dcaedt_validator/kwalify/src/main/java/kwalify/InvalidPathException.java new file mode 100644 index 0000000..94eeca2 --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/InvalidPathException.java @@ -0,0 +1,23 @@ +/* + * @(#)InvalidPathException.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. 
+ */
+
+package kwalify;
+
+/**
+ * exception class thrown by YamlParser#setErrorsLineNumber() when path is wrong
+ *
+ * @revision $Rev: 4 $
+ * @release $Release: 0.5.1 $
+ */
+public class InvalidPathException extends KwalifyRuntimeException {
+    private static final long serialVersionUID = -4601461998104850880L;
+
+    //private int _linenum;
+
+    public InvalidPathException(String message) {
+        super(message);
+    }
+}
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/InvalidTypeException.java b/dcaedt_validator/kwalify/src/main/java/kwalify/InvalidTypeException.java
new file mode 100644
index 0000000..fe60ca0
--- /dev/null
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/InvalidTypeException.java
@@ -0,0 +1,21 @@
+/*
+ * @(#)InvalidTypeException.java $Rev: 4 $ $Release: 0.5.1 $
+ *
+ * copyright(c) 2005 kuwata-lab all rights reserved.
+ */
+
+package kwalify;
+
+/**
+ * exception class thrown by Util.compareValues() when comparing values of different types.
+ *
+ * @revision $Rev: 4 $
+ * @release $Release: 0.5.1 $
+ */
+public class InvalidTypeException extends KwalifyRuntimeException {
+    private static final long serialVersionUID = -6937887618526171845L;
+
+    public InvalidTypeException(String message) {
+        super(message);
+    }
+}
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/KwalifyException.java b/dcaedt_validator/kwalify/src/main/java/kwalify/KwalifyException.java
new file mode 100644
index 0000000..976a263
--- /dev/null
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/KwalifyException.java
@@ -0,0 +1,20 @@
+/*
+ * @(#)KwalifyException.java $Rev: 3 $ $Release: 0.5.1 $
+ *
+ * copyright(c) 2005 kuwata-lab all rights reserved.
+ */
+
+package kwalify;
+
+/**
+ * base exception class of all exceptions in Kwalify
+ *
+ * @revision $Rev: 3 $
+ * @release $Release: 0.5.1 $
+ * @see KwalifyRuntimeException
+ */
+public abstract class KwalifyException extends Exception {
+    public KwalifyException(String message) {
+        super(message);
+    }
+}
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/KwalifyRuntimeException.java b/dcaedt_validator/kwalify/src/main/java/kwalify/KwalifyRuntimeException.java
new file mode 100644
index 0000000..75e4764
--- /dev/null
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/KwalifyRuntimeException.java
@@ -0,0 +1,19 @@
+/*
+ * @(#)KwalifyRuntimeException.java $Rev: 3 $ $Release: 0.5.1 $
+ *
+ * copyright(c) 2005 kuwata-lab all rights reserved.
+ */
+
+package kwalify;
+
+/**
+ * base class of all runtime exception classes in Kwalify
+ *
+ * @revision $Rev: 3 $
+ * @release $Release: 0.5.1 $
+ */
+public abstract class KwalifyRuntimeException extends RuntimeException {
+    public KwalifyRuntimeException(String message) {
+        super(message);
+    }
+}
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/Main.java b/dcaedt_validator/kwalify/src/main/java/kwalify/Main.java
new file mode 100644
index 0000000..d2f1881
--- /dev/null
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/Main.java
@@ -0,0 +1,311 @@
+/*
+ * @(#)Main.java $Rev: 4 $ $Release: 0.5.1 $
+ *
+ * copyright(c) 2005 kuwata-lab all rights reserved.
+ */ + +package kwalify; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; + +import java.util.List; +import java.util.Map; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Collections; +import java.util.regex.Matcher; +import java.io.IOException; + +/** + * class for main program + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class Main { + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private String _command; + private boolean _flag_help = false; // print help + private boolean _flag_version = false; // print version + private boolean _flag_silent = false; // suppress messages + private boolean _flag_meta = false; // meta validation + private boolean _flag_untabify = false; // expand tab character to spaces + private boolean _flag_emacs = false; // show errors in emacs style + private boolean _flag_linenum = false; // show line number on where errors happened + private boolean _flag_debug = false; // internal use only + private String _schema_filename = null; // schema filename + private Map _properties = new HashMap(); + + + boolean isDebug() { return _flag_debug; } + + + public String inspect() { + StringBuffer sb = new StringBuffer(); + sb.append("command : ").append(_command ).append('\n'); + sb.append("flag_help : ").append(_flag_help ).append('\n'); + sb.append("flag_version : ").append(_flag_version ).append('\n'); + sb.append("flag_silent : ").append(_flag_silent ).append('\n'); + sb.append("flag_meta : ").append(_flag_meta ).append('\n'); + sb.append("flag_untabify : ").append(_flag_untabify ).append('\n'); + sb.append("flag_emacs : ").append(_flag_emacs ).append('\n'); + sb.append("flag_linenum : ").append(_flag_linenum ).append('\n'); + sb.append("flag_debug : ").append(_flag_debug ).append('\n'); + sb.append("schema_filename : ").append(_schema_filename).append('\n'); + sb.append("properties:\n"); + for (Iterator it = _properties.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + Object val = _properties.get(key); + sb.append(" ").append(key).append(": ").append(val).append('\n'); + } + return sb.toString(); + } + + + private static final String REVISION = "$Release: 0.5.1 $"; + private static final String HELP = "" + + "Usage1: %s [-hvstlE] -f schema.yaml doc.yaml [doc2.yaml ...]\n" + + "Usage2: %s [-hvstlE] -m schema.yaml [schema2.yaml ...]\n" + + " -h, --help : help\n" + + " -v : version\n" + + " -s : silent\n" + + " -f schema.yaml : schema definition file\n" + + " -m : meta-validation mode\n" + + " -t : expand tab character automatically\n" + + " -l : show linenumber when errored (experimental)\n" + + " -E : show errors in emacs-style (implies '-l')\n" + ; + + + public Main(String command) { + _command = command; + } + + + public String execute(String[] args) throws IOException, CommandOptionException, SyntaxException { + // parse command-line options + String[] filenames = parseOptions(args); + + // help or version + StringBuffer sb = new StringBuffer(); + if (_flag_version) { + sb.append(version()).append('\n'); + } + if (_flag_help) { + sb.append(help()); + } + if (sb.length() > 0) { + return sb.toString(); + } + + // main + String s = null; + if (_flag_meta) { + s = metaValidate(filenames); + } else if (_schema_filename == null) { + throw optionError("command.option.noaction", '\0'); + 
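+            // Illustrative usage (added note, not part of the original commit): with the options
+            // parsed above, a typical validation run of this entry point looks like
+            //     java -cp .:ASC-Validator.jar kwalify.Main -f schema.yaml doc.yaml
+            // (the jar name follows configure-and-run.sh earlier in this commit); passing -m
+            // instead of -f meta-validates the schema files themselves via metaValidate() below.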
} else if (_flag_debug) { + s = inspectSchemaFile(_schema_filename); + } else { + s = validate(filenames, _schema_filename); + } + + // + return s; + } + + + private String[] parseOptions(String[] args) throws CommandOptionException { + Object[] ret = null; + try { + ret = Util.parseCommandOptions(args, "hvsmtlED", "f", null); + } catch (CommandOptionException ex) { + String error_symbol = ex.getErrorSymbol(); + if (error_symbol.equals("command.option.noarg")) { + switch (ex.getOption()) { + case 'f': error_symbol = "command.option.noschema"; break; + default: + assert false; + } + } + throw optionError(error_symbol, ex.getOption()); + } + // + Map options = (Map)ret[0]; + Map properties = (Map)ret[1]; + String[] filenames = (String[])ret[2]; + // + _flag_help = options.get("h") != null; + _flag_version = options.get("v") != null; + _flag_silent = options.get("s") != null; + _flag_meta = options.get("m") != null; + _flag_untabify = options.get("t") != null; + _flag_emacs = options.get("E") != null; + _flag_linenum = options.get("l") != null || _flag_emacs; + _flag_debug = options.get("D") != null; + _schema_filename = (String)options.get("f"); + // + // + _properties = properties; + if (_properties.get("help") != null) { + _flag_help = true; + } + // + return filenames; + } + + + private String validate(String[] filenames, String schema_filename) throws IOException, SyntaxException { + String str = Util.readFile(schema_filename); + if (_flag_untabify) { + str = Util.untabify(str); + } + YamlParser parser = new YamlParser(str); + Object schema = parser.parse(); + Validator validator = new Validator(schema); + String s = validateFiles(validator, filenames); + return s; + } + + + private String validateFiles(Validator validator, String[] filenames) throws IOException, SyntaxException { + if (filenames.length == 0) { + filenames = new String[] { null }; + } + StringBuffer sb = new StringBuffer(); + for (int j = 0; j < filenames.length; j++) { + String filename = filenames[j]; + String str = null; + if (filename == null) { + str = Util.readInputStream(System.in); + filename = "(stdin)"; + } else { + str = Util.readFile(filename); + } + if (_flag_untabify) { + str = Util.untabify(str); + } + YamlParser parser = new YamlParser(str); + int i = 0; + while (parser.hasNext()) { + Object doc = parser.parse(); + validateDocument(sb, validator, doc, filename, i, parser); + i++; + } + } + return sb.toString(); + } + + + private void validateDocument(StringBuffer sb, Validator validator, Object doc, String filename, int i, YamlParser parser) { + if (doc == null) { + Object[] args = { filename, new Integer(i) }; + String msg = Messages.buildMessage("validation.empty", null, args); + sb.append(msg).append('\n'); + return; + } + List errors = validator.validate(doc); + Object[] args = { filename, new Integer(i) }; + if (errors == null || errors.size() == 0) { + if (! 
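+                // (added note, not in the original source) -s suppresses the per-document
+                // "valid" message here; invalid documents are always reported below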
_flag_silent) { + String msg = Messages.buildMessage("validation.valid", args); + sb.append(msg).append('\n'); + } + } else { + String msg = Messages.buildMessage("validation.invalid", args); + sb.append(msg).append('\n'); + if (_flag_linenum) { + assert parser != null; + parser.setErrorsLineNumber(errors); + Collections.sort(errors); + } + for (Iterator it = errors.iterator(); it.hasNext(); ) { + ValidationException error = (ValidationException)it.next(); + if (_flag_emacs) { + assert _flag_linenum; + sb.append(filename).append(":").append(error.getLineNumber()).append(":"); + } else if (_flag_linenum) { + sb.append(" - (line ").append(error.getLineNumber()).append(")"); + } else { + sb.append(" -"); + } + sb.append(" [").append(error.getPath()).append("] ").append(error.getMessage()).append('\n'); + } + } + } + + + private String metaValidate(String[] filenames) throws IOException, SyntaxException { + Validator meta_validator = MetaValidator.instance(); + String s = validateFiles(meta_validator, filenames); + return s; + } + + + private String inspectSchemaFile(String schema_filename) throws IOException, SyntaxException { + String filename = schema_filename; + String content = filename != null ? Util.readFile(filename) : Util.readInputStream(System.in); + YamlParser parser = new YamlParser(content); + Object schema = parser.parse(); + if (schema == null) { + return null; + } + Validator validator = new Validator(schema); // SchemaException is thrown when schema is wrong + String s = validator.getRule().inspect(); + if (s.charAt(s.length() - 1) != '\n') { + s = s + '\n'; + } + return s; + } + + + private static CommandOptionException optionError(String error_symbol, char option) { + Object[] args = { Character.toString(option) }; + String message = Messages.buildMessage(error_symbol, null, args); + return new CommandOptionException(message, option, error_symbol); + } + + + private String version() { + Matcher m = Util.matcher(REVISION, "[.\\d]+"); + m.find(); + String version = m.group(0); + return version; + } + + + private String help() { + String help_msg = Messages.buildMessage("command.help", null, new Object[] { _command, _command }); + //String help = HELP.replaceAll("%s", _command); + return help_msg; + } + + + public static void main(String[] args) throws Exception { + int status = 0; + Main main = null; + try { + main = new Main("kwalify-java"); + String result = main.execute(args); + if (result != null) { + debugLogger.log(LogLevel.DEBUG, Main.class.getName(), result); + } } catch (Exception ex) { + if (main != null && main.isDebug()) { + throw ex; + } + if ( ex instanceof CommandOptionException + || ex instanceof SyntaxException + || ex instanceof IOException) { + errLogger.log(LogLevel.ERROR, Main.class.getName(), "ERROR: {}", ex.getMessage()); + status = 1; + } + } + System.exit(status); + } + +} diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/Messages.java b/dcaedt_validator/kwalify/src/main/java/kwalify/Messages.java new file mode 100644 index 0000000..b77f04b --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/Messages.java @@ -0,0 +1,51 @@ +/* + * @(#)Messages.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +import java.util.ResourceBundle; +//import java.util.Locale; + +/** + * set of utility methods around messages. 
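+ *
+ * A hypothetical example (added for illustration; the key and bundle entry are assumed,
+ * not taken from this commit):
+ * <pre>
+ *     // assuming kwalify/messages.properties contains: validation.invalid=%s#%d: INVALID
+ *     String msg = Messages.buildMessage("validation.invalid", new Object[] { "doc.yaml", 0 });
+ *     // msg is now "doc.yaml#0: INVALID"
+ * </pre>
+ * buildMessage() replaces each %s/%d placeholder in the looked-up string with the next
+ * argument (see the replaceFirst() loop below) instead of using MessageFormat.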
+ * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class Messages { + + private static final String __basename = "kwalify.messages"; + private static ResourceBundle __messages = ResourceBundle.getBundle(__basename); + //private static ResourceBundle __messages = ResourceBundle.getBundle(__basename, Locale.getDefault()); + + public static String message(String key) { + return __messages.getString(key); + } + + public static String buildMessage(String key, Object[] args) { + return buildMessage(key, null, args); + } + + public static String buildMessage(String key, Object value, Object[] args) { + String msg = message(key); + assert msg != null; + if (args != null) { + for (int i = 0; i < args.length; i++) { // don't use MessageFormat + msg = msg.replaceFirst("%[sd]", escape(args[i])); + } + } + if (value != null && !Types.isCollection(value)) { + msg = "'" + escape(value) + "': " + msg; + } + return msg; + } + + private static String escape(Object obj) { + //return obj.toString().replaceAll("\\", "\\\\").replace("\n", "\\n"); // J2SK1.4 doesn't support String#replace(CharSequence, CharSequence)! + return obj.toString().replaceAll("\\\\", "\\\\\\\\").replaceAll("\\n", "\\\\n"); + } + +} diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/MetaValidator.java b/dcaedt_validator/kwalify/src/main/java/kwalify/MetaValidator.java new file mode 100644 index 0000000..c8c21a7 --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/MetaValidator.java @@ -0,0 +1,445 @@ +/* + * @(#)MetaValidator.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +import org.onap.sdc.common.onaplog.OnapLoggerDebug; +import org.onap.sdc.common.onaplog.OnapLoggerError; +import org.onap.sdc.common.onaplog.Enums.LogLevel; + +import java.util.Map; +import java.util.List; +import java.util.Iterator; +import java.util.regex.Pattern; +import java.util.regex.Matcher; +import java.util.regex.PatternSyntaxException; + +/** + * meta validator to validate schema definition + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class MetaValidator extends Validator { + + private static OnapLoggerError errLogger = OnapLoggerError.getInstance(); + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + public static final String META_SCHEMA = "" + + "name: MAIN\n" + + "type: map\n" + + "required: yes\n" + + "mapping: &main-rule\n" + + " \"name\":\n" + + " type: str\n" + + " \"desc\":\n" + + " type: str\n" + + " \"type\":\n" + + " type: str\n" + + " #required: yes\n" + + " enum:\n" + + " - seq\n" + + " #- sequence\n" + + " #- list\n" + + " - map\n" + + " #- mapping\n" + + " #- hash\n" + + " - str\n" + + " #- string\n" + + " - int\n" + + " #- integer\n" + + " - float\n" + + " - number\n" + + " #- numeric\n" + + " - bool\n" + + " #- boolean\n" + + " - text\n" + + " - date\n" + + " - time\n" + + " - timestamp\n" + + " #- object\n" + + " - any\n" + + " - scalar\n" + + " #- collection\n" + + " \"required\":\n" + + " type: bool\n" + + " \"enum\":\n" + + " type: seq\n" + + " sequence:\n" + + " - type: scalar\n" + + " unique: yes\n" + + " \"pattern\":\n" + + " type: str\n" + + " \"assert\":\n" + + " type: str\n" + + " pattern: /\\bval\\b/\n" + + " \"range\":\n" + + " type: map\n" + + " mapping:\n" + + " \"max\":\n" + + " type: scalar\n" + + " \"min\":\n" + + " type: scalar\n" + + " \"max-ex\":\n" + + " type: scalar\n" + + " \"min-ex\":\n" + + " type: scalar\n" + + " \"length\":\n" + + " type: map\n" + 
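+        // (added note, not in the original source: META_SCHEMA is itself a kwalify schema and is
+        // used by instance() below to validate user-supplied schemas; constraints it cannot express
+        // declaratively are enforced procedurally in postValidationHook())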
+ " mapping:\n" + + " \"max\":\n" + + " type: int\n" + + " \"min\":\n" + + " type: int\n" + + " \"max-ex\":\n" + + " type: int\n" + + " \"min-ex\":\n" + + " type: int\n" + + " \"ident\":\n" + + " type: bool\n" + + " \"unique\":\n" + + " type: bool\n" + + " \"sequence\":\n" + + " name: SEQUENCE\n" + + " type: seq\n" + + " sequence:\n" + + " - type: map\n" + + " mapping: *main-rule\n" + + " name: MAIN\n" + + " #required: yes\n" + + " \"mapping\":\n" + + " name: MAPPING\n" + + " type: map\n" + + " mapping:\n" + + " =:\n" + + " type: map\n" + + " mapping: *main-rule\n" + + " name: MAIN\n" + + " #required: yes\n" + ; + + + /** + * + * ex. + * <pre> + * MetaValidator meta_validator = MetaValidator(); + * Map schema = YamlUtil.loadFile("schema.yaml"); + * List errors = meta_validator.validate(schema); + * if (errors != null && errors.size() > 0) { + * for (Iterator it = errors.iterator(); it.hasNext(); ) { + * ValidationException error = (ValidationException)it.next(); + * System.err.println(" - [" + error.getPath() + "] " + error.getMessage()); + * } + * } + * </pre> + */ + + private static Validator __instance; + + public static Validator instance() { + synchronized (MetaValidator.class) { + if (__instance == null) { + try { + Map schema = (Map) YamlUtil.load(META_SCHEMA); + __instance = new MetaValidator(schema); + } catch (SyntaxException ex) { + assert false; + } + } + } + + return __instance; + } + + private MetaValidator(Map schema) { + super(schema); + } + + public void postValidationHook(Object value, Rule rule, ValidationContext theContext) { + if (value == null) { + return; // realy? + } + if (! "MAIN".equals(rule.getName())) { + return; + } + // + assert value instanceof Map; + Map map = (Map)value; + String type = (String)map.get("type"); + if (type == null) { + type = Types.getDefaultType(); + } + //Class type_class = Types.typeClass(type); + //if (type_class == null) { + // theContext.addError(validationError("type.unknown", rule, path + "/type", type, null)); + //} + // + //String pattern; + //if ((pattern = (String)map.get("pattern")) != null) { + if (map.containsKey("pattern")) { + String pattern = (String)map.get("pattern"); + Matcher m = Util.matcher(pattern, "\\A\\/(.*)\\/([mi]?[mi]?)\\z"); + String pat = m.find() ? m.group(1) : pattern; + try { + Pattern.compile(pat); + } catch (PatternSyntaxException ex) { + theContext.addError("pattern.syntaxerr", rule, "pattern", pattern, null); + } + } + // + //List enum_list; + //if ((enum_list = (List)map.get("enum")) != null) { + if (map.containsKey("enum")) { + List enum_list = (List)map.get("enum"); + if (Types.isCollectionType(type)) { + theContext.addError("enum.notscalar", rule, "enum:", (Object[])null); + } else { + for (Iterator it = enum_list.iterator(); it.hasNext(); ) { + Object elem = it.next(); + if (! Types.isCorrectType(elem, type)) { + theContext.addError("enum.type.unmatch", rule, "enum", elem, new Object[] { Types.typeName(type) }); + } + } + } + } + // + //String assert_str; + //if ((assert_str = (String)map.get("assert")) != null) { + if (map.containsKey("assert")) { + errLogger.log(LogLevel.ERROR, this.getClass().getName(), "*** warning: sorry, 'assert:' is not supported in current version of Kwalify-java."); + //String assert_str = (String)map.get("assert"); + //if (! 
Util.matches(assert_str, "\\bval\\b")) { + // theContext.addError(validationError("assert.noval", rule, path + "/assert", assert_str, null); + //} + //try { + // Expression.parse(assert_str); + //} catch (InvalidExpressionException ex) { + // theContext.addError(validationError("assert.syntaxerr", rule, path + "/assert", assert_str, null)); + //} + } + // + //Map range; + //if ((range = (Map)map.get("range")) != null) { + if (map.containsKey("range")) { + Map range = (Map)map.get("range"); + //if (! (range instanceof Map)) { + // theContext.addError(validtionError("range.notmap", rule, path + "/range", range, null)); + //} else + if (Types.isCollectionType(type) || type.equals("bool") || type.equals("any")) { + theContext.addError("range.notscalar", rule, "range:", null, null); + } else { + for (Iterator it = range.keySet().iterator(); it.hasNext(); ) { + String k = (String)it.next(); + Object v = range.get(k); + if (! Types.isCorrectType(v, type)) { + theContext.addError("range.type.unmatch", rule, "range/" + k, v, new Object[] { Types.typeName(type) }); + } + } + } + if (range.containsKey("max") && range.containsKey("max-ex")) { + theContext.addError("range.twomax", rule, "range", null, null); + } + if (range.containsKey("min") && range.containsKey("min-ex")) { + theContext.addError("range.twomin", rule, "range", null, null); + } + Object max = range.get("max"); + Object min = range.get("min"); + Object max_ex = range.get("max-ex"); + Object min_ex = range.get("min-ex"); + Object[] args = null; + //String error_symbol = null; + if (max != null) { + if (min != null && Util.compareValues(max, min) < 0) { + args = new Object[] { max, min }; + theContext.addError("range.maxltmin", rule, "range", null, args); + } else if (min_ex != null && Util.compareValues(max, min_ex) <= 0) { + args = new Object[] { max, min_ex }; + theContext.addError("range.maxleminex", rule, "range", null, args); + } + } else if (max_ex != null) { + if (min != null && Util.compareValues(max_ex, min) <= 0) { + args = new Object[] { max_ex, min }; + theContext.addError("range.maxexlemin", rule, "range", null, args); + } else if (min_ex != null && Util.compareValues(max_ex, min_ex) <= 0) { + args = new Object[] { max_ex, min_ex }; + theContext.addError("range.maxexleminex", rule, "range", null, args); + } + } + } + // + //Map length; + //if ((length = (Map)map.get("length")) != null) { + if (map.containsKey("length")) { + Map length = (Map)map.get("length"); + //if (! (length instanceof Map)) { + // theContext.addError(validtionError("length.notmap", rule, path + "/length", length, null)); + //} else + if (! (type.equals("str") || type.equals("text"))) { + theContext.addError("length.nottext", rule, "length:", (Object[])null); + } + //for (Iterator it = length.keySet().iterator(); it.hasNext(); ) { + // String k = (String)it.next(); + // Object v = length.get(k); + // if (k == null || ! (k.equals("max") || k.equals("min") || k.equals("max-ex") || k.equals("min-ex"))) { + // theContext.addError(validationError("length.undefined", rule, path + "/length/" + k, "" + k + ":", null)); + // } else if (! 
(v instanceof Integer)) { + // theContext.addError(validationError("length.notint", rule, path + "/length/" + k, v, null)); + // } + //} + if (length.containsKey("max") && length.containsKey("max-ex")) { + theContext.addError("length.twomax", rule, "length", (Object[])null); + } + if (length.containsKey("min") && length.containsKey("min-ex")) { + theContext.addError("length.twomin", rule, "length", (Object[])null); + } + Integer max = (Integer)length.get("max"); + Integer min = (Integer)length.get("min"); + Integer max_ex = (Integer)length.get("max-ex"); + Integer min_ex = (Integer)length.get("min-ex"); + Object[] args = null; + //String error_symbol = null; + if (max != null) { + if (min != null && max.compareTo(min) < 0) { + args = new Object[] { max, min }; + theContext.addError("length.maxltmin", rule, "length", null, args); + } else if (min_ex != null && max.compareTo(min_ex) <= 0) { + args = new Object[] { max, min_ex }; + theContext.addError("length.maxleminex", rule, "length", null, args); + } + } else if (max_ex != null) { + if (min != null && max_ex.compareTo(min) <= 0) { + args = new Object[] { max_ex, min }; + theContext.addError("length.maxexlemin", rule, "length", null, args); + } else if (min_ex != null && max_ex.compareTo(min_ex) <= 0) { + args = new Object[] { max_ex, min_ex }; + theContext.addError("length.maxexleminex", rule, "length", null, args); + } + } + } + // + //Boolean unique; + //if ((unique = (Boolean)map.get("unique")) != null) { + if (map.containsKey("unique")) { + Boolean unique = (Boolean)map.get("unique"); + if (unique.booleanValue() == true && Types.isCollectionType(type)) { + theContext.addError("unique.notscalar", rule, "unique:", (Object[])null); + } + if (theContext.getPath().length() == 0) { + theContext.addError("unique.onroot", rule, "", "unique:", null); + } + } + // + //Boolean ident; + //if ((ident = (Boolean)map.get("ident")) != null) { + if (map.containsKey("ident")) { + Boolean ident = (Boolean)map.get("ident"); + if (ident.booleanValue() == true && Types.isCollectionType(type)) { + theContext.addError("ident.notscalar", rule, "ident:", (Object[])null); + } + if (theContext.getPath().length() == 0) { + theContext.addError("ident.onroot", rule, "/", "ident:", (Object[])null); + } + } + // + //List seq; + //if ((seq = (List)map.get("sequence")) != null) { + if (map.containsKey("sequence")) { + List seq = (List)map.get("sequence"); + //if (! (seq instanceof List)) { + // theContext.addError(validationError("sequence.notseq", rule, path + "/sequence", seq, null)); + //} else + if (seq == null || seq.size() == 0) { + theContext.addError("sequence.noelem", rule, "sequence", seq, null); + } else if (seq.size() > 1) { + theContext.addError("sequence.toomany", rule, "sequence", seq, null); + } else { + Object item = seq.get(0); + assert item instanceof Map; + Map m = (Map)item; + Boolean ident2 = (Boolean)m.get("ident"); + if (ident2 != null && ident2.booleanValue() == true && ! "map".equals(m.get("type"))) { + theContext.addError("ident.notmap", null, "sequence/0", "ident:", null); + } + } + } + // + //Map mapping; + //if ((mapping = (Map)map.get("mapping")) != null) { + if (map.containsKey("mapping")) { + Map mapping = (Map)map.get("mapping"); + //if (mapping != null && ! 
(mapping instanceof Map)) { + // theContext.addError(validationError("mapping.notmap", rule, path + "/mapping", mapping, null)); + //} else + Object default_value = null; + if (mapping != null && mapping instanceof Defaultable) { + default_value = ((Defaultable)mapping).getDefault(); + } + if (mapping == null || (mapping.size() == 0 && default_value == null)) { + theContext.addError("mapping.noelem", rule, "mapping", mapping, null); + } + } + // + if (type.equals("seq")) { + if (! map.containsKey("sequence")) { + theContext.addError("seq.nosequence", rule, null, (Object[])null); + } + //if (map.containsKey("enum")) { + // theContext.addError(validationError("seq.conflict", rule, path, "enum:", null)); + //} + if (map.containsKey("pattern")) { + theContext.addError("seq.conflict", rule, "pattern:", (Object[])null); + } + if (map.containsKey("mapping")) { + theContext.addError("seq.conflict", rule, "mapping:", (Object[])null); + } + //if (map.containsKey("range")) { + // theContext.addError(validationError("seq.conflict", rule, path, "range:", null)); + //} + //if (map.containsKey("length")) { + // theContext.addError(validationError("seq.conflict", rule, path, "length:", null)); + //} + } else if (type.equals("map")) { + if (! map.containsKey("mapping")) { + theContext.addError("map.nomapping", rule, null, (Object[])null); + } + //if (map.containsKey("enum")) { + // theContext.addError(validationError("map.conflict", rule, path, "enum:", null)); + //} + if (map.containsKey("pattern")) { + theContext.addError("map.conflict", rule, "pattern:", (Object[])null); + } + if (map.containsKey("sequence")) { + theContext.addError("map.conflict", rule, "sequence:", (Object[])null); + } + //if (map.containsKey("range")) { + // theContext.addError(validationError("map.conflict", rule, path, "range:", null)); + //} + //if (map.containsKey("length")) { + // theContext.addError(validationError("map.conflict", rule, path, "length:", null)); + //} + } else { + if (map.containsKey("sequence")) { + theContext.addError("scalar.conflict", rule, "sequence:", (Object[])null); + } + if (map.containsKey("mapping")) { + theContext.addError("scalar.conflict", rule, "mapping:", (Object[])null); + } + if (map.containsKey("enum")) { + if (map.containsKey("range")) { + theContext.addError("enum.conflict", rule, "range:", (Object[])null); + } + if (map.containsKey("length")) { + theContext.addError("enum.conflict", rule, "length:", (Object[])null); + } + if (map.containsKey("pattern")) { + theContext.addError("enum.conflict", rule, "pattern:", (Object[])null); + } + } + } + } + +} diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/Parser.java b/dcaedt_validator/kwalify/src/main/java/kwalify/Parser.java new file mode 100644 index 0000000..53c6272 --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/Parser.java @@ -0,0 +1,19 @@ +/* + * @(#)Parser.java $Rev: 3 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +/** + * interface for any parser + * + * @revision $Rev: 3 $ + * @release $Release: 0.5.1 $ + */ +public interface Parser { + + public Object parse() throws SyntaxException; + +} diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/PlainYamlParser.java b/dcaedt_validator/kwalify/src/main/java/kwalify/PlainYamlParser.java new file mode 100644 index 0000000..5f23a19 --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/PlainYamlParser.java @@ -0,0 +1,742 @@ +/* + * copyright(c) 2005 kuwata-lab all rights reserved. 
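+ *
+ * (illustrative note added for clarity, not in the original header: given input such as
+ *     name: test
+ *     items:
+ *       - 1
+ *       - 2
+ * parse() produces a Map with "name" -> "test" and "items" -> a List of the Integers 1 and 2;
+ * anchors (&label) and aliases (*label) are resolved in a second pass by resolveAliases().)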
+ */ + +package kwalify; + +import java.util.List; +import java.util.ArrayList; +import java.util.Map; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.regex.Matcher; +import java.util.Calendar; +import java.util.TimeZone; + +/** + * plain yaml parser class which is a parent of YamlParser class. + */ +public class PlainYamlParser implements Parser { + + private static final String ANCHOR = "anchor '"; + private static final String ENDFLAG_EOF = "<EOF>"; + private static final String ENDFLAG_DOC_BEGIN = "---"; + private static final String ENDFLAG_DOC_END = "..."; + private static final String REGEXP1 = "^( *)(.*)"; + private static final String REGEXP2 = "^((?::?[-.\\w]+|'.*?'|\".*?\"|=|<<) *):(( +)(.*))?$"; + + public static class Alias { + private String label; + private int lineNum; + + Alias(String label, int lineNum) { + this.label = label; + this.lineNum = lineNum; + } + + String getLabel() { return label; } + + int getLineNumber() { return lineNum; } + } + + + private String[] lines; + private String line = null; + private int linenum = 0; + private Map<String,Object> anchors = new HashMap<>(); + private Map<String,Integer> aliases = new HashMap<>(); + private String endFlag = null; + private String sbuf = null; + private int index = 0; + + PlainYamlParser(String yamlStr) { + List list = Util.toListOfLines(yamlStr); + int len = list.size(); + lines = new String[len + 1]; + for (int i = 0; i < len; i++) { + lines[i + 1] = (String)list.get(i); + } + } + + public Object parse() throws SyntaxException { + Object data = parseChild(0); + if (data == null && endFlag.equals(ENDFLAG_DOC_BEGIN)) { + data = parseChild(0); + } + if (aliases.size() > 0) { + resolveAliases(data); + } + return data; + } + + public boolean hasNext() { + return !endFlag.equals(ENDFLAG_EOF); + } + + private List createSequence() { + return new ArrayList(); + } + + private void addSequenceValue(List seq, Object value) { + seq.add(value); + } + + private void setSequenceValueAt(List seq, int index, Object value) { + seq.set(index, value); + } + + Map createMapping() { + return new DefaultableHashMap(); + } + + private void setMappingValueWith(Map map, Object key, Object value) { + map.put(key, value); + } + + void setMappingDefault(Map map, Object value) { + if (map instanceof Defaultable) { + ((Defaultable)map).setDefault(value); + } + } + + private void mergeMapping(Map map, Map map2) { + for (Object key : map2.keySet()) { + if (!map.containsKey(key)) { + Object value = map2.get(key); + map.put(key, value); + } + } + } + + private void mergeList(Map map, List maplist) throws SyntaxException { + for (Object elem : maplist) { + mergeCollection(map, elem); + } + } + + private void mergeCollection(Map map, Object collection) throws SyntaxException { + if (collection instanceof Map) { + mergeMapping(map, (Map)collection); + } else if (collection instanceof List) { + mergeList(map, (List)collection); + } else { + throw syntaxError("'<<' requires collection (mapping, or sequence of mapping)."); + } + } + + private Object createScalar(Object value) { + return value; + } + + private String currentLine() { + return line; + } + + int currentLineNumber() { + return linenum; + } + + protected String getLine() { + String currentLine; + do { + currentLine = getCurrentLine(); + } while (currentLine != null && Util.matches(currentLine, "^\\s*($|#)")); + return currentLine; + } + + private String getCurrentLine() { + if (++linenum < lines.length) { + line = lines[linenum]; + if (Util.matches(line, 
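+        // (added note) "..." on a line by itself ends the current document, "---" starts the next one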
"^\\.\\.\\.$")) { + line = null; + endFlag = ENDFLAG_DOC_END; + } else if (Util.matches(line, "^---( [!%].*)?$")) { + line = null; + endFlag = ENDFLAG_DOC_BEGIN; + } + } else { + line = null; + endFlag = ENDFLAG_EOF; + } + return line; + } + + private void resetBuffer(String str) { + sbuf = str.charAt(str.length() - 1) == '\n' ? str : str + "\n"; + index = -1; + } + + private int getCurrentCharacter() { + if (index + 1 < sbuf.length()) { + index++; + } else { + String currentLine = getLine(); + if (currentLine == null) { + return -1; + } + resetBuffer(currentLine); + index++; + } + return sbuf.charAt(index); + } + + private int getChar() { + int ch; + do { + ch = getCurrentCharacter(); + } while (ch >= 0 && isWhite(ch)); + return ch; + } + + private int getCharOrNewline() { + int ch; + do { + ch = getCurrentCharacter(); + } while (ch >= 0 && isWhite(ch) && ch != '\n'); + return ch; + } + + private int currentChar() { + return sbuf.charAt(index); + } + + private SyntaxException syntaxError(String message, int linenum) { + return new YamlSyntaxException(message, linenum); + } + + private SyntaxException syntaxError(String message) { + return new SyntaxException(message, linenum); + } + + private Object parseChild(int column) throws SyntaxException { + String currentLine = getLine(); + if (currentLine == null) { + return createScalar(null); + } + Matcher m = Util.matcher(currentLine, REGEXP1); + if (! m.find()) { + assert false; + return null; + } + int indent = m.group(1).length(); + if (indent < column) { + return createScalar(null); + } + String value = m.group(2); + return parseValue(column, value, indent); + } + + private Object parseValue(int column, String value, int valueStartColumn) throws SyntaxException { + Object data; + if (Util.matches(value, "^-( |$)")) { + data = parseSequence(valueStartColumn, value); + } else if (Util.matches(value, REGEXP2)) { + data = parseMapping(valueStartColumn, value); + } else if (Util.matches(value, "^[\\[\\{]")) { + data = parseFlowStyle(value); + } else if (Util.matches(value, "^\\&[-\\w]+( |$)")) { + data = parseAnchor(column, value); + } else if (Util.matches(value, "^\\*[-\\w]+( |$)")) { + data = parseAlias(value); + } else if (Util.matches(value, "^[|>]")) { + data = parseBlockText(column, value); + } else if (Util.matches(value, "^!")) { + data = parseTag(column, value); + } else if (Util.matches(value, "^\\#")) { + data = parseChild(column); + } else { + data = parseScalar(value); + } + return data; + } + + private static boolean isWhite(int ch) { + return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r'; + } + + + private Object parseFlowStyle(String value) throws SyntaxException { + resetBuffer(value); + getChar(); + Object data = parseFlow(0); + int ch = currentChar(); + assert ch == ']' || ch == '}'; + ch = getCharOrNewline(); + if (ch != '\n' && ch != '#' && ch >= 0) { + throw syntaxError("flow style sequence is closed buf got '" + ((char)ch) + "'."); + } + if (ch >= 0) { + getLine(); + } + return data; + } + + private Object parseFlow(int depth) throws SyntaxException { + int ch = currentChar(); + if (ch < 0) { + throw syntaxError("found EOF when parsing flow style."); + } + Object data; + if (ch == '[') { + data = parseFlowSequence(depth); + } else if (ch == '{') { + data = parseFlowMapping(depth); + } else { + data = parseFlowScalar(); + } + return data; + } + + private List parseFlowSequence(int depth) throws SyntaxException { + assert currentChar() == '['; + List seq = createSequence(); + int ch = getChar(); + if (ch != '}') { + 
addSequenceValue(seq, parseFlowSequenceItem(depth + 1));
+            while ((ch = currentChar()) == ',') {
+                ch = getChar();
+                if (ch == ']') {
+                    throw syntaxError("sequence item required (or last comma is extra).");
+                }
+                addSequenceValue(seq, parseFlowSequenceItem(depth + 1));
+            }
+        }
+        if (currentChar() != ']') {
+            throw syntaxError("flow style sequence requires ']'.");
+        }
+        if (depth > 0) {
+            getChar();
+        }
+        return seq;
+    }
+
+    private Object parseFlowSequenceItem(int depth) throws SyntaxException {
+        return parseFlow(depth);
+    }
+
+    private Map parseFlowMapping(int depth) throws SyntaxException {
+        assert currentChar() == '{';
+        Map map = createMapping();
+        int ch = getChar();
+        if (ch != '}') {
+            Object[] pair = parseFlowMappingItem(depth + 1);
+            Object key = pair[0];
+            Object value = pair[1];
+            setMappingValueWith(map, key, value);
+            while ((ch = currentChar()) == ',') {
+                ch = getChar();
+                if (ch == '}') {
+                    throw syntaxError("mapping item required (or last comma is extra).");
+                }
+                pair = parseFlowMappingItem(depth + 1);
+                key = pair[0];
+                value = pair[1];
+                setMappingValueWith(map, key, value);
+            }
+        }
+        if (currentChar() != '}') {
+            throw syntaxError("flow style mapping requires '}'.");
+        }
+        if (depth > 0) {
+            getChar();
+        }
+        return map;
+    }
+
+    private Object[] parseFlowMappingItem(int depth) throws SyntaxException {
+        Object key = parseFlow(depth);
+        int ch = currentChar();
+        if (ch != ':') {
+            String s = ch >= 0 ? "'" + ((char)ch) + "'" : "EOF";
+            throw syntaxError("':' expected but got " + s);
+        }
+        getChar();
+        Object value = parseFlow(depth);
+        return new Object[] { key, value };
+    }
+
+    private Object parseFlowScalar() {
+        int ch = currentChar();
+        Object scalar;
+        StringBuilder sb = new StringBuilder();
+        if (ch == '"' || ch == '\'') {
+            int endch = ch;
+            while ((ch = getCurrentCharacter()) >= 0 && ch != endch) {
+                sb.append((char)ch);
+            }
+            getChar();
+            scalar = sb.toString();
+        } else {
+            sb.append((char)ch);
+            while ((ch = getCurrentCharacter()) >= 0 && ch != ':' && ch != ',' && ch != ']' && ch != '}') {
+                sb.append((char)ch);
+            }
+            scalar = toScalar(sb.toString().trim());
+        }
+        return createScalar(scalar);
+    }
+
+    private Object parseTag(int column, String value) throws SyntaxException {
+        assert Util.matches(value, "^!\\S+");
+        Matcher m = Util.matcher(value, "^!(\\S+)((\\s+)(.*))?$");
+        if (! m.find()) {
+            assert false;
+            return null;
+        }
+        String tag = m.group(1);
+        String space = m.group(3);
+        String value2 = m.group(4);
+        Object data;
+        if (value2 != null && value2.length() > 0) {
+            int valueStartColumn = column + 1 + tag.length() + space.length();
+            data = parseValue(column, value2, valueStartColumn);
+        } else {
+            data = parseChild(column);
+        }
+        return data;
+    }
+
+    private Object parseAnchor(int column, String value) throws SyntaxException {
+        assert Util.matches(value, "^\\&([-\\w]+)(( *)(.*))?$");
+        Matcher m = Util.matcher(value, "^\\&([-\\w]+)(( *)(.*))?$");
+        if (!
m.find()) { + assert false; + return null; + } + String label = m.group(1); + String space = m.group(3); + String value2 = m.group(4); + Object data; + if (value2 != null && value2.length() > 0) { + int valueStartColumn = column + 1 + label.length() + space.length(); + data = parseValue(column, value2, valueStartColumn); + } else { + data = parseChild(column); + } + registerAnchor(label, data); + return data; + } + + private void registerAnchor(String label, Object data) throws SyntaxException { + if (anchors.containsKey(label)) { + throw syntaxError(ANCHOR + label + "' is already used."); + } + anchors.put(label, data); + } + + private Object parseAlias(String value) throws SyntaxException { + assert value.matches("^\\*([-\\w]+)(( *)(.*))?$"); + Matcher m = Util.matcher(value, "^\\*([-\\w]+)(( *)(.*))?$"); + if (! m.find()) { + assert false; + return null; + } + String label = m.group(1); + String value2 = m.group(4); + if (value2 != null && value2.length() > 0 && value2.charAt(0) != '#') { + throw syntaxError("alias cannot take any data."); + } + Object data = anchors.get(label); + if (data == null) { + data = registerAlias(label); + } + getLine(); + return data; + } + + private Alias registerAlias(String label) { + aliases.merge(label, 1, (a, b) -> a + b); + return new Alias(label, linenum); + } + + + private void resolveAliases(Object data) throws SyntaxException { + Map resolved = new IdentityHashMap(); + resolveAliases(data, resolved); + } + + + private void resolveAliases(Object data, Map resolved) throws SyntaxException { + if (resolved.containsKey(data)) { + return; + } + resolved.put(data, data); + if (data instanceof List) { + resolveAliases((List)data, resolved); + } else if (data instanceof Map) { + resolveAliases((Map)data, resolved); + } else { + assert !(data instanceof Alias); + } + if (data instanceof Defaultable) { + Object defaultValue = ((Defaultable)data).getDefault(); + if (defaultValue != null) { + resolveAliases(defaultValue, resolved); + } + } + } + + private void resolveAliases(List seq, Map resolved) throws SyntaxException { + int len = seq.size(); + for (int i = 0; i < len; i++) { + Object val = seq.get(i); + if (val instanceof Alias) { + Alias alias = (Alias)val; + String label = alias.getLabel(); + if (anchors.containsKey(label)) { + setSequenceValueAt(seq, i, anchors.get(label)); + } else { + throw syntaxError(ANCHOR + alias.getLabel() + "' not found."); + } + } else if (val instanceof List || val instanceof Map) { + resolveAliases(val, resolved); + } + } + } + + private void resolveAliases(Map map, Map resolved) throws SyntaxException { + for (Object key : map.keySet()) { + Object val = map.get(key); + if (val instanceof Alias) { + Alias alias = (Alias) val; + String label = alias.getLabel(); + if (anchors.containsKey(label)) { + setMappingValueWith(map, key, anchors.get(label)); + } else { + throw syntaxError(ANCHOR + alias.getLabel() + "' not found.", alias.getLineNumber()); + } + } else if (val instanceof List || val instanceof Map) { + resolveAliases(val, resolved); + } + } + } + + private Object parseBlockText(int column, String value) throws SyntaxException { + assert Util.matches(value, "^[>|]"); + Matcher m = Util.matcher(value, "^([>|])([-+]?)(\\d*)\\s*(.*)$"); + if (! m.find()) { + assert false; + return null; + } + char blockChar = m.group(1).length() > 0 ? m.group(1).charAt(0) : '\0'; + char indicator = m.group(2).length() > 0 ? m.group(2).charAt(0) : '\0'; + int indent = m.group(3).length() > 0 ? 
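+                // (added note) an explicit indent indicator such as "|2" fixes the block indent;
+                // -1 lets it be inferred from the first content line (see "if (indent < 0)" below)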
Integer.parseInt(m.group(3)) : -1; + String text = m.group(4); + char sep = blockChar == '|' ? '\n' : ' '; + String currentLine; + StringBuilder sb = new StringBuilder(); + int n = 0; + while ((currentLine = getCurrentLine()) != null) { + m = Util.matcher(currentLine, "^( *)(.*)$"); + m.find(); + String space = m.group(1); + String str = m.group(2); + if (indent < 0) { + indent = space.length(); + } + if (str.length() == 0) { + n++; + } else { + int slen = space.length(); + if (slen < column) { + break; + } else if (slen < indent) { + throw syntaxError("invalid indent in block text."); + } else { + if (n > 0) { + if (blockChar == '>' && sb.length() > 0) { + sb.deleteCharAt(sb.length() - 1); + } + for (int i = 0; i < n; i++) { + sb.append('\n'); + } + n = 0; + } + str = currentLine.substring(indent); + } + } + sb.append(str); + if ((blockChar == '>') && (sb.charAt(sb.length() - 1) == '\n')) { + sb.setCharAt(sb.length() - 1, ' '); + } + } + if (currentLine != null && Util.matches(currentLine, "^ *#")) { + getLine(); + } + switch (indicator) { + case '+': + handlePlus(blockChar, sb, n); + break; + case '-': + handleMinus(sep, sb); + break; + default: + if (blockChar == '>') { + sb.setCharAt(sb.length() - 1, '\n'); + } + } + return createScalar(text + sb.toString()); + } + + private void handleMinus(char sep, StringBuilder sb) { + if (sb.charAt(sb.length() - 1) == sep) { + sb.deleteCharAt(sb.length() - 1); + } + } + + private void handlePlus(char blockChar, StringBuilder sb, int n) { + if (n > 0) { + if (blockChar == '>') { + sb.setCharAt(sb.length() - 1, '\n'); + } + for (int i = 0; i < n; i++) { + sb.append('\n'); + } + } + } + + + private List parseSequence(int column, String value) throws SyntaxException { + assert Util.matches(value, "^-(( +)(.*))?$"); + List seq = createSequence(); + while (true) { + Matcher m = Util.matcher(value, "^-(( +)(.*))?$"); + if (! m.find()) { + throw syntaxError("sequence item is expected."); + } + String space = m.group(2); + String value2 = m.group(3); + int column2 = column + 1; + + Object elem; + if (value2 == null || value2.length() == 0) { + elem = parseChild(column2); + } else { + int valueStartColumn = column2 + space.length(); + elem = parseValue(column2, value2, valueStartColumn); + } + addSequenceValue(seq, elem); + + String currentLine = currentLine(); + if (currentLine == null) { + break; + } + Matcher m2 = Util.matcher(currentLine, REGEXP1); + m2.find(); + int indent = m2.group(1).length(); + if (indent < column) { + break; + } else if (indent > column) { + throw syntaxError("invalid indent of sequence."); + } + value = m2.group(2); + } + return seq; + } + + + private Map parseMapping(int column, String value) throws SyntaxException { + assert Util.matches(value, REGEXP2); + Map map = createMapping(); + while (true) { + Matcher m = Util.matcher(value, REGEXP2); + if (! 
m.find()) {
+                throw syntaxError("mapping item is expected.");
+            }
+            String v = m.group(1).trim();
+            Object key = toScalar(v);
+            String value2 = m.group(4);
+            int column2 = column + 1;
+
+            Object elem;
+            if (value2 == null || value2.length() == 0) {
+                elem = parseChild(column2);
+            } else {
+                int valueStartColumn = column2 + m.group(1).length() + m.group(3).length();
+                elem = parseValue(column2, value2, valueStartColumn);
+            }
+            if ("=".equals(v)) {
+                setMappingDefault(map, elem);
+            } else if ("<<".equals(v)) {
+                mergeCollection(map, elem);
+            } else {
+                setMappingValueWith(map, key, elem);
+            }
+
+            String currentLine = currentLine();
+            if (currentLine == null) {
+                break;
+            }
+            Matcher m2 = Util.matcher(currentLine, REGEXP1);
+            m2.find();
+            int indent = m2.group(1).length();
+            if (indent < column) {
+                break;
+            } else if (indent > column) {
+                throw syntaxError("invalid indent of mapping.");
+            }
+            value = m2.group(2);
+        }
+        return map;
+    }
+
+
+    private Object parseScalar(String value) {
+        Object data = createScalar(toScalar(value));
+        getLine();
+        return data;
+    }
+
+
+    private Object toScalar(String value) {
+        Matcher m;
+        if ((m = Util.matcher(value, "^\"(.*)\"([ \t]*#.*$)?")).find()) {
+            return m.group(1);
+        } else if ((m = Util.matcher(value, "^'(.*)'([ \t]*#.*$)?")).find()) {
+            return m.group(1);
+        } else if ((m = Util.matcher(value, "^(.*\\S)[ \t]*#")).find()) {
+            value = m.group(1);
+        }
+
+        if (Util.matches(value, "^-?0x\\d+$")) {
+            return Integer.parseInt(value.replaceFirst("0x", ""), 16); // strip the "0x" prefix; parseInt() with radix 16 does not accept it
+        } else if (Util.matches(value, "^-?0\\d+$")) {
+            return Integer.parseInt(value, 8);
+        } else if (Util.matches(value, "^-?\\d+$")) {
+            return Integer.parseInt(value, 10);
+        } else if (Util.matches(value, "^-?\\d+\\.\\d+$")) {
+            return Double.parseDouble(value);
+        } else if (Util.matches(value, "^(true|yes|on)$")) {
+            return Boolean.TRUE;
+        } else if (Util.matches(value, "^(false|no|off)$")) {
+            return Boolean.FALSE;
+        } else if (Util.matches(value, "^(null|~)$")){
+            return null;
+        } else if (Util.matches(value, "^:(\\w+)$")) {
+            return value;
+        } else if ((m = Util.matcher(value, "^(\\d\\d\\d\\d)-(\\d\\d)-(\\d\\d)$")).find()) {
+            int year = Integer.parseInt(m.group(1));
+            int month = Integer.parseInt(m.group(2));
+            int day = Integer.parseInt(m.group(3));
+            Calendar cal = Calendar.getInstance();
+            cal.set(year, month - 1, day, 0, 0, 0); // Calendar months are 0-based
+            return cal.getTime();
+        } else if ((m = Util.matcher(value, "^(\\d\\d\\d\\d)-(\\d\\d)-(\\d\\d)(?:[Tt]|[ \t]+)(\\d\\d?):(\\d\\d):(\\d\\d)(\\.\\d*)?(?:Z|[ \t]*([-+]\\d\\d?)(?::(\\d\\d))?)?$")).find()) {
+            int year = Integer.parseInt(m.group(1));
+            int month = Integer.parseInt(m.group(2));
+            int day = Integer.parseInt(m.group(3));
+            int hour = Integer.parseInt(m.group(4));
+            int min = Integer.parseInt(m.group(5));
+            int sec = Integer.parseInt(m.group(6));
+
+            String timezone = "GMT"; // groups 8/9 are null when no explicit offset was given, e.g. for a trailing "Z"
+            if (m.group(8) != null) {
+                timezone += m.group(9) != null ? m.group(8) + ":" + m.group(9) : m.group(8);
+            }
+            Calendar cal = Calendar.getInstance();
+            cal.set(year, month - 1, day, hour, min, sec); // Calendar months are 0-based
+            cal.setTimeZone(TimeZone.getTimeZone(timezone));
+            return cal.getTime();
+        } else {
+            return value;
+        }
+    }
+
+}
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/Rule.java b/dcaedt_validator/kwalify/src/main/java/kwalify/Rule.java
new file mode 100644
index 0000000..8dbe0b7
--- /dev/null
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/Rule.java
@@ -0,0 +1,750 @@
+/*
+ * copyright(c) 2005 kuwata-lab all rights reserved.
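+ *
+ * (illustrative note added for clarity, not in the original header: a Rule is constructed from a
+ * schema node such as {type: seq, sequence: [{type: str}]}; the init() methods below translate
+ * each schema keyword into a field, and Validator then walks documents against the resulting tree.)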
+ */ + +package kwalify; + +import java.io.Serializable; +import java.util.List; +import java.util.ArrayList; +import java.util.Map; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.regex.Pattern; +import java.util.regex.Matcher; +import java.util.regex.PatternSyntaxException; + +import org.onap.sdc.common.onaplog.Enums.LogLevel; +import org.onap.sdc.common.onaplog.OnapLoggerDebug; + +/** + * rule for validation. + * Validator class generates rule instances. + * + */ +public class Rule implements Serializable{ + private static final String RANGE1 = "/range"; + private static final String RANGE2 = "range:"; + private static final String ENUM_CONFLICT = "enum.conflict"; + private static final String MAP_CONFLICT = "map.conflict"; + private static final String LENGTH1 = "/length"; + private static final String LENGTH2 = "length:"; + private static final String LENGTH3 = "/length/"; + private static final String SEQ_CONFLICT = "seq.conflict"; + private static final String PATTERN1 = "pattern:"; + private static final String MAPPING1 = "mapping:"; + private static final String SEQUENCE1 = "/sequence"; + private static final String MAX_EX = "max-ex"; + private static final String MIN_EX = "min-ex"; + private static final String TYPE1 = "/type"; + private static final String TYPE_NOTSTR = "type.notstr"; + private static final String TYPE_UNKNOWN = "type.unknown"; + private static final String IDENT1 = "ident:"; + private static final String UNIQUE1 = "unique:"; + private static final String MAPPING2 = "/mapping"; + private static final String MAPPING3 = "/mapping/="; + private static final String MAPPING4 = "/mapping/"; + private static final String SEQUENCE2 = "sequence:"; + private static final String SCALAR_CONFLICT = "scalar.conflict"; + private static final String UNIQUE_NOTBOOL = "unique.notbool"; + private static final String UNIQUE_NOTSCALAR = "unique.notscalar"; + private static final String UNIQUE_ONROOT = "unique.onroot"; + private static final String UNIQUE2 = "/unique"; + private static final String IDENT_ONROOT = "ident.onroot"; + private static final String IDENT_NOTSCALAR = "ident.notscalar"; + private static final String IDENT_NOTMAP = "ident.notmap"; + private static final String MAP = "map"; + private static final String EMPTY_STRING = ""; + private static final String SLASH = "/"; + private static final String SCHEMA_NOTMAP = "schema.notmap"; + private static final String SCHEMA_NOTMAP1 = "schema.notmap: {}"; + private static final String PATTERN2 = "/pattern"; + private static final String PATTERN_NOTSTR = "pattern.notstr"; + private static final String PATTERN_NOTMATCH = "pattern.notmatch"; + private static final String REQUIRED_NOTBOOL = "required.notbool"; + private static final String REQUIRED1 = "/required"; + private static final String PATTERN_SYNTAXERR = "pattern.syntaxerr"; + private static final String PATTERN_SYNTAX_EXCEPTION = "PatternSyntaxException: {}"; + private static final String SEQUENCE_NOTSEQ = "sequence.notseq"; + private static final String SEQUENCE_NOELEM = "sequence.noelem"; + private static final String SEQUENCE_TOOMANY = "sequence.toomany"; + private static final String SEQUENCE3 = "/sequence/"; + private static final String MAPPING_NOTMAP = "mapping.notmap"; + private static final String MAPPING_NOELEM = "mapping.noelem"; + private static final String IDENT2 = "/ident"; + private static final String IDENT_NOTBOOL = "ident.notbool"; + private static final String LENGTH_MAXEXLEMINEX = 
"length.maxexleminex"; + private static final String LENGTH_MAXEXLEMIN = "length.maxexlemin"; + private static final String TWO_SPACES = " "; + private static final String NAME1 = "name: "; + private static final String DESC1 = "desc: "; + private static final String TYPE2 = "type: "; + private static final String REQUIRED2 = "required: "; + private static final String PATTERN3 = "pattern: "; + private static final String REGEXP = "regexp: "; + private static final String ASSERT1 = "assert: "; + private static final String IDENT3 = "ident: "; + private static final String UNIQUE3 = "unique: "; + private static final String ENUM2 = "enum:\n"; + private static final String RANGE3 = "range: { "; + private static final String NAME = "name"; + private static final String DESC = "desc"; + private static final String SHORT = "short"; + private static final String REQUIRED = "required"; + private static final String TYPE = "type"; + private static final String PATTERN = "pattern"; + private static final String SEQUENCE = "sequence"; + private static final String MAPPING = "mapping"; + private static final String ASSERT = "assert"; + private static final String RANGE = "range"; + private static final String LENGTH = "length"; + private static final String IDENT = "ident"; + private static final String UNIQUE = "unique"; + private static final String ENUM = "enum:"; + private static final String ENUM1 = "/enum"; + public static final String MAX = "max"; + public static final String MIN = "min"; + + private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance(); + + private Rule parent; + private String name = null; + private String desc = null; + private String _short = null; //added by jora: only used for map types + private boolean required = false; + private String _type = null; + private Class typeClass = null; + private String pattern = null; + private Pattern patternRegexp = null; + private List enumList = null; + private List sequence = null; + private DefaultableHashMap _mapping = null; + private String _assert = null; + private Map<String,Object> range = null; + private Map<String,Integer> length = null; + private boolean ident = false; + private boolean unique = false; + + private static final int CODE_NAME = NAME.hashCode(); + private static final int CODE_DESC = DESC.hashCode(); + private static final int CODE_SHORT = SHORT.hashCode(); + private static final int CODE_REQUIRED = REQUIRED.hashCode(); + private static final int CODE_TYPE = TYPE.hashCode(); + private static final int CODE_PATTERN = PATTERN.hashCode(); + private static final int CODE_LENGTH = LENGTH.hashCode(); + private static final int CODE_RANGE = RANGE.hashCode(); + private static final int CODE_ASSERT = ASSERT.hashCode(); + private static final int CODE_IDENT = IDENT.hashCode(); + private static final int CODE_UNIQUE = UNIQUE.hashCode(); + private static final int CODE_ENUM = ENUM.hashCode(); + private static final int CODE_MAPPING = MAPPING.hashCode(); + private static final int CODE_SEQUENCE = SEQUENCE.hashCode(); + + public Rule(Object schema, Rule parent) { + if (schema != null) { + if (! 
(schema instanceof Map)) { + throw schemaError(SCHEMA_NOTMAP, null, SLASH, null, null); + } + Map ruleTable = new IdentityHashMap(); + init((Map)schema, EMPTY_STRING, ruleTable); + } + this.parent = parent; + } + + public Rule(Object schema) { + this(schema, null); + } + + public Rule(Map schema, Rule parent) { + if (schema != null) { + Map ruleTable = new IdentityHashMap(); + init(schema, EMPTY_STRING, ruleTable); + } + this.parent = parent; + } + + public Rule(Map schema) { + this(schema, null); + } + + public Rule() { + this(null, null); + } + + public String getName() { return name; } + public void setName(String name) { this.name = name; } + + public String getShort() { return _short; } + public void setShort(String key) { _short = key; } + + public boolean isRequired() { return required; } + public void setRequired(boolean required) { this.required = required; } + + public String getType() { return _type; } + public void setType(String type) { this._type = type; } + + public String getPattern() { return pattern; } + public void setPattern(String pattern) { this.pattern = pattern; } + + public Pattern getPatternRegexp() { return patternRegexp; } + + public List getEnum() { return enumList; } + public void setEnum(List enumList) { this.enumList = enumList; } + + public List getSequence() { return sequence; } + public void setSequence(List sequence) { this.sequence = sequence; } + + public DefaultableHashMap getMapping() { return _mapping; } + public void setMapping(DefaultableHashMap mapping) { _mapping = mapping; } + + public String getAssert() { return _assert; } + public void setAssert(String assertString) { _assert = assertString; } + + public Map getRange() { return range; } + public void setRange(Map range) { this.range = range; } + + public Map getLength() { return length; } + public void setLength(Map length) { this.length = length; } + + public boolean isIdent() { return ident; } + + public boolean isUnique() { return unique; } + public void setUnique(boolean unique) { this.unique = unique; } + + private static SchemaException schemaError(String errorSymbol, Rule rule, String path, Object value, Object[] args) { + String msg = Messages.buildMessage(errorSymbol, value, args); + return new SchemaException(msg, path, value, rule); + } + + private void init(Object elem, String path, Map ruleTable) { + assert elem != null; + if (! 
(elem instanceof Map)) { + if (path == null || path.isEmpty()) { + path = SLASH; + } + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), SCHEMA_NOTMAP1, elem); + throw schemaError(SCHEMA_NOTMAP, null, path, null, null); + } + init((Map)elem, path, ruleTable); + } + + private void init(Map hash, String path, Map ruleTable) { + Rule rule = this; + ruleTable.put(hash, rule); + + // 'type:' entry + Object type = hash.get(TYPE); + initTypeValue(type, rule, path); + + // other entries + for (Iterator it = hash.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + Object value = hash.get(key); + int code = key.hashCode(); + + if (code == CODE_TYPE && key.equals(TYPE)) { + // done + } else if (code == CODE_NAME && key.equals(NAME)) { + initNameValue(value); + } else if (code == CODE_DESC && key.equals(DESC)) { + initDescValue(value); + } else if (code == CODE_SHORT && key.equals(SHORT)) { + initShortValue(value, rule, path); + } else if (code == CODE_REQUIRED && key.equals(REQUIRED)) { + initRequiredValue(value, rule, path); + } else if (code == CODE_PATTERN && key.equals(PATTERN)) { + initPatternValue(value, rule, path); + } else if (code == CODE_ENUM && key.equals(ENUM)) { + initEnumValue(value, rule, path); + } else if (code == CODE_ASSERT && key.equals(ASSERT)) { + initAssertValue(value, rule, path); + } else if (code == CODE_RANGE && key.equals(RANGE)) { + initRangeValue(value, rule, path); + } else if (code == CODE_LENGTH && key.equals(LENGTH)) { + initLengthValue(value, rule, path); + } else if (code == CODE_IDENT && key.equals(IDENT)) { + initIdentValue(value, rule, path); + } else if (code == CODE_UNIQUE && key.equals(UNIQUE)) { + initUniqueValue(value, rule, path); + } else if (code == CODE_SEQUENCE && key.equals(SEQUENCE)) { + rule = initSequenceValue(value, rule, path, ruleTable); + } else if (code == CODE_MAPPING && key.equals(MAPPING)) { + rule = initMappingValue(value, rule, path, ruleTable); + } + } + + checkConfliction(hash, rule, path); + } + + private void initTypeValue(Object value, Rule rule, String path) { + if (value == null) { + value = Types.getDefaultType(); + } + if (! (value instanceof String)) { + throw schemaError(TYPE_NOTSTR, rule, path + TYPE1, _type, null); + } + _type = (String)value; + typeClass = Types.typeClass(_type); + if (! Types.isBuiltinType(_type)) { + throw schemaError(TYPE_UNKNOWN, rule, path + TYPE1, _type, null); + } + } + + + private void initNameValue(Object value) { + name = value.toString(); + } + + + private void initDescValue(Object value) { + desc = value.toString(); + } + + private void initShortValue(Object value, Rule rule, String path) { + + //the short form specification is to be interpreted as key if the type is a map or as an + //index if the target is a sequence (as index 0 actually) + if (!Types.isCollectionType(_type)) { + throw schemaError("range.notcollection", rule, path + "/short", value, null); + } + //we should also verify that it points to a declared key of the mapping .. not really, as it would + //fail the overall grammar + _short = value.toString(); + } + + private void initRequiredValue(Object value, Rule rule, String path) { + if (! (value instanceof Boolean)) { + throw schemaError(REQUIRED_NOTBOOL, rule, path + REQUIRED1, value, null); + } + required = (Boolean) value; + } + + + private void initPatternValue(Object value, Rule rule, String path) { + if (! 
(value instanceof String)) { + throw schemaError(PATTERN_NOTSTR, rule, path + PATTERN2, value, null); + } + pattern = (String)value; + Matcher m = Util.matcher(pattern, "\\A/(.*)/([mi]?[mi]?)\\z"); + if (! m.find()) { + throw schemaError(PATTERN_NOTMATCH, rule, path + PATTERN2, value, null); + } + String pat = m.group(1); + String opt = m.group(2); + int flag = 0; + if (opt.indexOf('i') >= 0) { + flag += Pattern.CASE_INSENSITIVE; + } + if (opt.indexOf('m') >= 0) { + flag += Pattern.DOTALL; // not MULTILINE + } + try { + patternRegexp = Pattern.compile(pat, flag); + } catch (PatternSyntaxException ex) { + debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), PATTERN_SYNTAX_EXCEPTION, ex); + throw schemaError(PATTERN_SYNTAXERR, rule, path + PATTERN2, value, null); + } + } + + + private void initEnumValue(Object value, Rule rule, String path) { + if (! (value instanceof List)) { + throw schemaError("enum.notseq", rule, path + ENUM1, value, null); + } + enumList = (List)value; + if (Types.isCollectionType(_type)) { + throw schemaError("enum.notscalar", rule, path, ENUM, null); + } + Map elemTable = new HashMap(); + for (Iterator it = enumList.iterator(); it.hasNext(); ) { + Object elem = it.next(); + if (! Util.isInstanceOf(elem, typeClass)) { + throw schemaError("enum.type.unmatch", rule, path + ENUM1, elem, new Object[] { Types.typeName(_type) }); + } + if (elemTable.containsKey(elem)) { + throw schemaError("enum.duplicate", rule, path + ENUM1, elem, null); + } + elemTable.put(elem, Boolean.TRUE); + } + } + + + private void initAssertValue(Object value, Rule rule, String path) { + if (! (value instanceof String)) { + throw schemaError("assert.notstr", rule, path + "/assert", value, null); + } + _assert = (String)value; + if (! Util.matches(_assert, "\\bval\\b")) { + throw schemaError("assert.noval", rule, path + "/assert", value, null); + } + } + + + private void initRangeValue(Object value, Rule rule, String path) { + if (! (value instanceof Map)) { + throw schemaError("range.notmap", rule, path + RANGE1, value, null); + } + if (Types.isCollectionType(_type) || "bool".equals(_type)) { + throw schemaError("range.notscalar", rule, path, RANGE2, null); + } + range = (Map)value; + for (Iterator it = range.keySet().iterator(); it.hasNext(); ) { + Object rkey = it.next(); + Object rval = range.get(rkey); + if (MAX.equals(rkey) || MIN.equals(rkey) || rkey.equals(MAX_EX) || rkey.equals(MIN_EX)) { + if (! 
Util.isInstanceOf(rval, typeClass)) {
+ String typename = Types.typeName(_type);
+ throw schemaError("range.type.unmatch", rule, path + "/range/" + rkey, rval, new Object[] { typename });
+ }
+ } else {
+ throw schemaError("range.undefined", rule, path + "/range/" + rkey, rkey.toString() + ":", null);
+ }
+ }
+ if (range.containsKey(MAX) && range.containsKey(MAX_EX)) {
+ throw schemaError("range.twomax", rule, path + RANGE1, null, null);
+ }
+ if (range.containsKey(MIN) && range.containsKey(MIN_EX)) {
+ throw schemaError("range.twomin", rule, path + RANGE1, null, null);
+ }
+ //
+ Object max = range.get(MAX);
+ Object min = range.get(MIN);
+ Object maxEx = range.get(MAX_EX);
+ Object minEx = range.get(MIN_EX);
+ Object[] args;
+
+ if (max != null) {
+ if (min != null && Util.compareValues(max, min) < 0) {
+ args = new Object[] { max, min };
+ throw schemaError("range.maxltmin", rule, path + RANGE1, null, args);
+ } else if (minEx != null && Util.compareValues(max, minEx) <= 0) {
+ args = new Object[] { max, minEx };
+ throw schemaError("range.maxleminex", rule, path + RANGE1, null, args);
+ }
+ } else if (maxEx != null) {
+ if (min != null && Util.compareValues(maxEx, min) <= 0) {
+ args = new Object[] { maxEx, min };
+ throw schemaError("range.maxexlemin", rule, path + RANGE1, null, args);
+ } else if (minEx != null && Util.compareValues(maxEx, minEx) <= 0) {
+ args = new Object[] { maxEx, minEx };
+ throw schemaError("range.maxexleminex", rule, path + RANGE1, null, args);
+ }
+ }
+ }
+
+
+ private void initLengthValue(Object value, Rule rule, String path) {
+ if (! (value instanceof Map)) {
+ throw schemaError("length.notmap", rule, path + LENGTH1, value, null);
+ }
+ length = (Map)value;
+ if (! ("str".equals(_type) || "text".equals(_type))) {
+ throw schemaError("length.nottext", rule, path, LENGTH2, null);
+ }
+ for (Object k : ((Map)length).keySet()) {
+ Object v = ((Map)length).get(k);
+ if (MAX.equals(k) || MIN.equals(k) || k.equals(MAX_EX) || k.equals(MIN_EX)) {
+ // a bound may be absent; when present it must be an Integer
+ if (v != null && !(v instanceof Integer)) {
+ throw schemaError("length.notint", rule, path + LENGTH3 + k, v, null);
+ }
+ } else {
+ throw schemaError("length.undefined", rule, path + LENGTH3 + k, k + ":", null);
+ }
+ }
+ if (length.containsKey(MAX) && length.containsKey(MAX_EX)) {
+ throw schemaError("length.twomax", rule, path + LENGTH1, null, null);
+ }
+ if (length.containsKey(MIN) && length.containsKey(MIN_EX)) {
+ throw schemaError("length.twomin", rule, path + LENGTH1, null, null);
+ }
+
+ Integer max = length.get(MAX);
+ Integer min = length.get(MIN);
+ Integer maxEx = length.get(MAX_EX);
+ Integer minEx = length.get(MIN_EX);
+ Object[] args;
+
+ if (max != null) {
+ if (min != null && max.compareTo(min) < 0) {
+ args = new Object[] { max, min };
+ throw schemaError("length.maxltmin", rule, path + LENGTH1, null, args);
+ } else if (minEx != null && max.compareTo(minEx) <= 0) {
+ args = new Object[] { max, minEx };
+ throw schemaError("length.maxleminex", rule, path + LENGTH1, null, args);
+ }
+ } else if (maxEx != null) {
+ if (min != null && maxEx.compareTo(min) <= 0) {
+ args = new Object[] { maxEx, min };
+ throw schemaError(LENGTH_MAXEXLEMIN, rule, path + LENGTH1, null, args);
+ } else if (minEx != null && maxEx.compareTo(minEx) <= 0) {
+ args = new Object[] { maxEx, minEx };
+ throw schemaError(LENGTH_MAXEXLEMINEX, rule, path + LENGTH1, null, args);
+ }
+ }
+ }
+
+ private void initIdentValue(Object value, Rule rule, String path) {
+ if (value == null || ! 
(value instanceof Boolean)) { + throw schemaError(IDENT_NOTBOOL, rule, path + IDENT2, value, null); + } + ident = (Boolean) value; + required = true; + if (Types.isCollectionType(_type)) { + throw schemaError(IDENT_NOTSCALAR, rule, path, IDENT1, null); + } + if (EMPTY_STRING.equals(path)) { + throw schemaError(IDENT_ONROOT, rule, SLASH, IDENT1, null); + } + if (parent == null || ! MAP.equals(parent.getType())) { + throw schemaError(IDENT_NOTMAP, rule, path, IDENT1, null); + } + } + + + private void initUniqueValue(Object value, Rule rule, String path) { + if (! (value instanceof Boolean)) { + throw schemaError(UNIQUE_NOTBOOL, rule, path + UNIQUE2, value, null); + } + unique = (Boolean) value; + if (Types.isCollectionType(_type)) { + throw schemaError(UNIQUE_NOTSCALAR, rule, path, UNIQUE1, null); + } + if (path.equals(EMPTY_STRING)) { + throw schemaError(UNIQUE_ONROOT, rule, SLASH, UNIQUE1, null); + } + } + + + private Rule initSequenceValue(Object value, Rule rule, String path, Map ruleTable) { + if (value != null && ! (value instanceof List)) { + throw schemaError(SEQUENCE_NOTSEQ, rule, path + SEQUENCE1, value.toString(), null); + } + sequence = (List)value; + if (sequence == null || sequence.isEmpty()) { + throw schemaError(SEQUENCE_NOELEM, rule, path + SEQUENCE1, value, null); + } + if (sequence.size() > 1) { + throw schemaError(SEQUENCE_TOOMANY, rule, path + SEQUENCE1, value, null); + } + Object elem = sequence.get(0); + if (elem == null) { + elem = new HashMap(); + } + int i = 0; + rule = (Rule)ruleTable.get(elem); + if (rule == null) { + rule = new Rule(null, this); + rule.init(elem, path + SEQUENCE3 + i, ruleTable); + } + sequence = new ArrayList(); + sequence.add(rule); + return rule; + } + + + private Rule initMappingValue(Object value, Rule rule, String path, Map ruleTable) { + // error check + if (value != null && !(value instanceof Map)) { + throw schemaError(MAPPING_NOTMAP, rule, path + MAPPING2, value.toString(), null); + } + Object defaultValue = null; + if (value instanceof Defaultable) { + defaultValue = ((Defaultable)value).getDefault(); + } + if (value == null || ((Map)value).size() == 0 && defaultValue == null) { + throw schemaError(MAPPING_NOELEM, rule, path + MAPPING2, value, null); + } + // create hash of rule + _mapping = new DefaultableHashMap(); + if (defaultValue != null) { + rule = (Rule)ruleTable.get(defaultValue); + if (rule == null) { + rule = new Rule(null, this); + rule.init(defaultValue, path + MAPPING3, ruleTable); + } + _mapping.setDefault(rule); + } + // put rules into _mapping + Map map = (Map)value; + for (Iterator it = map.keySet().iterator(); it.hasNext(); ) { + Object k = it.next(); + Object v = map.get(k); // DefaultableHashMap + if (v == null) { + v = new DefaultableHashMap(); + } + rule = (Rule)ruleTable.get(v); + if (rule == null) { + rule = new Rule(null, this); + rule.init(v, path + MAPPING4 + k, ruleTable); + } + if ("=".equals(k)) { + _mapping.setDefault(rule); + } else { + _mapping.put(k, rule); + } + } + return rule; + } + + + private void checkConfliction(Map hash, Rule rule, String path) { + if ("seq".equals(_type)) { + if (! 
hash.containsKey(SEQUENCE)) { + throw schemaError("seq.nosequence", rule, path, null, null); + } + if (enumList != null) { + throw schemaError(SEQ_CONFLICT, rule, path, ENUM, null); + } + if (pattern != null) { + throw schemaError(SEQ_CONFLICT, rule, path, PATTERN1, null); + } + if (_mapping != null) { + throw schemaError(SEQ_CONFLICT, rule, path, MAPPING1, null); + } + if (range != null) { + throw schemaError(SEQ_CONFLICT, rule, path, RANGE2, null); + } + if (length != null) { + throw schemaError(SEQ_CONFLICT, rule, path, LENGTH2, null); + } + } else if (_type.equals(MAP)) { + if (! hash.containsKey(MAPPING)) { + throw schemaError("map.nomapping", rule, path, null, null); + } + if (enumList != null) { + throw schemaError(MAP_CONFLICT, rule, path, ENUM, null); + } + if (pattern != null) { + throw schemaError(MAP_CONFLICT, rule, path, PATTERN1, null); + } + if (sequence != null) { + throw schemaError(MAP_CONFLICT, rule, path, SEQUENCE2, null); + } + if (range != null) { + throw schemaError(MAP_CONFLICT, rule, path, RANGE2, null); + } + if (length != null) { + throw schemaError(MAP_CONFLICT, rule, path, LENGTH2, null); + } + } else { + if (sequence != null) { + throw schemaError(SCALAR_CONFLICT, rule, path, SEQUENCE2, null); + } + if (_mapping != null) { + throw schemaError(SCALAR_CONFLICT, rule, path, MAPPING1, null); + } + if (enumList != null) { + if (range != null) { + throw schemaError(ENUM_CONFLICT, rule, path, RANGE2, null); + } + if (length != null) { + throw schemaError(ENUM_CONFLICT, rule, path, LENGTH2, null); + } + if (pattern != null) { + throw schemaError(ENUM_CONFLICT, rule, path, PATTERN1, null); + } + } + } + } + + public String inspect() { + StringBuilder sb = new StringBuilder(); + int level = 0; + Map done = new IdentityHashMap(); + inspect(sb, level, done); + return sb.toString(); + } + + private void inspect(StringBuilder sb, int level, Map done) { + done.put(this, Boolean.TRUE); + String indent = Util.repeatString(TWO_SPACES, level); + if (name != null) { + sb.append(indent).append(NAME1).append(name).append("\n"); + } + if (desc != null) { + sb.append(indent).append(DESC1).append(desc).append("\n"); + } + if (_type != null) { + sb.append(indent).append(TYPE2).append(_type).append("\n"); + } + if (required) { + sb.append(indent).append(REQUIRED2).append(required).append("\n"); + } + if (pattern != null) { + sb.append(indent).append(PATTERN3).append(pattern).append("\n"); + } + if (patternRegexp != null) { + sb.append(indent).append(REGEXP).append(patternRegexp).append("\n"); + } + if (_assert != null) { + sb.append(indent).append(ASSERT1).append(_assert).append("\n"); + } + if (ident) { + sb.append(indent).append(IDENT3).append(ident).append("\n"); + } + if (unique) { + sb.append(indent).append(UNIQUE3).append(unique).append("\n"); + } + if (enumList != null) { + appendEnums(sb, indent); + } + if (range != null) { + appendRange(sb, indent); + } + if (sequence != null) { + appendSequence(sb, level, done, indent); + } + if (_mapping != null) { + appendMapping(sb, level, done, indent); + } + } + + private void appendEnums(StringBuilder sb, String indent) { + sb.append(indent).append(ENUM2); + for (Object anEnumList : enumList) { + sb.append(indent).append(" - ").append(anEnumList.toString()).append("\n"); + } + } + + private void appendMapping(StringBuilder sb, int level, Map done, String indent) { + for (Object o : _mapping.entrySet()) { + Map.Entry entry = (Map.Entry) o; + Object key = entry.getKey(); + Rule rule = (Rule) entry.getValue(); + sb.append(indent).append(" 
").append(Util.inspect(key)); + if (done.containsKey(rule)) { + sb.append(": ...\n"); + } else { + sb.append(":\n"); + rule.inspect(sb, level + 2, done); + } + } + } + + private void appendSequence(StringBuilder sb, int level, Map done, String indent) { + for (Object aSequence : sequence) { + Rule rule = (Rule) aSequence; + if (done.containsKey(rule)) { + sb.append(indent).append(" ").append("- ...\n"); + } else { + sb.append(indent).append(" ").append("- \n"); + rule.inspect(sb, level + 2, done); + } + } + } + + private void appendRange(StringBuilder sb, String indent) { + sb.append(indent).append(RANGE3); + String[] keys = new String[] {MAX, MAX_EX, MIN, MIN_EX, }; + String colon = EMPTY_STRING; + for (String key : keys) { + Object val = range.get(key); + if (val != null) { + sb.append(colon).append(key).append(": ").append(val); + colon = ", "; + } + } + sb.append(" }\n"); + } +} diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/SchemaException.java b/dcaedt_validator/kwalify/src/main/java/kwalify/SchemaException.java new file mode 100644 index 0000000..70dafe9 --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/SchemaException.java @@ -0,0 +1,17 @@ +/* + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +/** + * exception class thrown by Rule constructor + */ +public class SchemaException extends BaseException { + private static final long serialVersionUID = 4750598728284538818L; + + public SchemaException(String message, String ypath, Object value, Rule rule) { + super(message, ypath, value, rule); + } + +} diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/SyntaxException.java b/dcaedt_validator/kwalify/src/main/java/kwalify/SyntaxException.java new file mode 100644 index 0000000..8c36b66 --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/SyntaxException.java @@ -0,0 +1,28 @@ +/* + * @(#)SyntaxException.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +/** + * exception class thrown by parser when syntax is wrong. + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + * @see Parser, YamlSyntaxException + */ +public class SyntaxException extends KwalifyException { + private static final long serialVersionUID = 2480059811372002740L; + + private int _linenum; + + public SyntaxException(String message, int linenum) { + super(message); + _linenum = linenum; + } + + public int getLineNumber() { return _linenum; } + public void setLineNumber(int linenum) { _linenum = linenum; } +} diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/Types.java b/dcaedt_validator/kwalify/src/main/java/kwalify/Types.java new file mode 100644 index 0000000..cce8e1e --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/Types.java @@ -0,0 +1,105 @@ +/* + * @(#)Types.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +import java.util.Map; +import java.util.HashMap; +import java.util.List; +import java.util.Date; + +/** + * utility methods for type (str, int, ...). 
+ * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class Types { + + public static Class typeClass(String type) { + return (Class)__type_classes.get(type); + } + + public static String typeName(String type) { + String name = (String)__type_names.get(type); + if (name == null) name = type; + return name; + } + + public static final String DEFAULT_TYPE = "str"; + + public static String getDefaultType() { return DEFAULT_TYPE; } + + private static Map __type_classes; + private static Map __type_names; + static { + // + __type_classes = new HashMap(); + __type_classes.put("str", String.class); + __type_classes.put("int", Integer.class); + __type_classes.put("float", Double.class); + __type_classes.put("number", Number.class); + __type_classes.put("text", null); + __type_classes.put("bool", Boolean.class); + __type_classes.put("map", Map.class); + __type_classes.put("seq", List.class); + __type_classes.put("timestamp", Date.class); + __type_classes.put("date", Date.class); + __type_classes.put("symbol", String.class); + __type_classes.put("scalar", null); + __type_classes.put("any", Object.class); + //__type_classes.put("null", null); + + // + __type_names = new HashMap(); + __type_names.put("map", "mapping"); + __type_names.put("seq", "sequence"); + __type_names.put("str", "string"); + __type_names.put("int", "integer"); + __type_names.put("bool", "boolean"); + } + + + public static boolean isBuiltinType(String type) { + return __type_classes.containsKey(type); + } + + public static boolean isCollectionType(String type) { + return type.equals("map") || type.equals("seq"); + } + + public static boolean isMapType(String type) { + return type.equals("map"); + } + + public static boolean isScalarType(String type) { + return !isCollectionType(type); + } + + public static boolean isCollection(Object obj) { + return obj instanceof Map || obj instanceof List; + } + + public static boolean isScalar(Object obj) { + return !isCollection(obj); + } + + public static boolean isCorrectType(Object obj, String type) { + Class type_class = typeClass(type); + if (type_class != null) { + return type_class.isInstance(obj); + } + if (type.equals("null")) { + return obj == null; + } else if (type.equals("text")) { + return obj instanceof String || obj instanceof Number; + } else if (type.equals("scalar")) { + return obj instanceof Number || obj instanceof String || obj instanceof Boolean || obj instanceof Date; + } + return false; + } + +} diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/Util.java b/dcaedt_validator/kwalify/src/main/java/kwalify/Util.java new file mode 100644 index 0000000..da34087 --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/Util.java @@ -0,0 +1,456 @@ +/* + * copyright(c) 2005 kuwata-lab all rights reserved. 
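+ *
+ * static helpers shared by the parser and validator: object inspection,
+ * cached regexp matching, file reading, tab expansion, value comparison
+ * and command-line option parsing.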
+ */
+
+package kwalify;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.IdentityHashMap;
+import java.util.Iterator;
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+import java.util.Date;
+import java.io.Reader;
+import java.io.InputStreamReader;
+import java.io.InputStream;
+import java.io.FileInputStream;
+import java.io.IOException;
+
+public class Util {
+ private static final int VALUE_INTEGER = 1;
+ private static final int VALUE_DOUBLE = 2;
+ private static final int VALUE_STRING = 4;
+ private static final int VALUE_BOOLEAN = 8;
+ private static final int VALUE_DATE = 16;
+ private static final int VALUE_OBJECT = 32;
+ private static HashMap<String,Pattern> __patterns = new HashMap<>();
+
+ private Util(){
+ // You shouldn't instantiate this class
+ }
+
+ /**
+ * inspect List or Map
+ */
+ public static String inspect(Object obj) {
+ StringBuilder sb = new StringBuilder();
+ inspect(obj, sb, null);
+ return sb.toString();
+ }
+
+ private static void inspect(Object obj, StringBuilder sb, IdentityHashMap done) {
+ if (obj == null) {
+ sb.append("nil"); // null?
+ } else if (obj instanceof String) {
+ inspect((String)obj, sb);
+ } else if (obj instanceof Map) {
+ // 'done' guards against self-referencing structures
+ if (done == null) {
+ done = new IdentityHashMap();
+ }
+ if (done.containsKey(obj)) {
+ sb.append("{...}");
+ } else {
+ done.put(obj, Boolean.TRUE);
+ inspect((Map)obj, sb, done);
+ }
+ } else if (obj instanceof List) {
+ if (done == null) {
+ done = new IdentityHashMap();
+ }
+ if (done.containsKey(obj)) {
+ sb.append("[...]");
+ } else {
+ done.put(obj, Boolean.TRUE);
+ inspect((List)obj, sb);
+ }
+ } else {
+ sb.append(obj.toString());
+ }
+ }
+
+ private static void inspect(Map map, StringBuilder sb, IdentityHashMap done) {
+ sb.append('{');
+ List list = new ArrayList(map.keySet());
+ Collections.sort(list);
+ int i = 0;
+ for (Iterator it = list.iterator(); it.hasNext(); i++) {
+ Object key = it.next();
+ Object value = map.get(key);
+ if (i > 0) {
+ sb.append(", ");
+ }
+ inspect(key, sb, done);
+ sb.append("=>");
+ inspect(value, sb, done);
+ }
+ sb.append('}');
+ }
+
+ private static void inspect(List list, StringBuilder sb) {
+ sb.append('[');
+ int i = 0;
+ for (Iterator it = list.iterator(); it.hasNext(); i++) {
+ if (i > 0) {
+ sb.append(", ");
+ }
+ Object item = it.next();
+ inspect(item, sb, null);
+ }
+ sb.append(']');
+ }
+
+ private static void inspect(String str, StringBuilder sb) {
+ sb.append('"');
+ for (int i = 0; i < str.length(); i++) {
+ char ch = str.charAt(i);
+ switch (ch) {
+ case '"':
+ sb.append("\\\"");
+ break;
+ case '\n':
+ sb.append("\\n");
+ break;
+ case '\r':
+ sb.append("\\r");
+ break;
+ case '\t':
+ sb.append("\\t");
+ break;
+ default:
+ sb.append(ch);
+ break;
+ }
+ }
+ sb.append('"');
+ }
+
+ /**
+ * match pattern and return Matcher object.
+ *
+ * ex.
+ * <pre>
+ * String target = " name = foo\n mail = foo@mail.com\n";
+ * Matcher m = Util.matcher(target, "^\\s*(\\w+)\\s*=\\s*(.*)$");
+ * while (m.find()) {
+ * String key = m.group(1);
+ * String value = m.group(2);
+ * }
+ * </pre>
+ */
+ public static Matcher matcher(String target, String regexp) {
+ Pattern pat = __patterns.get(regexp);
+ if (pat == null) {
+ pat = Pattern.compile(regexp);
+ __patterns.put(regexp, pat);
+ }
+ return pat.matcher(target);
+ }
+
+ /**
+ * return if pattern matched or not.
+ *
+ * ex.
+ * <pre> + * String target = " name = foo\n"; + * if (Util.matches(target, "^\\s*(\\w+)\\s*=\\s*(.*)$")) { + * System.out.println("matched."); + * } + * </pre> + */ + public static boolean matches(String target, String regexp) { + Matcher m = matcher(target, regexp); + return m.find(); + } + + + public static boolean matches(String target, Pattern regexp) { + Matcher m = regexp.matcher(target); + return m.find(); + } + + /** + * split string into list of line + */ + public static List toListOfLines(String str) { + List<String> list = new ArrayList<>(); + int len = str.length(); + int head = 0; + for (int i = 0; i < len; i++) { + char ch = str.charAt(i); + if (ch == '\n') { + int tail = i + 1; + String line = str.substring(head, tail); + list.add(line); + head = tail; + } + } + if (head != len) { + String line = str.substring(head, len); + list.add(line); + } + return list; + } + + /** + * return true if 'instance' is an instance of 'klass' + */ + public static boolean isInstanceOf(Object instance, Class klass) { + if (instance == null || klass == null) { + return false; + } + Class c = instance.getClass(); + if (klass.isInterface()) { + while (c != null) { + Class[] interfaces = c.getInterfaces(); + for (Class anInterface : interfaces) { + if (anInterface == klass) { + return true; + } + } + c = c.getSuperclass(); + } + } else { + while (c != null) { + if (c == klass) { + return true; + } + c = c.getSuperclass(); + } + } + return false; + } + + + /** + * read file content with default encoding of system + */ + public static String readFile(String filename) throws IOException { + String charset = System.getProperty("file.encoding"); + return readFile(filename, charset); + } + + + /** + * read file content with specified encoding + */ + private static String readFile(String filename, String encoding) throws IOException { + String content; + try (InputStream stream = new FileInputStream(filename)){ + content = readInputStream(stream, encoding); + } + return content; + } + + public static String readInputStream(InputStream stream) throws IOException { + String encoding = System.getProperty("file.encoding"); + return readInputStream(stream, encoding); + } + + private static String readInputStream(InputStream stream, String encoding) throws IOException { + String content; + try (Reader reader = new InputStreamReader(stream, encoding)){ + StringBuilder sb = new StringBuilder(); + int ch; + while ((ch = reader.read()) >= 0) { + sb.append((char)ch); + } + content = sb.toString(); + } + return content; + } + + public static String untabify(CharSequence str) { + int tabWidth = 8; + StringBuilder sb = new StringBuilder(); + int len = str.length(); + int col = -1; + for (int i = 0; i < len; i++) { + col = ++col % tabWidth; + char ch = str.charAt(i); + + switch (ch) { + case '\t': + appendTabAsSpaces(tabWidth, sb, col); + col = -1; + break; + case '\n': + sb.append(ch); + col = -1; + break; + default: + sb.append(ch); + } + } + return sb.toString(); + } + + private static void appendTabAsSpaces(int tabWidth, StringBuilder sb, int col) { + int n = tabWidth - col; + while (--n >= 0) { + sb.append(' '); + } + } + + public static int compareValues(Object value1, Object value2) { + int vtype = (valueType(value1) << 8) | valueType(value2); + switch (vtype) { + case (VALUE_INTEGER << 8) | VALUE_INTEGER : + return ((Integer)value1).compareTo((Integer)value2); + case (VALUE_DOUBLE << 8) | VALUE_DOUBLE : + return ((Double)value1).compareTo((Double)value2); + case (VALUE_STRING << 8) | VALUE_STRING : + return 
((String)value1).compareTo((String)value2);
+ case (VALUE_BOOLEAN << 8) | VALUE_BOOLEAN :
+ boolean b1 = (Boolean) value1;
+ boolean b2 = (Boolean) value2;
+ int ret = b1 ? 1 : -1;
+ return (b1 == b2) ? 0 : ret;
+ case (VALUE_DATE << 8) | VALUE_DATE :
+ return ((Date)value1).compareTo((Date)value2);
+ case (VALUE_DOUBLE << 8) | VALUE_INTEGER :
+ case (VALUE_INTEGER << 8) | VALUE_DOUBLE :
+ double d1 = ((Number)value1).doubleValue();
+ double d2 = ((Number)value2).doubleValue();
+ return Double.compare(d1, d2);
+ default:
+ throw new InvalidTypeException("cannot compare '" + value1.getClass().getName() + "' with '" + value2.getClass().getName() + "'");
+ }
+ }
+
+ private static int valueType(Object value) {
+ if (value instanceof Integer) {
+ return VALUE_INTEGER;
+ }
+
+ if (value instanceof Double) {
+ return VALUE_DOUBLE;
+ }
+
+ if (value instanceof String) {
+ return VALUE_STRING;
+ }
+
+ if (value instanceof Boolean) {
+ return VALUE_BOOLEAN;
+ }
+
+ if (value instanceof Date) {
+ return VALUE_DATE;
+ }
+
+ return VALUE_OBJECT;
+ }
+
+ public static String repeatString(String str, int times) {
+ StringBuilder sb = new StringBuilder();
+ for (int i = 0; i < times; i++) {
+ sb.append(str);
+ }
+ return sb.toString();
+ }
+
+ /**
+ * parse command-line options.
+ *
+ * ex.
+ * <pre>
+ * public static void main(String[] args) {
+ * String singles = "hv"; // options which take no argument.
+ * String requireds = "fI"; // options which require an argument.
+ * String optionals = "i"; // options which can take an optional argument.
+ * try {
+ * Object[] ret = parseCommandOptions(args, singles, requireds, optionals);
+ * Map options = (Map)ret[0];
+ * Map properties = (Map)ret[1];
+ * String[] filenames = (String[])ret[2];
+ * //...
+ * } catch (CommandOptionException ex) {
+ * char option = ex.getOption();
+ * String error_symbol = ex.getErrorSymbol();
+ * System.err.println("*** error: " + ex.getMessage());
+ * }
+ * }
+ * </pre>
+ *
+ * @param args command-line strings
+ * @param singles options which take no argument
+ * @param requireds options which require an argument
+ * @param optionals options which can take an optional argument
+ * @return array of options(Map), properties(Map), and filenames(String[])
+ */
+ public static Object[] parseCommandOptions(String[] args, String singles, String requireds, String optionals) throws CommandOptionException {
+ Map<String, Object> options = new HashMap<>();
+ Map<String, Object> properties = new HashMap<>();
+ String[] filenames;
+
+ int i;
+ for (i = 0; i < args.length; i++) {
+ if (args[i].length() == 0 || args[i].charAt(0) != '-') {
+ break;
+ }
+ String opt = args[i];
+ int len = opt.length();
+ if (len == 1) { // option '-' means "don't parse arguments!"
+ i++;
+ break;
+ }
+ assert len > 1;
+ if (opt.charAt(1) == '-') { // properties (--pname=pvalue)
+ String pname;
+ Object pvalue;
+ int idx = opt.indexOf('=');
+ if (idx >= 0) {
+ pname = opt.substring(2, idx);
+ pvalue = idx + 1 < opt.length() ? 
opt.substring(idx + 1) : "";
+ } else {
+ pname = opt.substring(2);
+ pvalue = Boolean.TRUE;
+ }
+ properties.put(pname, pvalue);
+ } else { // command-line options
+ for (int j = 1; j < len; j++) {
+ char ch = opt.charAt(j);
+ String chstr = Character.toString(ch);
+ if (singles != null && singles.indexOf(ch) >= 0) {
+ options.put(chstr, Boolean.TRUE);
+ } else if (requireds != null && requireds.indexOf(ch) >= 0) {
+ String arg = null;
+ if (++j < len) {
+ arg = opt.substring(j);
+ } else if (++i < args.length) {
+ arg = args[i];
+ } else {
+ throw new CommandOptionException("-" + ch + ": argument required.", ch, "command.option.noarg");
+ }
+ options.put(chstr, arg);
+ break;
+ } else if (optionals != null && optionals.indexOf(ch) >= 0) {
+ Object arg;
+ if (++j < len) {
+ arg = opt.substring(j);
+ } else {
+ arg = Boolean.TRUE;
+ }
+ options.put(chstr, arg);
+ break;
+ } else {
+ throw new CommandOptionException("-" + ch + ": invalid option.", ch, "command.option.invalid");
+ }
+ }
+ }
+ }
+
+ assert i <= args.length;
+ int n = args.length - i;
+ filenames = new String[n];
+ for (int j = 0; i < args.length; i++, j++) {
+ filenames[j] = args[i];
+ }
+
+ return new Object[] { options, properties, filenames };
+ }
+}
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/ValidationException.java b/dcaedt_validator/kwalify/src/main/java/kwalify/ValidationException.java
new file mode 100644
index 0000000..911f9e1
--- /dev/null
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/ValidationException.java
@@ -0,0 +1,15 @@
+/*
+ * @(#)ValidationException.java $Rev: 4 $ $Release: 0.5.1 $
+ *
+ * copyright(c) 2005 kuwata-lab all rights reserved.
+ */
+
+package kwalify;
+
+public class ValidationException extends BaseException {
+ private static final long serialVersionUID = -2991121377463453973L;
+
+ public ValidationException(String message, String path, Object value, Rule rule) {
+ super(message, path, value, rule);
+ }
+}
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/Validator.java b/dcaedt_validator/kwalify/src/main/java/kwalify/Validator.java
new file mode 100644
index 0000000..e0f5af0
--- /dev/null
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/Validator.java
@@ -0,0 +1,382 @@
+/*
+ * @(#)Validator.java $Rev: 3 $ $Release: 0.5.1 $
+ *
+ * copyright(c) 2005 kuwata-lab all rights reserved.
+ */
+
+package kwalify;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.util.IdentityHashMap;
+import java.util.List;
+import java.util.LinkedList;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.Collections;
+
+/**
+ * validation engine
+ *
+ * ex.
+ * <pre>
+ *
+ * // load YAML document
+ * String str = Util.readFile("document.yaml");
+ * YamlParser parser = new YamlParser(str);
+ * Object document = parser.parse();
+ *
+ * // load schema (YamlUtil offers load(String), so read the file first)
+ * Object schema = YamlUtil.load(Util.readFile("schema.yaml"));
+ *
+ * // generate validator and validate document
+ * Validator validator = new Validator(schema);
+ * // copy the error list before sorting; validate() returns an unmodifiable list
+ * List errors = new java.util.ArrayList(validator.validate(document));
+ *
+ * // show errors
+ * if (errors != null && errors.size() > 0) {
+ * parser.setErrorsLineNumber(errors);
+ * java.util.Collections.sort(errors);
+ * for (Iterator it = errors.iterator(); it.hasNext(); ) {
+ * ValidationException error = (ValidationException)it.next();
+ * int linenum = error.getLineNumber();
+ * String path = error.getPath();
+ * String mesg = error.getMessage();
+ * String s = "- (" + linenum + ") [" + path + "] " + mesg;
+ * System.err.println(s);
+ * }
+ * }
+ * </pre>
+ *
+ * @version $Rev: 3 $
+ * @release $Release: 0.5.1 $
+ */
+public class Validator {
+ private Rule _rule;
+
+ public Validator(Map schema) throws SchemaException {
+ _rule = new Rule(schema);
+ }
+
+ public Validator(Object schema) throws SchemaException {
+ _rule = new Rule(schema);
+ }
+
+ public Rule getRule() { return _rule; }
+ //public void setRule(Rule rule) { _rule = rule; }
+
+ public List validate(Object value) {
+ ValidationContext vctx = new ValidationContext();
+ _validateRule(value, _rule, vctx);
+ return vctx.getErrors();
+ }
+
+ protected boolean preValidationHook(Object value, Rule rule, ValidationContext context) {
+ // nothing
+ return false;
+ }
+
+ protected void postValidationHook(Object value, Rule rule, ValidationContext context) {
+ }
+
+ private void _validateRule(Object value, Rule rule, ValidationContext context) {
+ // 'done' keeps a collection that is referenced more than once from being validated twice
+ if (Types.isCollection(value)) {
+ if (context.done(value))
+ return;
+ }
+ if (rule.isRequired() && value == null) {
+ Object[] args = new Object[] { Types.typeName(rule.getType()) };
+ context.addError("required.novalue", rule, value, args);
+ return;
+ }
+
+ if (preValidationHook(value, rule, context)) {
+ /* a 'higher power' says it is ok */
+ postValidationHook(value, rule, context);
+ return;
+ }
+
+ //Class klass = rule.getTypeClass();
+ //if (klass != null && value != null && !klass.isInstance(value)) {
+
+ int n = context.errorCount();
+ validateRule(value, rule, context);
+ if (context.errorCount() != n) {
+ return;
+ }
+ //
+ postValidationHook(value, rule, context);
+ }
+
+ /* this is the default validation process */
+ protected void validateRule(Object value, Rule rule, ValidationContext context) {
+
+ if (value != null && ! Types.isCorrectType(value, rule.getType())) {
+ Object[] args = new Object[] { Types.typeName(rule.getType()) };
+ context.addError("type.unmatch", rule, value, args);
+ return;
+ }
+ //
+ if (rule.getSequence() != null) {
+ assert value == null || value instanceof List;
+ validateSequence((List)value, rule, context);
+ } else if (rule.getMapping() != null) {
+ assert value == null || value instanceof Map;
+ validateMapping((Map)value, rule, context);
+ } else {
+ validateScalar(value, rule, context);
+ }
+ }
+
+ private void validateScalar(Object value, Rule rule, ValidationContext context) {
+ assert rule.getSequence() == null;
+ assert rule.getMapping() == null;
+ if (rule.getAssert() != null) {
+ //boolean result = evaluate(rule.getAssert());
+ //if (! 
result) { + // errors.add("asset.failed", rule, path, value, new Object[] { rule.getAssert() }); + //} + } + if (rule.getEnum() != null) { + if (! rule.getEnum().contains(value)) { + //if (Util.matches(keyname, "\\A\\d+\\z") keyname = "enum"; + context.addError("enum.notexist", rule, value, new Object[] { context.getPathElement() }); + } + } + // + if (value == null) { + return; + } + // + if (rule.getPattern() != null) { + if (! Util.matches(value.toString(), rule.getPatternRegexp())) { + context.addError("pattern.unmatch", rule, value, new Object[] { rule.getPattern() }); + } + } + if (rule.getRange() != null) { + assert Types.isScalar(value); + Map range = rule.getRange(); + Object v; + if ((v = range.get("max")) != null && Util.compareValues(v, value) < 0) { + context.addError("range.toolarge", rule, value, new Object[] { v.toString() }); + } + if ((v = range.get("min")) != null && Util.compareValues(v, value) > 0) { + context.addError("range.toosmall", rule, value, new Object[] { v.toString() }); + } + if ((v = range.get("max-ex")) != null && Util.compareValues(v, value) <= 0) { + context.addError("range.toolargeex", rule, value, new Object[] { v.toString() }); + } + if ((v = range.get("min-ex")) != null && Util.compareValues(v, value) >= 0) { + context.addError("range.toosmallex", rule, value, new Object[] { v.toString() }); + } + } + if (rule.getLength() != null) { + assert value instanceof String; + Map length = rule.getLength(); + int len = value.toString().length(); + Integer v; + if ((v = (Integer)length.get("max")) != null && v.intValue() < len) { + context.addError("length.toolong", rule, value, new Object[] { new Integer(len), v }); + } + if ((v = (Integer)length.get("min")) != null && v.intValue() > len) { + context.addError("length.tooshort", rule, value, new Object[] { new Integer(len), v }); + } + if ((v = (Integer)length.get("max-ex")) != null && v.intValue() <= len) { + context.addError("length.toolongex", rule, value, new Object[] { new Integer(len), v }); + } + if ((v = (Integer)length.get("min-ex")) != null && v.intValue() >= len) { + context.addError("length.tooshortex", rule, value, new Object[] { new Integer(len), v }); + } + } + } + + + private void validateSequence(List sequence, Rule seq_rule, ValidationContext context) { + assert seq_rule.getSequence() instanceof List; + assert seq_rule.getSequence().size() == 1; + if (sequence == null) { + return; + } + Rule rule = (Rule)seq_rule.getSequence().get(0); + int i = 0; + for (Iterator it = sequence.iterator(); it.hasNext(); i++) { + Object val = it.next(); + context.addPathElement(String.valueOf(i)); + _validateRule(val, rule, context); // validate recursively + context.removePathElement(); + } + if (rule.getType().equals("map")) { + Map mapping = rule.getMapping(); + List unique_keys = new ArrayList(); + for (Iterator it = mapping.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + Rule map_rule = (Rule)mapping.get(key); + if (map_rule.isUnique() || map_rule.isIdent()) { + unique_keys.add(key); + } + } + // + if (unique_keys.size() > 0) { + for (Iterator it = unique_keys.iterator(); it.hasNext(); ) { + Object key = it.next(); + Map table = new HashMap(); // val => index + int j = 0; + for (Iterator it2 = sequence.iterator(); it2.hasNext(); j++) { + Map map = (Map)it2.next(); + Object val = map.get(key); + if (val == null) { + continue; + } + if (table.containsKey(val)) { + String path = context.getPath(); + String prev_path = path + "/" + table.get(val) + "/" + key; + 
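// report the duplicate at this element's path, pointing back at the first occurrence
+ 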
context.addPathElement(String.valueOf(j)) + .addPathElement(key.toString()); + context.addError("value.notunique", rule, val, new Object[] { prev_path }); + context.removePathElement() + .removePathElement(); + } else { + table.put(val, new Integer(j)); + } + } + } + } + } else if (rule.isUnique()) { + Map table = new HashMap(); // val => index + int j = 0; + for (Iterator it = sequence.iterator(); it.hasNext(); j++) { + Object val = it.next(); + if (val == null) { + continue; + } + if (table.containsKey(val)) { + String path = context.getPath(); + String prev_path = path + "/" + table.get(val); + context.addPathElement(String.valueOf(j)) + .addError("value.notunique", rule, val, new Object[] { prev_path }) + .removePathElement(); + } else { + table.put(val, new Integer(j)); + } + } + } + } + + + private void validateMapping(Map mapping, Rule map_rule, ValidationContext context) { + assert map_rule.getMapping() instanceof Map; + if (mapping == null) { + return; + } + Map m = map_rule.getMapping(); + for (Iterator it = m.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + Rule rule = (Rule)m.get(key); + if (rule.isRequired() && !mapping.containsKey(key)) { + context.addError("required.nokey", rule, mapping, new Object[] { key }); + } + } + for (Iterator it = mapping.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + Object val = mapping.get(key); + Rule rule = (Rule)m.get(key); + context.addPathElement(key.toString()); + if (rule == null) { + context.addError("key.undefined", rule, mapping, new Object[] { key.toString() + ":", map_rule.getName() + m.keySet().toString() }); + } else { + _validateRule(val, rule, context); // validate recursively + } + context.removePathElement(); + } + } + + + public class ValidationContext { + + private StringBuilder path = new StringBuilder(""); + private List errors = new LinkedList(); + private Map done = new IdentityHashMap(); //completion tracker + + private ValidationContext() { + } + + public String getPath() { + return this.path.toString(); + } + + public Validator getValidator() { + return Validator.this; + } + + public ValidationContext addPathElement(String theElement) { + this.path.append("/") + .append(theElement); + return this; + } + + public String getPathElement() { + int index = this.path.lastIndexOf("/"); + return index >= 0 ? this.path.substring(index + 1) : this.path.toString(); + } + + public ValidationContext removePathElement() { + int index = this.path.lastIndexOf("/"); + if (index >= 0) + this.path.delete(index, this.path.length()); + return this; + } + + protected ValidationContext addError(String error_symbol, Rule rule, Object value, Object[] args) { + addError( + new ValidationException( + Messages.buildMessage(error_symbol, value, args), getPath(), value, rule)); + return this; + } + + protected ValidationContext addError(String error_symbol, Rule rule, String relpath, Object value, Object[] args) { + addError( + new ValidationException( + Messages.buildMessage(error_symbol, value, args), getPath()+"/"+relpath, value, rule)); + return this; + } + + public ValidationContext addError(String message, Rule rule, Object value, Throwable cause) { + addError( + new ValidationException( + message + ((cause == null) ? 
"" : ", cause " + cause), getPath(), value, rule)); + return this; + } + + public ValidationContext addError(ValidationException theError) { + this.errors.add(theError); + return this; + } + + + public List getErrors() { + return Collections.unmodifiableList(this.errors); + } + + public boolean hasErrors() { + return this.errors.isEmpty(); + } + + public int errorCount() { + return this.errors.size(); + } + + private boolean done(Object theTarget) { + if (this.done.get(theTarget) != null) { + return true; + } + this.done.put(theTarget, Boolean.TRUE); + return false; + } + + private boolean isDone(Object theTarget) { + return this.done.get(theTarget) != null; + } + } + +} diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/YamlParser.java b/dcaedt_validator/kwalify/src/main/java/kwalify/YamlParser.java new file mode 100644 index 0000000..b5789d3 --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/YamlParser.java @@ -0,0 +1,101 @@ +/* + * copyright(c) 2005 kuwata-lab all rights reserved. + */ +package kwalify; + +import java.util.Map; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.Iterator; + +/** + * yaml parser which can keep line number of path. + */ +public class YamlParser extends PlainYamlParser { + private Map linenumsTable = new IdentityHashMap(); // object => sequence or mapping + private int firstLinenum = -1; + private Object document = null; + + YamlParser(String yamlStr) { + super(yamlStr); + } + + public Object parse() throws SyntaxException { + document = super.parse(); + return document; + } + + protected String getLine() { + String line = super.getLine(); + if (firstLinenum < 0) { + firstLinenum = currentLineNumber(); + } + return line; + } + + + private int getPathLineNumber(String ypath) throws InvalidPathException { + if (document == null) { + return -1; + } + if (ypath.length() == 0 || "/".equals(ypath)) { + return 1; + } + String[] elems = ypath.split("/"); + String lastElem = elems.length > 0 ? elems[elems.length - 1] : null; + int i = ypath.charAt(0) == '/' ? 
1 : 0;
+ int len = elems.length - 1;
+ Object documentCollection = this.document; // collection
+ for ( ; i < len ; i++) {
+ if (documentCollection == null) {
+ throw new InvalidPathException(ypath);
+ } else if (documentCollection instanceof Map) {
+ documentCollection = ((Map)documentCollection).get(elems[i]);
+ } else if (documentCollection instanceof List) {
+ int index = Integer.parseInt(elems[i]);
+ if (index < 0 || ((List)documentCollection).size() <= index) {
+ throw new InvalidPathException(ypath);
+ }
+ documentCollection = ((List)documentCollection).get(index);
+ } else {
+ throw new InvalidPathException(ypath);
+ }
+ }
+
+ if (documentCollection == null) {
+ throw new InvalidPathException(ypath);
+ }
+ Object linenums = linenumsTable.get(documentCollection); // Map or List
+ int linenum;
+ if (documentCollection instanceof Map) {
+ assert linenums instanceof Map;
+ Object d = ((Map)linenums).get(lastElem);
+ linenum = (Integer) d;
+ } else if (documentCollection instanceof List) {
+ assert linenums instanceof List;
+ int index = Integer.parseInt(lastElem);
+ if (index < 0 || ((List)linenums).size() <= index) {
+ throw new InvalidPathException(ypath);
+ }
+ Object d = ((List)linenums).get(index);
+ linenum = (Integer) d;
+ } else {
+ throw new InvalidPathException(ypath);
+ }
+ return linenum;
+ }
+
+ public void setErrorsLineNumber(List errors) throws InvalidPathException {
+ for (Iterator it = errors.iterator(); it.hasNext(); ) {
+ ValidationException ex = (ValidationException)it.next();
+ ex.setLineNumber(getPathLineNumber(ex.getPath()));
+ }
+ }
+
+ protected Map createMapping() {
+ Map map = super.createMapping();
+ linenumsTable.put(map, new HashMap());
+ return map;
+ }
+}
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/YamlSyntaxException.java b/dcaedt_validator/kwalify/src/main/java/kwalify/YamlSyntaxException.java
new file mode 100644
index 0000000..a8b1011
--- /dev/null
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/YamlSyntaxException.java
@@ -0,0 +1,23 @@
+/*
+ * @(#)YamlSyntaxException.java $Rev: 4 $ $Release: 0.5.1 $
+ *
+ * copyright(c) 2005 kuwata-lab all rights reserved.
+ */
+
+package kwalify;
+
+/**
+ * exception class thrown by YamlParser when syntax of YAML document is wrong
+ *
+ * @revision $Rev: 4 $
+ * @release $Release: 0.5.1 $
+ * @see SyntaxException
+ */
+public class YamlSyntaxException extends SyntaxException {
+ private static final long serialVersionUID = 2951669148531823857L;
+
+ public YamlSyntaxException(String message, int linenum) {
+ super(message, linenum);
+ }
+
+}
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/YamlUtil.java b/dcaedt_validator/kwalify/src/main/java/kwalify/YamlUtil.java
new file mode 100644
index 0000000..4b15213
--- /dev/null
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/YamlUtil.java
@@ -0,0 +1,20 @@
+/*
+ * copyright(c) 2005 kuwata-lab all rights reserved.
+ */
+
+package kwalify;
+
+/**
+ * utility class for YAML.
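+ *
+ * ex. (an illustrative sketch of the load() call below)
+ * <pre>
+ *   Object doc = YamlUtil.load("name: foo\nitems:\n  - 1\n  - 2\n");
+ *   // => a Map with "foo" under "name" and a List of integers under "items"
+ * </pre>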
+ */ +public class YamlUtil { + + private YamlUtil() { + //hides implicit public + } + + public static Object load(String yamlStr) throws SyntaxException { + PlainYamlParser parser = new PlainYamlParser(yamlStr); + return parser.parse(); + } +} diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/messages.properties b/dcaedt_validator/kwalify/src/main/java/kwalify/messages.properties new file mode 100644 index 0000000..5734461 --- /dev/null +++ b/dcaedt_validator/kwalify/src/main/java/kwalify/messages.properties @@ -0,0 +1,108 @@ +command.help = \ + Usage1: %s [-hvstlE] -f schema.yaml doc.yaml [doc2.yaml ...]\n\ + Usage2: %s [-hvstlE] -m schema.yaml [schema2.yaml ...]\n\ + \ \ -h, --help : help\n\ + \ \ -v : version\n\ + \ \ -s : silent\n\ + \ \ -f schema.yaml : schema definition file\n\ + \ \ -m : meta-validation mode\n\ + \ \ -t : expand tab character automatically\n\ + \ \ -l : show linenumber when errored (experimental)\n\ + \ \ -E : show errors in emacs-style (implies '-l')\n +command.option.noaction = command-line option '-f' or '-m' required. +meta.empty = %s: empty. +meta.valid = %s: ok. +meta.invalid = %s: NG! +schema.empty = %s#%d: empty. +validation.empty = %s#%d: empty. +validation.valid = %s#%d: valid. +validation.invalid = %s#%d: INVALID +command.property.invalid = %s: invalid property. +command.option.noarg = -%s: argument required. +command.option.noschema = -%s: schema filename required. +command.option.invalid = -%s: invalid command option. +schema.notmap = schema definition is not a mapping. +key.unknown = unknown key. +type.notstr = not a string. +type.unknown = unknown type. +required.notbool = not a boolean. +pattern.notstr = not a string (or regexp) +pattern.notmatch = should be '/..../'. +pattern.syntaxerr = has regexp error. +enum.notseq = not a sequence. +enum.notscalar = not available with seq or map. +enum.type.unmatch = %s type expected. +enum.duplicate = duplicated enum value. +assert.notstr = not a string. +assert.noval = 'val' is not used. +assert.syntaxerr = expression syntax error. +range.notmap = not a mapping. +range.notscalar = is available only with scalar type. +range.notcollection = not a collection type. +range.type.unmatch = not a %s. +range.undefined = undefined key. +range.twomax = both 'max' and 'max-ex' are not available at once. +range.twomin = both 'min' and 'min-ex' are not available at once. +range.maxltmin = max '%s' is less than min '%s'. +range.maxleminex = max '%s' is less than or equal to min-ex '%s'. +range.maxexlemin = max-ex '%s' is less than or equal to min '%s'. +range.maxexleminex = max-ex '%s' is less than or equal to min-ex '%s'. +length.notmap = not a mapping. +length.nottext = is available only with string or text. +length.notint = not an integer. +length.undefined = undefined key. +length.twomax = both 'max' and 'max-ex' are not available at once. +length.twomin = both 'min' and 'min-ex' are not available at once. +length.maxltmin = max '%s' is less than min '%s'. +length.maxleminex = max '%s' is less than or equal to min-ex '%s'. +length.maxexlemin = max-ex '%s' is less than or equal to min '%s'. +length.maxexleminex = max-ex '%s' is less than or equal to min-ex '%s'. +ident.notbool = not a boolean. +ident.notscalar = is available only with a scalar type. +ident.onroot = is not available on root element. +ident.notmap = is available only with an element of mapping. +unique.notbool = not a boolean. +unique.notscalar = is available only with a scalar type. +unique.onroot = is not available on root element. 
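+# The %s and %d placeholders in these messages are filled in by Messages.buildMessage at runtime.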
diff --git a/dcaedt_validator/kwalify/src/main/java/kwalify/messages.properties b/dcaedt_validator/kwalify/src/main/java/kwalify/messages.properties
new file mode 100644
index 0000000..5734461
--- /dev/null
+++ b/dcaedt_validator/kwalify/src/main/java/kwalify/messages.properties
@@ -0,0 +1,108 @@
+command.help = \
+  Usage1: %s [-hvstlE] -f schema.yaml doc.yaml [doc2.yaml ...]\n\
+  Usage2: %s [-hvstlE] -m schema.yaml [schema2.yaml ...]\n\
+  \ \ -h, --help : help\n\
+  \ \ -v : version\n\
+  \ \ -s : silent\n\
+  \ \ -f schema.yaml : schema definition file\n\
+  \ \ -m : meta-validation mode\n\
+  \ \ -t : expand tab character automatically\n\
+  \ \ -l : show line number on error (experimental)\n\
+  \ \ -E : show errors in emacs-style (implies '-l')\n
+command.option.noaction = command-line option '-f' or '-m' required.
+meta.empty = %s: empty.
+meta.valid = %s: ok.
+meta.invalid = %s: NG!
+schema.empty = %s#%d: empty.
+validation.empty = %s#%d: empty.
+validation.valid = %s#%d: valid.
+validation.invalid = %s#%d: INVALID
+command.property.invalid = %s: invalid property.
+command.option.noarg = -%s: argument required.
+command.option.noschema = -%s: schema filename required.
+command.option.invalid = -%s: invalid command option.
+schema.notmap = schema definition is not a mapping.
+key.unknown = unknown key.
+type.notstr = not a string.
+type.unknown = unknown type.
+required.notbool = not a boolean.
+pattern.notstr = not a string (or regexp).
+pattern.notmatch = should be '/..../'.
+pattern.syntaxerr = has regexp error.
+enum.notseq = not a sequence.
+enum.notscalar = not available with seq or map.
+enum.type.unmatch = %s type expected.
+enum.duplicate = duplicated enum value.
+assert.notstr = not a string.
+assert.noval = 'val' is not used.
+assert.syntaxerr = expression syntax error.
+range.notmap = not a mapping.
+range.notscalar = is available only with scalar type.
+range.notcollection = not a collection type.
+range.type.unmatch = not a %s.
+range.undefined = undefined key.
+range.twomax = both 'max' and 'max-ex' are not available at once.
+range.twomin = both 'min' and 'min-ex' are not available at once.
+range.maxltmin = max '%s' is less than min '%s'.
+range.maxleminex = max '%s' is less than or equal to min-ex '%s'.
+range.maxexlemin = max-ex '%s' is less than or equal to min '%s'.
+range.maxexleminex = max-ex '%s' is less than or equal to min-ex '%s'.
+length.notmap = not a mapping.
+length.nottext = is available only with string or text.
+length.notint = not an integer.
+length.undefined = undefined key.
+length.twomax = both 'max' and 'max-ex' are not available at once.
+length.twomin = both 'min' and 'min-ex' are not available at once.
+length.maxltmin = max '%s' is less than min '%s'.
+length.maxleminex = max '%s' is less than or equal to min-ex '%s'.
+length.maxexlemin = max-ex '%s' is less than or equal to min '%s'.
+length.maxexleminex = max-ex '%s' is less than or equal to min-ex '%s'.
+ident.notbool = not a boolean.
+ident.notscalar = is available only with a scalar type.
+ident.onroot = is not available on root element.
+ident.notmap = is available only with an element of mapping.
+unique.notbool = not a boolean.
+unique.notscalar = is available only with a scalar type.
+unique.onroot = is not available on root element.
+sequence.notseq = not a sequence.
+sequence.noelem = required one element.
+sequence.toomany = required just one element.
+mapping.notmap = not a mapping.
+mapping.noelem = required at least one element.
+seq.nosequence = type 'seq' requires 'sequence:'.
+seq.conflict = not available with sequence.
+map.nomapping = type 'map' requires 'mapping:'.
+map.conflict = not available with mapping.
+scalar.conflict = not available with scalar type.
+enum.conflict = not available with 'enum:'.
+required.novalue = value required but none.
+type.unmatch = not a %s.
+assert.failed = assertion expression failed (%s).
+enum.notexist = invalid %s value.
+pattern.unmatch = not matched to pattern %s.
+range.toolarge = too large (> max %s).
+range.toosmall = too small (< min %s).
+range.toolargeex = too large (>= max %s).
+range.toosmallex = too small (<= min %s).
+length.toolong = too long (length %d > max %d).
+length.tooshort = too short (length %d < min %d).
+length.toolongex = too long (length %d >= max %d).
+length.tooshortex = too short (length %d <= min %d).
+value.notunique = is already used at '%s'.
+required.nokey = key '%s:' is required.
+key.undefined = key '%s' is undefined. Expecting one of %s.
+flow.hastail = flow style sequence is closed but got '%s'.
+flow.eof = found EOF when parsing flow style.
+flow.noseqitem = sequence item required (or last comma is extra).
+flow.seqnotclosed = flow style sequence requires ']'.
+flow.mapnoitem = mapping item required (or last comma is extra).
+flow.mapnotclosed = flow style mapping requires '}'.
+flow.nocolon = ':' expected but got '%s'.
+anchor.duplicated = anchor '%s' is already used.
+alias.extradata = alias cannot take any data.
+anchor.notfound = anchor '%s' not found.
+sequence.noitem = sequence item is expected.
+sequence.badindent = illegal indent of sequence.
+mapping.noitem = mapping item is expected.
+mapping.badindent = illegal indent of mapping.
+collection.notcollection = not a collection.
\ No newline at end of file
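A packaging note: this copy of messages.properties sits under src/main/java, which Maven does not treat as a resource directory, so by default it is not copied into the built jar; that is presumably why the identical file appears again under src/main/resources in the next hunk.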
diff --git a/dcaedt_validator/kwalify/src/main/resources/kwalify/messages.properties b/dcaedt_validator/kwalify/src/main/resources/kwalify/messages.properties
new file mode 100644
index 0000000..edb3fd7
--- /dev/null
+++ b/dcaedt_validator/kwalify/src/main/resources/kwalify/messages.properties
@@ -0,0 +1,107 @@
+command.help = \
+  Usage1: %s [-hvstlE] -f schema.yaml doc.yaml [doc2.yaml ...]\n\
+  Usage2: %s [-hvstlE] -m schema.yaml [schema2.yaml ...]\n\
+  \ \ -h, --help : help\n\
+  \ \ -v : version\n\
+  \ \ -s : silent\n\
+  \ \ -f schema.yaml : schema definition file\n\
+  \ \ -m : meta-validation mode\n\
+  \ \ -t : expand tab character automatically\n\
+  \ \ -l : show line number on error (experimental)\n\
+  \ \ -E : show errors in emacs-style (implies '-l')\n
+command.option.noaction = command-line option '-f' or '-m' required.
+meta.empty = %s: empty.
+meta.valid = %s: ok.
+meta.invalid = %s: NG!
+schema.empty = %s#%d: empty.
+validation.empty = %s#%d: empty.
+validation.valid = %s#%d: valid.
+validation.invalid = %s#%d: INVALID
+command.property.invalid = %s: invalid property.
+command.option.noarg = -%s: argument required.
+command.option.noschema = -%s: schema filename required.
+command.option.invalid = -%s: invalid command option.
+schema.notmap = schema definition is not a mapping.
+key.unknown = unknown key.
+type.notstr = not a string.
+type.unknown = unknown type.
+required.notbool = not a boolean.
+pattern.notstr = not a string (or regexp).
+pattern.notmatch = should be '/..../'.
+pattern.syntaxerr = has regexp error.
+enum.notseq = not a sequence.
+enum.notscalar = not available with seq or map.
+enum.type.unmatch = %s type expected.
+enum.duplicate = duplicated enum value.
+assert.notstr = not a string.
+assert.noval = 'val' is not used.
+assert.syntaxerr = expression syntax error.
+range.notmap = not a mapping.
+range.notscalar = is available only with scalar type.
+range.type.unmatch = not a %s.
+range.undefined = undefined key.
+range.twomax = both 'max' and 'max-ex' are not available at once.
+range.twomin = both 'min' and 'min-ex' are not available at once.
+range.maxltmin = max '%s' is less than min '%s'.
+range.maxleminex = max '%s' is less than or equal to min-ex '%s'.
+range.maxexlemin = max-ex '%s' is less than or equal to min '%s'.
+range.maxexleminex = max-ex '%s' is less than or equal to min-ex '%s'.
+length.notmap = not a mapping.
+length.nottext = is available only with string or text.
+length.notint = not an integer.
+length.undefined = undefined key.
+length.twomax = both 'max' and 'max-ex' are not available at once.
+length.twomin = both 'min' and 'min-ex' are not available at once.
+length.maxltmin = max '%s' is less than min '%s'.
+length.maxleminex = max '%s' is less than or equal to min-ex '%s'.
+length.maxexlemin = max-ex '%s' is less than or equal to min '%s'.
+length.maxexleminex = max-ex '%s' is less than or equal to min-ex '%s'.
+ident.notbool = not a boolean.
+ident.notscalar = is available only with a scalar type.
+ident.onroot = is not available on root element.
+ident.notmap = is available only with an element of mapping.
+unique.notbool = not a boolean.
+unique.notscalar = is available only with a scalar type.
+unique.onroot = is not available on root element.
+sequence.notseq = not a sequence.
+sequence.noelem = required one element.
+sequence.toomany = required just one element.
+mapping.notmap = not a mapping.
+mapping.noelem = required at least one element.
+seq.nosequence = type 'seq' requires 'sequence:'.
+seq.conflict = not available with sequence.
+map.nomapping = type 'map' requires 'mapping:'.
+map.conflict = not available with mapping.
+scalar.conflict = not available with scalar type.
+enum.conflict = not available with 'enum:'.
+required.novalue = value required but none.
+type.unmatch = not a %s.
+assert.failed = assertion expression failed (%s).
+enum.notexist = invalid %s value.
+pattern.unmatch = not matched to pattern %s.
+range.toolarge = too large (> max %s).
+range.toosmall = too small (< min %s).
+range.toolargeex = too large (>= max %s).
+range.toosmallex = too small (<= min %s).
+length.toolong = too long (length %d > max %d).
+length.tooshort = too short (length %d < min %d).
+length.toolongex = too long (length %d >= max %d).
+length.tooshortex = too short (length %d <= min %d).
+value.notunique = is already used at '%s'.
+required.nokey = key '%s:' is required.
+key.undefined = key '%s' is undefined. Expecting one of %s.
+flow.hastail = flow style sequence is closed but got '%s'.
+flow.eof = found EOF when parsing flow style.
+flow.noseqitem = sequence item required (or last comma is extra).
+flow.seqnotclosed = flow style sequence requires ']'.
+flow.mapnoitem = mapping item required (or last comma is extra).
+flow.mapnotclosed = flow style mapping requires '}'.
+flow.nocolon = ':' expected but got '%s'.
+anchor.duplicated = anchor '%s' is already used.
+alias.extradata = alias cannot take any data.
+anchor.notfound = anchor '%s' not found.
+sequence.noitem = sequence item is expected.
+sequence.badindent = illegal indent of sequence.
+mapping.noitem = mapping item is expected.
+mapping.badindent = illegal indent of mapping.
+collection.notcollection = not a collection.
\ No newline at end of file
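Since the src/main/resources copy lands on the classpath as the kwalify.messages bundle, each template can be resolved and filled via its printf-style placeholders. A direct illustration with plain JDK calls (kwalify itself presumably routes this through its own Messages helper rather than raw ResourceBundle lookups):

import java.util.ResourceBundle;

public class MessageDemo {
    public static void main(String[] args) {
        ResourceBundle bundle = ResourceBundle.getBundle("kwalify.messages");
        String template = bundle.getString("key.undefined");
        // The templates use printf-style placeholders, so String.format fills them in:
        System.out.println(String.format(template, "foo", "[name, version]"));
        // -> key 'foo' is undefined. Expecting one of [name, version].
    }
}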
diff --git a/dcaedt_validator/pom.xml b/dcaedt_validator/pom.xml
new file mode 100644
index 0000000..cc6df87
--- /dev/null
+++ b/dcaedt_validator/pom.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project
+  xmlns="http://maven.apache.org/POM/4.0.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>DCAE-DT-Validator</artifactId>
+  <packaging>pom</packaging>
+  <name>ASC Validator</name>
+  <parent>
+    <groupId>org.onap.sdc.dcae</groupId>
+    <artifactId>dcae_dt_be_main</artifactId>
+    <version>1806.0.1-SNAPSHOT</version>
+  </parent>
+  <modules>
+    <module>kwalify</module>
+    <module>checker</module>
+  </modules>
+  <scm>
+    <connection>scm:git:https://github.research.att.com/ASC/ASC-Validator/</connection>
+    <developerConnection>scm:git:https://github.research.att.com/ASC/ASC-Validator/</developerConnection>
+    <tag>HEAD</tag>
+    <url>https://github.research.att.com/ASC/ASC-Validator/</url>
+  </scm>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <configuration>
+          <source>1.8</source>
+          <target>1.8</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/pom.xml b/pom.xml
new file mode 100644
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,247 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project
+  xmlns="http://maven.apache.org/POM/4.0.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.onap.sdc.dcae</groupId>
+  <artifactId>dcae_dt_be_main</artifactId>
+  <version>1806.0.1-SNAPSHOT</version>
+  <packaging>pom</packaging>
+
+  <parent>
+    <groupId>org.springframework.boot</groupId>
+    <artifactId>spring-boot-starter-parent</artifactId>
+    <version>1.5.9.RELEASE</version>
+    <relativePath/> <!-- lookup parent from repository -->
+  </parent>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.onap.sdc.dcae.property</groupId>
+      <artifactId>DCAE-DT-PROPERTY</artifactId>
+      <version>${project.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+  </dependencies>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+    <java.version>1.8</java.version>
+  </properties>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.jacoco</groupId>
+        <artifactId>jacoco-maven-plugin</artifactId>
+        <version>0.7.9</version>
+        <executions>
+          <execution>
+            <id>default-prepare-agent</id>
+            <goals>
+              <goal>prepare-agent</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>default-report</id>
+            <goals>
+              <goal>report</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <profiles>
+    <profile>
+      <id>all</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <modules>
+        <module>dcaedt_validator</module>
+        <module>dcaedt_catalog</module>
+        <module>dcaedt_be</module>
+        <module>dcaedt_tools</module>
+      </modules>
+    </profile>
+    <profile>
+      <id>docker</id>
+      <properties>
+        <!-- Docker tags -->
+        <maven.build.timestamp.format>yyyyMMdd'T'HHmm</maven.build.timestamp.format>
+        <docker.tag>${project.version}-${maven.build.timestamp}</docker.tag>
+        <docker.latest.tag>${project.version}-latest</docker.latest.tag>
+        <docker.staging.tag>${parsedVersion.majorVersion}.${parsedVersion.minorVersion}-STAGING-latest</docker.staging.tag>
+      </properties>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>build-helper-maven-plugin</artifactId>
+            <version>1.8</version>
+            <executions>
+              <execution>
+                <id>parse-version</id>
+                <goals>
+                  <goal>parse-version</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <artifactId>maven-resources-plugin</artifactId>
+            <version>3.0.2</version>
+            <executions>
+              <execution>
+                <id>copy-dcae-be-war</id>
+                <phase>validate</phase>
+                <goals>
+                  <goal>copy-resources</goal>
+                </goals>
+                <configuration>
+                  <outputDirectory>${project.basedir}/docker/docker_be/target</outputDirectory>
+                  <resources>
+                    <resource>
+                      <directory>${project.basedir}/dcaedt_be/target</directory>
+                      <includes>
+                        <include>dcae.war</include>
+                      </includes>
+                    </resource>
+                  </resources>
+                </configuration>
+              </execution>
+              <execution>
+                <id>copy-dcae-tools-jar</id>
+                <phase>validate</phase>
+                <goals>
+                  <goal>copy-resources</goal>
+                </goals>
+                <configuration>
+                  <outputDirectory>${project.basedir}/docker/docker_tools/target</outputDirectory>
+                  <resources>
+                    <resource>
+                      <directory>${project.basedir}/dcaedt_tools/target</directory>
+                      <includes>
+                        <include>dcaedt_tools-${project.version}-jar-with-dependencies.jar</include>
+                      </includes>
+                    </resource>
+                  </resources>
+                </configuration>
+              </execution>
+              <execution>
+                <id>copy-dcae-config-json</id>
+                <phase>validate</phase>
+                <goals>
+                  <goal>copy-resources</goal>
+                </goals>
+                <configuration>
+                  <outputDirectory>${project.basedir}/docker/docker_tools/chef-solo/cookbooks/Deploy-DCAE/files/default</outputDirectory>
+                  <resources>
+                    <resource>
+                      <directory>${project.basedir}/dcaedt_tools/src/main/resources/conf</directory>
+                      <includes>
+                        <include>config.json</include>
+                      </includes>
+                    </resource>
+                  </resources>
+                </configuration>
+              </execution>
+
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>io.fabric8</groupId>
+            <artifactId>docker-maven-plugin</artifactId>
+            <version>0.23.0</version>
+            <configuration>
+              <verbose>true</verbose>
+              <apiVersion>1.23</apiVersion>
+              <images>
+                <!-- Build backend image -->
+                <image>
+                  <name>onap/dcae-be</name>
+                  <alias>dcae-be</alias>
+                  <build>
+                    <cleanup>try</cleanup>
+                    <dockerFileDir>${project.basedir}/docker/docker_be</dockerFileDir>
+                    <tags>
+                      <tag>${docker.tag}</tag>
+                      <tag>${docker.latest.tag}</tag>
+                      <tag>${docker.staging.tag}</tag>
+                    </tags>
+                  </build>
+                </image>
+
+                <!-- Build tools image -->
+                <image>
+                  <name>onap/dcae-tools</name>
+                  <alias>dcae-tools</alias>
+                  <build>
+                    <cleanup>try</cleanup>
+                    <dockerFileDir>${project.basedir}/docker/docker_tools</dockerFileDir>
+                    <tags>
+                      <tag>${docker.tag}</tag>
+                      <tag>${docker.latest.tag}</tag>
+                      <tag>${docker.staging.tag}</tag>
+                    </tags>
+                  </build>
+                </image>
+              </images>
+            </configuration>
+            <executions>
+              <execution>
+                <id>clean-images</id>
+                <phase>pre-clean</phase>
+                <goals>
+                  <goal>remove</goal>
+                </goals>
+                <configuration>
+                  <removeAll>true</removeAll>
+                  <image>onap/dcae-be</image>
+                </configuration>
+              </execution>
+              <execution>
+                <id>generate-images</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>build</goal>
+                </goals>
+              </execution>
+              <execution>
+                <id>push-images</id>
+                <phase>deploy</phase>
+                <goals>
+                  <goal>push</goal>
+                </goals>
+                <configuration>
+                  <image>onap/dcae-be,onap/dcae-tools</image>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+      <distributionManagement>
+        <site>
+          <id>nexus</id>
+          <url>dav:${nexus.url}${sitePath}</url>
+        </site>
+      </distributionManagement>
+    </profile>
+  </profiles>
+</project>
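With this profile layout, a plain mvn clean install builds only the Java modules via the default all profile. Under mvn clean package -P docker, the resources plugin copies the dcae.war, the tools jar-with-dependencies, and config.json into the docker build contexts during validate, and the fabric8 plugin builds the onap/dcae-be and onap/dcae-tools images at package; mvn deploy -P docker additionally pushes the tagged images, and mvn clean -P docker first removes any previously built backend image (the pre-clean binding).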