Diffstat (limited to 'src/main/java/org/openecomp')
-rw-r--r--  src/main/java/org/openecomp/sa/auth/SearchDbServiceAuth.java | 61
-rw-r--r--  src/main/java/org/openecomp/sa/auth/SearchDbServiceAuthCore.java | 254
-rw-r--r--  src/main/java/org/openecomp/sa/rest/AnalysisConfiguration.java | 241
-rw-r--r--  src/main/java/org/openecomp/sa/rest/AnalyzerApi.java | 158
-rw-r--r--  src/main/java/org/openecomp/sa/rest/ApiUtils.java | 178
-rw-r--r--  src/main/java/org/openecomp/sa/rest/BulkApi.java | 255
-rw-r--r--  src/main/java/org/openecomp/sa/rest/BulkMetaData.java | 51
-rw-r--r--  src/main/java/org/openecomp/sa/rest/BulkOperation.java | 63
-rw-r--r--  src/main/java/org/openecomp/sa/rest/BulkRequest.java | 113
-rw-r--r--  src/main/java/org/openecomp/sa/rest/Document.java | 63
-rw-r--r--  src/main/java/org/openecomp/sa/rest/DocumentApi.java | 529
-rw-r--r--  src/main/java/org/openecomp/sa/rest/IndexApi.java | 376
-rw-r--r--  src/main/java/org/openecomp/sa/rest/SearchServiceApi.java | 257
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/JaxrsEchoService.java | 50
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/JaxrsUserService.java | 51
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/config/ElasticSearchConfig.java | 85
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreDataEntity.java | 33
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreDataEntityImpl.java | 62
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreInterface.java | 73
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchBulkOperationResult.java | 68
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchCause.java | 45
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchError.java | 73
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchHttpController.java | 1634
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchOperationStatus.java | 117
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchResultItem.java | 150
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchShardStatus.java | 62
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/exception/DocumentStoreOperationException.java | 37
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationBucket.java | 77
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationResult.java | 77
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationResults.java | 43
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/entity/Document.java | 63
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/entity/DocumentOperationResult.java | 42
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/entity/ErrorResult.java | 58
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/entity/OperationResult.java | 80
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchHit.java | 50
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchHits.java | 52
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchOperationResult.java | 52
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/logging/SearchDbMsgs.java | 224
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/AbstractAggregation.java | 80
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Aggregation.java | 65
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationStatement.java | 176
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateHistogramAggregation.java | 117
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRange.java | 113
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeAggregation.java | 131
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Filter.java | 188
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/GroupByAggregation.java | 71
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/ParsedQuery.java | 118
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Query.java | 92
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/QueryStatement.java | 140
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/RangeQuery.java | 346
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/SearchStatement.java | 323
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Sort.java | 74
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/TermQuery.java | 347
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/service/SearchService.java | 57
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/util/AggregationParsingUtil.java | 104
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/util/DocumentSchemaUtil.java | 121
-rw-r--r--  src/main/java/org/openecomp/sa/searchdbabstraction/util/SearchDbConstants.java | 54
57 files changed, 0 insertions(+), 8674 deletions(-)
diff --git a/src/main/java/org/openecomp/sa/auth/SearchDbServiceAuth.java b/src/main/java/org/openecomp/sa/auth/SearchDbServiceAuth.java
deleted file mode 100644
index e0b4b1e..0000000
--- a/src/main/java/org/openecomp/sa/auth/SearchDbServiceAuth.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.auth;
-
-import javax.ws.rs.core.Cookie;
-import javax.ws.rs.core.HttpHeaders;
-
-public class SearchDbServiceAuth {
-
- public SearchDbServiceAuth() {
- }
-
- public boolean authBasic(String username, String authFunction) {
- return SearchDbServiceAuthCore.authorize(username, authFunction);
- }
-
- public String authUser(HttpHeaders headers, String authUser, String authFunction) {
-
-
- SearchDbServiceAuth aaiAuth = new SearchDbServiceAuth();
-
- StringBuilder username = new StringBuilder();
-
- username.append(authUser);
- if (aaiAuth.authBasic(username.toString(), authFunction) == false) {
- return "AAI_9101";
-
- }
- return "OK";
- }
-
- public boolean authCookie(Cookie cookie, String authFunction, StringBuilder username) {
-
- // String result = "no value";
- if (cookie == null) {
- return false;
- }
-
- return SearchDbServiceAuthCore.authorize(username.toString(), authFunction);
- }
-}
diff --git a/src/main/java/org/openecomp/sa/auth/SearchDbServiceAuthCore.java b/src/main/java/org/openecomp/sa/auth/SearchDbServiceAuthCore.java
deleted file mode 100644
index e5c77e0..0000000
--- a/src/main/java/org/openecomp/sa/auth/SearchDbServiceAuthCore.java
+++ /dev/null
@@ -1,254 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.auth;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.json.simple.parser.JSONParser;
-import org.json.simple.parser.ParseException;
-import org.openecomp.cl.api.Logger;
-import org.openecomp.cl.eelf.LoggerFactory;
-import org.openecomp.sa.searchdbabstraction.util.SearchDbConstants;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Timer;
-
-public class SearchDbServiceAuthCore {
-
- private static Logger logger = LoggerFactory.getInstance()
- .getLogger(SearchDbServiceAuthCore.class.getName());
-
- private static String GlobalAuthFileName = SearchDbConstants.SDB_AUTH_CONFIG_FILENAME;
-
- private static enum HTTP_METHODS {
- POST, GET, PUT, DELETE
- }
-
- ;
-
- // Don't instantiate
- private SearchDbServiceAuthCore() {
- }
-
- private static boolean usersInitialized = false;
- private static HashMap<String, SearchDbAuthUser> users;
- private static boolean timerSet = false;
- private static Timer timer = null;
-
- public synchronized static void init() {
-
-
- SearchDbServiceAuthCore.getConfigFile();
- SearchDbServiceAuthCore.reloadUsers();
-
- }
-
- public static void cleanup() {
- timer.cancel();
- }
-
- public static String getConfigFile() {
- if (GlobalAuthFileName == null) {
- String nc = SearchDbConstants.SDB_AUTH_CONFIG_FILENAME;
- if (nc == null) {
- nc = "/home/aaiadmin/etc/aaipolicy.json";
- }
-
- GlobalAuthFileName = nc;
- }
- return GlobalAuthFileName;
- }
-
- public synchronized static void reloadUsers() {
- users = new HashMap<String, SearchDbAuthUser>();
-
-
- ObjectMapper mapper = new ObjectMapper(); // can reuse, share globally
- JSONParser parser = new JSONParser();
- try {
- Object obj = parser.parse(new FileReader(GlobalAuthFileName));
- // aailogger.debug(logline, "Reading from " + GlobalAuthFileName);
- JsonNode rootNode = mapper.readTree(new File(GlobalAuthFileName));
- JsonNode rolesNode = rootNode.path("roles");
-
- for (JsonNode roleNode : rolesNode) {
- String roleName = roleNode.path("name").asText();
-
- TabularAuthRole authRole = new TabularAuthRole();
- JsonNode usersNode = roleNode.path("users");
- JsonNode functionsNode = roleNode.path("functions");
- for (JsonNode functionNode : functionsNode) {
- String function = functionNode.path("name").asText();
- JsonNode methodsNode = functionNode.path("methods");
- boolean hasMethods = false;
- for (JsonNode methodNode : methodsNode) {
- String methodName = methodNode.path("name").asText();
- hasMethods = true;
- String thisFunction = methodName + ":" + function;
-
- authRole.addAllowedFunction(thisFunction);
- }
-
- if (hasMethods == false) {
- // iterate the list from HTTP_METHODS
- for (HTTP_METHODS meth : HTTP_METHODS.values()) {
- String thisFunction = meth.toString() + ":" + function;
-
- authRole.addAllowedFunction(thisFunction);
- }
- }
-
- }
- for (JsonNode userNode : usersNode) {
- // make the user lower case
- String username = userNode.path("username").asText().toLowerCase();
- SearchDbAuthUser authUser = null;
- if (users.containsKey(username)) {
- authUser = users.get(username);
- } else {
- authUser = new SearchDbAuthUser();
- }
-
-
- authUser.setUser(username);
- authUser.addRole(roleName, authRole);
- users.put(username, authUser);
- }
- }
- } catch (FileNotFoundException fnfe) {
- logger.debug("Failed to load the policy file ");
-
- } catch (ParseException e) {
- logger.debug("Failed to Parse the policy file ");
-
- } catch (JsonProcessingException e) {
- logger.debug("JSON processing error while parsing policy file: " + e.getMessage());
-
- } catch (IOException e) {
- logger.debug("IO Exception while parsing policy file: " + e.getMessage());
- }
-
- usersInitialized = true;
-
- }
-
- public static class SearchDbAuthUser {
- public SearchDbAuthUser() {
- this.roles = new HashMap<String, TabularAuthRole>();
- }
-
- private String username;
- private HashMap<String, TabularAuthRole> roles;
-
- public String getUser() {
- return this.username;
- }
-
- public HashMap<String, TabularAuthRole> getRoles() {
- return this.roles;
- }
-
- public void addRole(String roleName, TabularAuthRole authRole) {
- this.roles.put(roleName, authRole);
- }
-
- public boolean checkAllowed(String checkFunc) {
- for (Map.Entry<String, TabularAuthRole> roleEntry : this.roles.entrySet()) {
- TabularAuthRole role = roleEntry.getValue();
- if (role.hasAllowedFunction(checkFunc)) {
- // break out as soon as we find it
- return true;
- }
- }
- // we would have got positive confirmation had it been there
- return false;
- }
-
- public void setUser(String myuser) {
- this.username = myuser;
- }
-
- }
-
- public static class TabularAuthRole {
- public TabularAuthRole() {
- this.allowedFunctions = new ArrayList<String>();
- }
-
- private List<String> allowedFunctions;
-
- public void addAllowedFunction(String func) {
- this.allowedFunctions.add(func);
- }
-
- public void delAllowedFunction(String delFunc) {
- if (this.allowedFunctions.contains(delFunc)) {
- this.allowedFunctions.remove(delFunc);
- }
- }
-
- public boolean hasAllowedFunction(String afunc) {
- if (this.allowedFunctions.contains(afunc)) {
- return true;
- } else {
- return false;
- }
- }
- }
-
- public static HashMap<String, SearchDbAuthUser> getUsers(String key) {
- if (!usersInitialized || (users == null)) {
- reloadUsers();
- }
- return users;
- }
-
- public static boolean authorize(String username, String authFunction) {
- // logline.init(component, transId, fromAppId, "authorize()");
-
- if (!usersInitialized || (users == null)) {
- init();
- }
- if (users.containsKey(username)) {
- if (users.get(username).checkAllowed(authFunction) == true) {
-
- return true;
- } else {
-
-
- return false;
- }
- } else {
-
- return false;
- }
- }
-}
diff --git a/src/main/java/org/openecomp/sa/rest/AnalysisConfiguration.java b/src/main/java/org/openecomp/sa/rest/AnalysisConfiguration.java
deleted file mode 100644
index d05d50d..0000000
--- a/src/main/java/org/openecomp/sa/rest/AnalysisConfiguration.java
+++ /dev/null
@@ -1,241 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.rest;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.openecomp.cl.api.Logger;
-import org.openecomp.cl.eelf.LoggerFactory;
-import org.openecomp.sa.rest.AnalyzerSchema;
-import org.openecomp.sa.rest.FilterSchema;
-import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
-import org.openecomp.sa.searchdbabstraction.util.SearchDbConstants;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-/**
- * This class encapsulates the configuration of the predefined
- * Analyzer and Filter behaviours that help to tell the document
- * store how to index the documents that are provided to it.
- */
-public class AnalysisConfiguration {
-
- /**
- * Contains all of the predefined indexing filters.
- */
- private FilterSchema[] customFilters;
-
- /**
- * Contains all of the predefined indexing analyzers.
- */
- private AnalyzerSchema[] customAnalysers;
-
- /**
- * Indicates whether or not we have imported the filter and
- * analyzer configurations.
- */
- private AtomicBoolean configured = new AtomicBoolean(false);
-
- /**
- * A json format string which is readable by Elastic Search and defines
- * all of the custom filters and analyzers that we need Elastic Search
- * to know about.
- */
- private static String esSettings = null;
-
- private static Logger logger = LoggerFactory.getInstance()
- .getLogger(AnalysisConfiguration.class.getName());
-
-
- /**
- * Imports the filter and analyzer configuration files and
- * builds an Elastic Search readable settings file from the
- * contents.
- *
- * @param filterConfigFile - Location of filter configuration json file
- * @param analyzerConfigFile - Location of analyzer configuration json file
- */
- public void init(String filterConfigFile, String analyzerConfigFile) {
-
- if (configured.compareAndSet(false, true)) {
- ObjectMapper mapper = new ObjectMapper();
-
- File filtersConfig = new File(filterConfigFile);
- try {
- customFilters = mapper.readValue(filtersConfig, FilterSchema[].class);
- } catch (IOException e) {
-
- // generate log
- logger.warn(SearchDbMsgs.FILTERS_CONFIG_FAILURE, filterConfigFile, e.getMessage());
- }
-
- File analysersConfig = new File(analyzerConfigFile);
- try {
- customAnalysers = mapper.readValue(analysersConfig, AnalyzerSchema[].class);
- } catch (IOException e) {
-
- // generate log
- logger.warn(SearchDbMsgs.ANALYSYS_CONFIG_FAILURE, analyzerConfigFile, e.getMessage());
- }
-
- esSettings = buildEsIndexSettings();
- }
- }
-
-
- /**
- * Returns the set of pre-configured filters.
- *
- * @return - An array of filters.
- */
- public FilterSchema[] getFilters() {
- return customFilters;
- }
-
-
- /**
- * Returns the set of pre-configured analyzers.
- *
- * @return - An array of analyzers.
- */
- public AnalyzerSchema[] getAnalyzers() {
- init(SearchDbConstants.SDB_FILTER_CONFIG_FILE, SearchDbConstants.SDB_ANALYSIS_CONFIG_FILE);
- return customAnalysers;
- }
-
-
- /**
- * Imports the filter and analyzer configurations and translates those
- * into a settings string that will be parseable by Elastic Search.
- *
- * @return - Elastic Search formatted settings string.
- */
- public String getEsIndexSettings() {
-
- // Generate the es-settings string from our filter and analyzer
- // configurations if we have not already done so.
- init(SearchDbConstants.SDB_FILTER_CONFIG_FILE, SearchDbConstants.SDB_ANALYSIS_CONFIG_FILE);
-
- // Now, return the es-settings string.
- return esSettings;
- }
-
-
- /**
- * Constructs a settings string that is readable by Elastic Search based
- * on the contents of the filter and analyzer configuration files.
- *
- * @return Elastic Search formatted settings string.
- */
- public String buildEsIndexSettings() {
-
- StringBuilder sb = new StringBuilder();
-
- sb.append("{");
- sb.append("\"analysis\": {");
-
- // Define the custom filters.
- boolean atLeastOneFilter = false;
- sb.append("\"filter\": {");
- AtomicBoolean firstFilter = new AtomicBoolean(true);
- for (FilterSchema filter : customFilters) {
-
- // Append a comma before the next entry, unless it is the
- // first one.
- if (!firstFilter.compareAndSet(true, false)) {
- sb.append(", ");
- }
-
- // Now, build the filter entry.
- buildFilterEntry(filter, sb);
- atLeastOneFilter = true;
- }
- sb.append((atLeastOneFilter) ? "}," : "}");
-
- // Define the custom analyzers.
- sb.append("\"analyzer\": {");
- AtomicBoolean firstAnalyzer = new AtomicBoolean(true);
- for (AnalyzerSchema analyzer : customAnalysers) {
-
- // Prepend a comma before the entry, unless it is the
- // first one.
- if (!firstAnalyzer.compareAndSet(true, false)) {
- sb.append(",");
- }
-
- // Now, construct the entry for this analyzer.
- buildAnalyzerEntry(analyzer, sb);
- }
- sb.append("}");
-
- sb.append("}");
- sb.append("}");
-
- return sb.toString();
- }
-
-
- /**
- * Constructs an ElasticSearch friendly custom filter definition.
- *
- * @param filter - The filter to generate ElasticSearch json for.
- * @param sb - The string builder to append the filter definition
- * to.
- */
- private void buildFilterEntry(FilterSchema filter, StringBuilder sb) {
-
- sb.append("\"" + filter.getName()).append("\": {");
-
- sb.append(filter.getConfiguration());
-
- sb.append("}");
- }
-
-
- /**
- * Constructs an ElasticSearch friendly custom analyzer definition.
- *
- * @param analyzer - The analyzer to generate ElasticSearch json for.
- * @param sb - The string builder to append the analyzer definition
- * to.
- */
- private void buildAnalyzerEntry(AnalyzerSchema analyzer, StringBuilder sb) {
-
- sb.append("\"").append(analyzer.getName()).append("\": {");
- sb.append("\"type\": \"custom\",");
- sb.append("\"tokenizer\": ").append("\"").append(analyzer.getTokenizer()).append("\",");
- sb.append("\"filter\": [");
- boolean firstFilter = true;
- for (String filter : analyzer.getFilters()) {
- if (!firstFilter) {
- sb.append(",");
- } else {
- firstFilter = false;
- }
- sb.append("\"").append(filter).append("\"");
- }
- sb.append("]");
- sb.append("}");
- }
-}
diff --git a/src/main/java/org/openecomp/sa/rest/AnalyzerApi.java b/src/main/java/org/openecomp/sa/rest/AnalyzerApi.java
deleted file mode 100644
index a02f55c..0000000
--- a/src/main/java/org/openecomp/sa/rest/AnalyzerApi.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.rest;
-
-import org.openecomp.cl.api.LogFields;
-import org.openecomp.cl.api.LogLine;
-import org.openecomp.cl.api.Logger;
-import org.openecomp.cl.eelf.LoggerFactory;
-import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.ElasticSearchHttpController;
-import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
-
-import java.util.concurrent.atomic.AtomicBoolean;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.Response;
-
-@Path("/analyzers")
-public class AnalyzerApi {
-
- private SearchServiceApi searchService = null;
-
- // Set up the loggers.
- private static Logger logger = LoggerFactory.getInstance().getLogger(IndexApi.class.getName());
- private static Logger auditLogger = LoggerFactory.getInstance()
- .getAuditLogger(IndexApi.class.getName());
-
- public AnalyzerApi(SearchServiceApi searchService) {
- this.searchService = searchService;
- }
-
- @GET
- public Response processGet(@Context HttpServletRequest request,
- @Context HttpHeaders headers,
- ApiUtils apiUtils) {
-
- Response.Status responseCode = Response.Status.INTERNAL_SERVER_ERROR;
- String responseString = "Undefined error";
-
- // Initialize the MDC Context for logging purposes.
- ApiUtils.initMdcContext(request, headers);
-
- // Validate that the request is correctly authenticated before going
- // any further.
- try {
-
- if (!searchService.validateRequest(headers, request,
- ApiUtils.Action.GET, ApiUtils.SEARCH_AUTH_POLICY_NAME)) {
- logger.warn(SearchDbMsgs.GET_ANALYZERS_FAILURE, "Authentication failure.");
- return Response.status(Response.Status.FORBIDDEN).entity("Authentication failure.").build();
- }
-
- } catch (Exception e) {
-
- logger.warn(SearchDbMsgs.GET_ANALYZERS_FAILURE,
- "Unexpected authentication failure - cause: " + e.getMessage());
- return Response.status(Response.Status.FORBIDDEN).entity("Authentication failure.").build();
- }
-
-
- // Now, build the list of analyzers.
- try {
- responseString = buildAnalyzerList(ElasticSearchHttpController.getInstance()
- .getAnalysisConfig());
- responseCode = Response.Status.OK;
-
- } catch (Exception e) {
-
- logger.warn(SearchDbMsgs.GET_ANALYZERS_FAILURE,
- "Unexpected failure retrieving analysis configuration - cause: " + e.getMessage());
- responseString = "Failed to retrieve analysis configuration. Cause: " + e.getMessage();
- }
-
- // Build the HTTP response.
- Response response = Response.status(responseCode).entity(responseString).build();
-
- // Generate our audit log.
- auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, responseCode.getStatusCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, responseCode.getStatusCode()),
- (request != null) ? request.getMethod() : "Unknown",
- (request != null) ? request.getRequestURL().toString() : "Unknown",
- (request != null) ? request.getRemoteHost() : "Unknown",
- Integer.toString(response.getStatus()));
-
- // Clear the MDC context so that no other transaction inadvertently
- // uses our transaction id.
- ApiUtils.clearMdcContext();
-
- return response;
- }
-
-
- /**
- * This method takes a list of analyzer objects and generates a simple json
- * structure to enumerate them.
- *
- * <p>Note, this includes only the aspects of the analyzer object that we want
- * to make public to an external client.
- *
- * @param analysisConfig - The analysis configuration object to extract the
- * analyzers from.
- * @return - A json string enumerating the defined analyzers.
- */
- private String buildAnalyzerList(AnalysisConfiguration analysisConfig) {
-
- StringBuilder sb = new StringBuilder();
-
- sb.append("{");
- AtomicBoolean firstAnalyzer = new AtomicBoolean(true);
- for (AnalyzerSchema analyzer : analysisConfig.getAnalyzers()) {
-
- if (!firstAnalyzer.compareAndSet(true, false)) {
- sb.append(", ");
- }
-
- sb.append("{");
- sb.append("\"name\": \"").append(analyzer.getName()).append("\", ");
- sb.append("\"description\": \"").append(analyzer.getDescription()).append("\", ");
- sb.append("\"behaviours\": [");
- AtomicBoolean firstBehaviour = new AtomicBoolean(true);
- for (String behaviour : analyzer.getBehaviours()) {
- if (!firstBehaviour.compareAndSet(true, false)) {
- sb.append(", ");
- }
- sb.append("\"").append(behaviour).append("\"");
- }
- sb.append("]");
- sb.append("}");
- }
- sb.append("}");
-
- return sb.toString();
- }
-}
diff --git a/src/main/java/org/openecomp/sa/rest/ApiUtils.java b/src/main/java/org/openecomp/sa/rest/ApiUtils.java
deleted file mode 100644
index dc7f0cb..0000000
--- a/src/main/java/org/openecomp/sa/rest/ApiUtils.java
+++ /dev/null
@@ -1,178 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.rest;
-
-import org.openecomp.cl.mdc.MdcContext;
-import org.openecomp.sa.searchdbabstraction.util.SearchDbConstants;
-import org.slf4j.MDC;
-
-import java.util.UUID;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.Response;
-
-
-public class ApiUtils {
-
- public static final String SEARCH_AUTH_POLICY_NAME = "search";
- public static final String URL_PREFIX = "services/search-data-service/v1/search";
-
- public enum Action {
- POST, GET, PUT, DELETE
- }
-
- ;
-
-
- /**
- * This method uses the contents of the supplied HTTP headers and request
- * structures to populate the MDC Context used for logging purposes.
- *
- * @param httpReq - HTTP request structure.
- * @param headers - HTTP headers
- */
- protected static void initMdcContext(HttpServletRequest httpReq, HttpHeaders headers) {
-
- // Auto generate a transaction if we were not provided one.
- String transId = null;
- if (headers != null) {
- transId = headers.getRequestHeaders().getFirst("X-TransactionId");
-
- if ((transId == null) || (transId.equals(""))) {
- transId = UUID.randomUUID().toString();
- }
- }
-
- String fromIp = (httpReq != null) ? httpReq.getRemoteAddr() : "";
- String fromApp = (headers != null) ? headers.getRequestHeaders().getFirst("X-FromAppId") : "";
-
- MdcContext.initialize(transId, SearchDbConstants.SDB_SERVICE_NAME, "", fromApp, fromIp);
- }
-
-
- protected static void clearMdcContext() {
- MDC.clear();
- }
-
- public static String buildIndexUri(String index) {
-
- return (URL_PREFIX + "/indexes/") + index;
- }
-
- public static String buildDocumentUri(String index, String documentId) {
-
- return buildIndexUri(index) + "/documents/" + documentId;
- }
-
- public static boolean validateIndexUri(String uri) {
-
- // If the URI starts with a leading '/' character, remove it.
- uri = uri.startsWith("/") ? uri.substring(1) : uri;
-
- // Now, tokenize the URI string.
- String[] tokens = uri.split("/");
-
- return (tokens.length == 6) && (tokens[4].equals("indexes"));
-
- }
-
- public static boolean validateDocumentUri(String uri, boolean requireId) {
-
- // If the URI starts with a leading '/' character, remove it.
- uri = uri.startsWith("/") ? uri.substring(1) : uri;
-
- // Now, tokenize the URI string.
- String[] tokens = uri.split("/");
-
- if (requireId) {
- return (tokens.length == 8) && (tokens[4].equals("indexes")
- && (tokens[6].equals("documents")));
- } else {
- return ((tokens.length == 8) || (tokens.length == 7))
- && (tokens[4].equals("indexes") && (tokens[6].equals("documents")));
- }
- }
-
- public static String extractIndexFromUri(String uri) {
-
- // If the URI starts with a leading '/' character, remove it.
- uri = uri.startsWith("/") ? uri.substring(1) : uri;
-
- // Now, tokenize the URI string.
- String[] tokens = uri.split("/");
-
- int i = 0;
- for (String token : tokens) {
- if (token.equals("indexes")) {
- if (i + 1 < tokens.length) {
- return tokens[i + 1];
- }
- }
- i++;
- }
-
- return null;
- }
-
- public static String extractIdFromUri(String uri) {
-
- // If the URI starts with a leading '/' character, remove it.
- uri = uri.startsWith("/") ? uri.substring(1) : uri;
-
- // Now, tokenize the URI string.
- String[] tokens = uri.split("/");
-
- int i = 0;
- for (String token : tokens) {
- if (token.equals("documents")) {
- if (i + 1 < tokens.length) {
- return tokens[i + 1];
- }
- }
- i++;
- }
-
- return null;
- }
-
- public static String getHttpStatusString(int httpStatusCode) {
- // Some of the status codes we use are still in draft state in the standards, and are not
- // recognized in the javax library. We need to manually translate these to human-readable
- // strings.
- String statusString = "Unknown";
- Response.Status status = Response.Status.fromStatusCode(httpStatusCode);
-
- if (status == null) {
- switch (httpStatusCode) {
- case 207:
- statusString = "Multi Status";
- break;
- default:
- }
- } else {
- statusString = status.toString();
- }
-
- return statusString;
- }
-}
diff --git a/src/main/java/org/openecomp/sa/rest/BulkApi.java b/src/main/java/org/openecomp/sa/rest/BulkApi.java
deleted file mode 100644
index 6557dcb..0000000
--- a/src/main/java/org/openecomp/sa/rest/BulkApi.java
+++ /dev/null
@@ -1,255 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.rest;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.github.fge.jsonschema.main.JsonSchema;
-import com.github.fge.jsonschema.main.JsonSchemaFactory;
-import org.openecomp.cl.api.LogFields;
-import org.openecomp.cl.api.LogLine;
-import org.openecomp.cl.api.Logger;
-import org.openecomp.cl.eelf.LoggerFactory;
-import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface;
-import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException;
-import org.openecomp.sa.searchdbabstraction.entity.OperationResult;
-import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
-
-import java.io.IOException;
-import java.util.concurrent.atomic.AtomicBoolean;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.Path;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.Response;
-
-
-/**
- * This class encapsulates the REST end points associated with performing
- * bulk operations against the document store.
- */
-@Path("/bulk")
-public class BulkApi {
-
- /**
- * Indicates whether or not we have performed the one-time static
- * initialization required for performing schema validation.
- */
- protected static AtomicBoolean validationInitialized = new AtomicBoolean(false);
-
- /**
- * Factory used for importing our payload schema for validation purposes.
- */
- protected static JsonSchemaFactory schemaFactory = null;
-
- /**
- * Imported payload schema that will be used by our validation methods.
- */
- protected static JsonSchema schema = null;
-
- protected SearchServiceApi searchService = null;
-
- // Instantiate the loggers.
- private static Logger logger = LoggerFactory.getInstance().getLogger(BulkApi.class.getName());
- private static Logger auditLogger = LoggerFactory.getInstance()
- .getAuditLogger(BulkApi.class.getName());
-
-
- /**
- * Create a new instance of the BulkApi end point.
- */
- public BulkApi(SearchServiceApi searchService) {
- this.searchService = searchService;
- }
-
-
- /**
- * Processes client requests containing a set of operations to be
- * performed in bulk.
- *
- * <p>Method: POST
- *
- * @param operations - JSON structure enumerating the operations to be
- * performed.
- * @param request - Raw HTTP request.
- * @param headers - HTTP headers.
- * @return - A standard REST response structure.
- */
- public Response processPost(String operations,
- HttpServletRequest request,
- HttpHeaders headers,
- DocumentStoreInterface documentStore,
- ApiUtils apiUtils) {
-
-
- // Initialize the MDC Context for logging purposes.
- ApiUtils.initMdcContext(request, headers);
-
- // Set a default result code and entity string for the request.
- int resultCode = 500;
- String resultString = "Unexpected error";
-
- if (logger.isDebugEnabled()) {
- logger.debug("SEARCH: Process Bulk Request - operations = ["
- + operations.replaceAll("\n", "") + " ]");
- }
-
- try {
-
- // Validate that the request is correctly authenticated before going
- // any further.
- if (!searchService.validateRequest(headers, request,
- ApiUtils.Action.POST, ApiUtils.SEARCH_AUTH_POLICY_NAME)) {
- logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, "Authentication failure.");
-
- return buildResponse(Response.Status.FORBIDDEN.getStatusCode(),
- "Authentication failure.", request, apiUtils);
- }
-
- } catch (Exception e) {
-
- // This is a catch all for any unexpected failure trying to perform
- // the authentication.
- logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE,
- "Unexpected authentication failure - cause: " + e.getMessage());
- if (logger.isDebugEnabled()) {
- logger.debug("Stack Trace:\n" + e.getStackTrace());
- }
-
- return buildResponse(Response.Status.FORBIDDEN.getStatusCode(),
- "Authentication failure - cause " + e.getMessage(),
- request,
- apiUtils);
- }
-
- // We expect a payload containing a JSON structure enumerating the
- // operations to be performed.
- if (operations == null) {
- logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, "Missing operations list payload");
-
- return buildResponse(resultCode, "Missing payload", request, apiUtils);
- }
-
-
- // Marshal the supplied json string into a Java object.
- ObjectMapper mapper = new ObjectMapper();
- BulkRequest[] requests = null;
- try {
- requests = mapper.readValue(operations, BulkRequest[].class);
-
- } catch (IOException e) {
-
- logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE,
- "Failed to marshal operations list: " + e.getMessage());
- if (logger.isDebugEnabled()) {
- logger.debug("Stack Trace:\n" + e.getStackTrace());
- }
-
- // Populate the result code and entity string for our HTTP response
- // and return the response to the client..
- return buildResponse(javax.ws.rs.core.Response.Status.BAD_REQUEST.getStatusCode(),
- "Unable to marshal operations: " + e.getMessage(),
- request,
- apiUtils);
- }
-
- // Verify that our parsed operations list actually contains some valid
- // operations.
- if (requests.length == 0) {
- logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, "Empty operations list in bulk request");
-
-
- // Populate the result code and entity string for our HTTP response
- // and return the response to the client..
- return buildResponse(javax.ws.rs.core.Response.Status.BAD_REQUEST.getStatusCode(),
- "Empty operations list in bulk request",
- request,
- apiUtils);
- }
- try {
-
- // Now, forward the set of bulk operations to the DAO for processing.
- OperationResult result = documentStore.performBulkOperations(requests);
-
- // Populate the result code and entity string for our HTTP response.
- resultCode = result.getResultCode();
- resultString = (result.getFailureCause() == null)
- ? result.getResult() : result.getFailureCause();
-
- } catch (DocumentStoreOperationException e) {
-
- logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE,
- "Unexpected failure communicating with document store: " + e.getMessage());
- if (logger.isDebugEnabled()) {
- logger.debug("Stack Trace:\n" + e.getStackTrace());
- }
-
- // Populate the result code and entity string for our HTTP response.
- resultCode = javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR.getStatusCode();
- resultString = "Unexpected failure processing bulk operations: " + e.getMessage();
- }
-
- // Build our HTTP response.
- Response response = Response.status(resultCode).entity(resultString).build();
-
- // Log the result.
- if ((response.getStatus() >= 200) && (response.getStatus() < 300)) {
- logger.info(SearchDbMsgs.PROCESSED_BULK_OPERATIONS);
- } else {
- logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, (String) response.getEntity());
- }
-
- // Finally, return the HTTP response to the client.
- return buildResponse(resultCode, resultString, request, apiUtils);
- }
-
-
- /**
- * This method generates an audit log and returns an HTTP response object.
- *
- * @param resultCode - The result code to report.
- * @param resultString - The result string to report.
- * @param request - The HTTP request to extract data from for the audit log.
- * @return - An HTTP response object.
- */
- private Response buildResponse(int resultCode, String resultString,
- HttpServletRequest request, ApiUtils apiUtils) {
-
- Response response = Response.status(resultCode).entity(resultString).build();
-
- // Generate our audit log.
- auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, resultCode)
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION,
- ApiUtils.getHttpStatusString(resultCode)),
- (request != null) ? request.getMethod() : "Unknown",
- (request != null) ? request.getRequestURL().toString() : "Unknown",
- (request != null) ? request.getRemoteHost() : "Unknown",
- Integer.toString(response.getStatus()));
-
- // Clear the MDC context so that no other transaction inadvertently
- // uses our transaction id.
- ApiUtils.clearMdcContext();
-
- return response;
- }
-}
diff --git a/src/main/java/org/openecomp/sa/rest/BulkMetaData.java b/src/main/java/org/openecomp/sa/rest/BulkMetaData.java
deleted file mode 100644
index f0ca73d..0000000
--- a/src/main/java/org/openecomp/sa/rest/BulkMetaData.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.rest;
-
-public class BulkMetaData {
-
- private String url;
- private String etag;
-
-
- public String getUrl() {
- return url;
- }
-
- public void setUrl(String url) {
- this.url = url;
- }
-
- public String getEtag() {
- return etag;
- }
-
- public void setEtag(String anEtag) {
- this.etag = anEtag;
- }
-
- @Override
- public String toString() {
- return "MetaData: [url=" + url + ", etag=" + etag + "]";
- }
-}
diff --git a/src/main/java/org/openecomp/sa/rest/BulkOperation.java b/src/main/java/org/openecomp/sa/rest/BulkOperation.java
deleted file mode 100644
index 565aec3..0000000
--- a/src/main/java/org/openecomp/sa/rest/BulkOperation.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.rest;
-
-/**
- * This class represents a single instance of an operation to be performed
- * as part of a bulk request.
- */
-public class BulkOperation {
-
- /**
- * Contains meta data to be passed to the document store for it
- * to perform the operation.
- */
- private BulkMetaData metaData;
-
- /**
- * Contains the contents of the document to be acted on.
- */
- private Document document;
-
-
- public void setMetaData(BulkMetaData metaData) {
- this.metaData = metaData;
- }
-
- public BulkMetaData getMetaData() {
- return metaData;
- }
-
- public Document getDocument() {
- return document;
- }
-
- public void setDocument(Document document) {
- this.document = document;
- }
-
- @Override
- public String toString() {
- return "Operation: [" + metaData.toString() + ", " + document + "]";
- }
-}
diff --git a/src/main/java/org/openecomp/sa/rest/BulkRequest.java b/src/main/java/org/openecomp/sa/rest/BulkRequest.java
deleted file mode 100644
index ac2c7ba..0000000
--- a/src/main/java/org/openecomp/sa/rest/BulkRequest.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.rest;
-
-
-/**
- * This class represents a single instance of a request from the search client
- * that would be part of a bundle of such operations sent in a single bulk
- * request.
- */
-public class BulkRequest {
-
- public enum OperationType {
- CREATE,
- UPDATE,
- DELETE
- }
-
- private BulkOperation create;
- private BulkOperation update;
- private BulkOperation delete;
-
- public BulkOperation getCreate() {
- return create;
- }
-
- public void setCreate(BulkOperation create) {
- this.create = create;
- }
-
- public BulkOperation getUpdate() {
- return update;
- }
-
- public void setUpdate(BulkOperation update) {
- this.update = update;
- }
-
- public BulkOperation getDelete() {
- return delete;
- }
-
- public void setDelete(BulkOperation delete) {
- this.delete = delete;
- }
-
- public OperationType getOperationType() {
-
- if (create != null) {
- return OperationType.CREATE;
- } else if (update != null) {
- return OperationType.UPDATE;
- } else if (delete != null) {
- return OperationType.DELETE;
- } else {
- return null;
- }
- }
-
- public BulkOperation getOperation() {
- if (create != null) {
- return create;
- } else if (update != null) {
- return update;
- } else if (delete != null) {
- return delete;
- } else {
- return null;
- }
- }
-
- public String getIndex() {
- return ApiUtils.extractIndexFromUri(getOperation().getMetaData().getUrl());
- }
-
- public String getId() {
- return ApiUtils.extractIdFromUri(getOperation().getMetaData().getUrl());
- }
-
- @Override
- public String toString() {
-
- if (create != null) {
- return "create: [" + create.toString() + "]\n";
- } else if (update != null) {
- return "update: [" + update.toString() + "]\n";
- } else if (delete != null) {
- return "delete: [" + delete.toString() + "]\n";
- } else {
- return "UNDEFINED";
- }
- }
-}
diff --git a/src/main/java/org/openecomp/sa/rest/Document.java b/src/main/java/org/openecomp/sa/rest/Document.java
deleted file mode 100644
index 6b5cc3b..0000000
--- a/src/main/java/org/openecomp/sa/rest/Document.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.rest;
-
-import com.fasterxml.jackson.annotation.JsonAnyGetter;
-import com.fasterxml.jackson.annotation.JsonAnySetter;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class Document {
- private Map<String, Object> fields = new HashMap<String, Object>();
-
- @JsonAnyGetter
- public Map<String, Object> getFields() {
- return fields;
- }
-
- @JsonAnySetter
- public void setField(String name, Object value) {
- fields.put(name, value);
- }
-
- public String toJson() throws JsonProcessingException {
-
- ObjectMapper mapper = new ObjectMapper();
- return mapper.writeValueAsString(this);
- }
-
-
- @Override
- public String toString() {
- String str = "Document: [";
- for (String key : fields.keySet()) {
- str += key + ": " + fields.get(key);
- }
- str += "]";
-
- return str;
- }
-}
diff --git a/src/main/java/org/openecomp/sa/rest/DocumentApi.java b/src/main/java/org/openecomp/sa/rest/DocumentApi.java
deleted file mode 100644
index 8762380..0000000
--- a/src/main/java/org/openecomp/sa/rest/DocumentApi.java
+++ /dev/null
@@ -1,529 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.rest;
-
-import com.fasterxml.jackson.annotation.JsonInclude.Include;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.openecomp.cl.api.LogFields;
-import org.openecomp.cl.api.LogLine;
-import org.openecomp.cl.api.Logger;
-import org.openecomp.cl.eelf.LoggerFactory;
-import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreDataEntityImpl;
-import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface;
-import org.openecomp.sa.searchdbabstraction.entity.AggregationResults;
-import org.openecomp.sa.searchdbabstraction.entity.DocumentOperationResult;
-import org.openecomp.sa.searchdbabstraction.entity.SearchOperationResult;
-import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
-import org.openecomp.sa.searchdbabstraction.searchapi.SearchStatement;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.Status;
-
-public class DocumentApi {
- private static final String REQUEST_HEADER_RESOURCE_VERSION = "If-Match";
- private static final String RESPONSE_HEADER_RESOURCE_VERSION = "ETag";
- private static final String REQUEST_HEADER_ALLOW_IMPLICIT_INDEX_CREATION = "X-CreateIndex";
-
- protected SearchServiceApi searchService = null;
-
- private Logger logger = LoggerFactory.getInstance().getLogger(DocumentApi.class.getName());
- private Logger auditLogger = LoggerFactory.getInstance()
- .getAuditLogger(DocumentApi.class.getName());
-
- public DocumentApi(SearchServiceApi searchService) {
- this.searchService = searchService;
- }
-
- public Response processPost(String content, HttpServletRequest request, HttpHeaders headers,
- HttpServletResponse httpResponse, String index,
- DocumentStoreInterface documentStore) {
-
- // Initialize the MDC Context for logging purposes.
- ApiUtils.initMdcContext(request, headers);
-
- try {
- ObjectMapper mapper = new ObjectMapper();
- mapper.setSerializationInclusion(Include.NON_EMPTY);
- if (content == null) {
- return handleError(request, content, Status.BAD_REQUEST);
- }
-
- boolean isValid;
- try {
- isValid = searchService.validateRequest(headers, request, ApiUtils.Action.POST,
- ApiUtils.SEARCH_AUTH_POLICY_NAME);
- } catch (Exception e) {
- logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL,
- "DocumentApi.processPost",
- e.getMessage());
- return handleError(request, content, Status.FORBIDDEN);
- }
-
- if (!isValid) {
- return handleError(request, content, Status.FORBIDDEN);
- }
-
- DocumentStoreDataEntityImpl document = new DocumentStoreDataEntityImpl();
- document.setContent(content);
-
- DocumentOperationResult result = documentStore.createDocument(index, document, implicitlyCreateIndex(headers));
- String output = null;
- if (result.getResultCode() >= 200 && result.getResultCode() <= 299) {
- output = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getDocument());
- } else {
- output = result.getError() != null
- ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError())
- : result.getFailureCause();
- }
-
- if (httpResponse != null) {
- httpResponse.setHeader(RESPONSE_HEADER_RESOURCE_VERSION, result.getResultVersion());
- }
- Response response = Response.status(result.getResultCode()).entity(output).build();
- logResult(request, Response.Status.fromStatusCode(response.getStatus()));
-
- // Clear the MDC context so that no other transaction inadvertently
- // uses our transaction id.
- ApiUtils.clearMdcContext();
-
- return response;
- } catch (Exception e) {
- return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR);
- }
- }
-
- public Response processPut(String content, HttpServletRequest request, HttpHeaders headers,
- HttpServletResponse httpResponse, String index,
- String id, DocumentStoreInterface documentStore) {
-
- // Initialize the MDC Context for logging purposes.
- ApiUtils.initMdcContext(request, headers);
-
- try {
- ObjectMapper mapper = new ObjectMapper();
- mapper.setSerializationInclusion(Include.NON_EMPTY);
- if (content == null) {
- return handleError(request, content, Status.BAD_REQUEST);
- }
-
- boolean isValid;
- try {
- isValid = searchService.validateRequest(headers, request, ApiUtils.Action.PUT,
- ApiUtils.SEARCH_AUTH_POLICY_NAME);
- } catch (Exception e) {
- logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL,
- "DocumentApi.processPut",
- e.getMessage());
- return handleError(request, content, Status.FORBIDDEN);
- }
-
- if (!isValid) {
- return handleError(request, content, Status.FORBIDDEN);
- }
-
- String resourceVersion = headers.getRequestHeaders()
- .getFirst(REQUEST_HEADER_RESOURCE_VERSION);
-
- DocumentStoreDataEntityImpl document = new DocumentStoreDataEntityImpl();
- document.setId(id);
- document.setContent(content);
- document.setVersion(resourceVersion);
-
- DocumentOperationResult result = null;
- if (resourceVersion == null) {
- result = documentStore.createDocument(index, document, implicitlyCreateIndex(headers));
- } else {
- result = documentStore.updateDocument(index, document, implicitlyCreateIndex(headers));
- }
-
- String output = null;
- if (result.getResultCode() >= 200 && result.getResultCode() <= 299) {
- output = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getDocument());
- } else {
- output = result.getError() != null
- ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError())
- : result.getFailureCause();
- }
- if (httpResponse != null) {
- httpResponse.setHeader(RESPONSE_HEADER_RESOURCE_VERSION, result.getResultVersion());
- }
- Response response = Response.status(result.getResultCode()).entity(output).build();
- logResult(request, Response.Status.fromStatusCode(response.getStatus()));
-
- // Clear the MDC context so that no other transaction inadvertently
- // uses our transaction id.
- ApiUtils.clearMdcContext();
-
- return response;
- } catch (Exception e) {
- return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR);
- }
- }
-
- public Response processDelete(String content, HttpServletRequest request, HttpHeaders headers,
- HttpServletResponse httpResponse, String index, String id,
- DocumentStoreInterface documentStore) {
-
- // Initialize the MDC Context for logging purposes.
- ApiUtils.initMdcContext(request, headers);
-
- try {
- ObjectMapper mapper = new ObjectMapper();
- mapper.setSerializationInclusion(Include.NON_EMPTY);
- boolean isValid;
- try {
- isValid = searchService.validateRequest(headers, request, ApiUtils.Action.DELETE,
- ApiUtils.SEARCH_AUTH_POLICY_NAME);
- } catch (Exception e) {
- logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL,
- "DocumentApi.processDelete",
- e.getMessage());
- return handleError(request, content, Status.FORBIDDEN);
- }
-
- if (!isValid) {
- return handleError(request, content, Status.FORBIDDEN);
- }
-
- String resourceVersion = headers.getRequestHeaders()
- .getFirst(REQUEST_HEADER_RESOURCE_VERSION);
- if (resourceVersion == null || resourceVersion.isEmpty()) {
- return handleError(request, "Request header 'If-Match' missing",
- javax.ws.rs.core.Response.Status.BAD_REQUEST);
- }
-
- DocumentStoreDataEntityImpl document = new DocumentStoreDataEntityImpl();
- document.setId(id);
- document.setVersion(resourceVersion);
-
- DocumentOperationResult result = documentStore.deleteDocument(index, document);
- String output = null;
- if (!(result.getResultCode() >= 200 && result.getResultCode() <= 299)) {
- output = result.getError() != null
- ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError())
- : result.getFailureCause();
- }
-
- if (httpResponse != null) {
- httpResponse.setHeader(RESPONSE_HEADER_RESOURCE_VERSION, result.getResultVersion());
- }
- Response response;
- if (output == null) {
- response = Response.status(result.getResultCode()).build();
- } else {
- response = Response.status(result.getResultCode()).entity(output).build();
- }
-
- logResult(request, Response.Status.fromStatusCode(response.getStatus()));
-
- // Clear the MDC context so that no other transaction inadvertently
- // uses our transaction id.
- ApiUtils.clearMdcContext();
-
- return response;
- } catch (Exception e) {
- return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR);
- }
- }
-
- public Response processGet(String content, HttpServletRequest request, HttpHeaders headers,
- HttpServletResponse httpResponse, String index, String id,
- DocumentStoreInterface documentStore) {
-
- // Initialize the MDC Context for logging purposes.
- ApiUtils.initMdcContext(request, headers);
-
- try {
- ObjectMapper mapper = new ObjectMapper();
- mapper.setSerializationInclusion(Include.NON_EMPTY);
- boolean isValid;
- try {
- isValid = searchService.validateRequest(headers, request, ApiUtils.Action.GET,
- ApiUtils.SEARCH_AUTH_POLICY_NAME);
- } catch (Exception e) {
- logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL,
- "DocumentApi.processGet",
- e.getMessage());
- return handleError(request, content, Status.FORBIDDEN);
- }
-
- if (!isValid) {
- return handleError(request, content, Status.FORBIDDEN);
- }
-
- String resourceVersion = headers.getRequestHeaders()
- .getFirst(REQUEST_HEADER_RESOURCE_VERSION);
-
- DocumentStoreDataEntityImpl document = new DocumentStoreDataEntityImpl();
- document.setId(id);
- document.setVersion(resourceVersion);
-
- DocumentOperationResult result = documentStore.getDocument(index, document);
- String output = null;
- if (result.getResultCode() >= 200 && result.getResultCode() <= 299) {
- output = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getDocument());
- } else {
- output = result.getError() != null
- ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError())
- : result.getFailureCause();
- }
- if (httpResponse != null) {
- httpResponse.setHeader(RESPONSE_HEADER_RESOURCE_VERSION, result.getResultVersion());
- }
- Response response = Response.status(result.getResultCode()).entity(output).build();
- logResult(request, Response.Status.fromStatusCode(response.getStatus()));
-
- // Clear the MDC context so that no other transaction inadvertently
- // uses our transaction id.
- ApiUtils.clearMdcContext();
-
- return response;
- } catch (Exception e) {
- return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR);
- }
- }
-
- public Response processSearchWithGet(String content, HttpServletRequest request,
- HttpHeaders headers, String index,
- String queryText, DocumentStoreInterface documentStore) {
-
- // Initialize the MDC Context for logging purposes.
- ApiUtils.initMdcContext(request, headers);
-
- try {
- ObjectMapper mapper = new ObjectMapper();
- mapper.setSerializationInclusion(Include.NON_EMPTY);
-
- boolean isValid;
- try {
- isValid = searchService.validateRequest(headers, request, ApiUtils.Action.GET,
- ApiUtils.SEARCH_AUTH_POLICY_NAME);
- } catch (Exception e) {
- logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL,
- "processSearchWithGet",
- e.getMessage());
- return handleError(request, content, Status.FORBIDDEN);
- }
-
- if (!isValid) {
- return handleError(request, content, Status.FORBIDDEN);
- }
-
- SearchOperationResult result = documentStore.search(index, queryText);
- String output = null;
- if (result.getResultCode() >= 200 && result.getResultCode() <= 299) {
- output = mapper.writerWithDefaultPrettyPrinter()
- .writeValueAsString(result.getSearchResult());
- } else {
- output = result.getError() != null
- ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError())
- : result.getFailureCause();
- }
- Response response = Response.status(result.getResultCode()).entity(output).build();
-
- // Clear the MDC context so that no other transaction inadvertently
- // uses our transaction id.
- ApiUtils.clearMdcContext();
-
- return response;
- } catch (Exception e) {
- return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR);
- }
- }
-
- public Response queryWithGetWithPayload(String content, HttpServletRequest request,
- HttpHeaders headers, String index,
- DocumentStoreInterface documentStore) {
-
- // Initialize the MDC Context for logging purposes.
- ApiUtils.initMdcContext(request, headers);
-
- logger.info(SearchDbMsgs.PROCESS_PAYLOAD_QUERY, "GET", (request != null)
- ? request.getRequestURL().toString() : "");
- if (logger.isDebugEnabled()) {
- logger.debug("Request Body: " + content);
- }
- return processQuery(index, content, request, headers, documentStore);
- }
-
- public Response processSearchWithPost(String content, HttpServletRequest request,
- HttpHeaders headers, String index,
- DocumentStoreInterface documentStore) {
-
- // Initialize the MDC Context for logging purposes.
- ApiUtils.initMdcContext(request, headers);
-
- logger.info(SearchDbMsgs.PROCESS_PAYLOAD_QUERY, "POST", (request != null)
- ? request.getRequestURL().toString() : "");
- if (logger.isDebugEnabled()) {
- logger.debug("Request Body: " + content);
- }
-
- return processQuery(index, content, request, headers, documentStore);
- }
-
- /**
- * Common handler for query requests. This is called by both the GET with
- * payload and POST with payload variants of the query endpoint.
- *
- * @param index - The index to be queried against.
- * @param content - The payload containing the query structure.
- * @param request - The HTTP request.
- * @param headers - The HTTP headers.
- * @param documentStore - The document store DAO that the query is issued against.
- * @return - A standard HTTP response.
- */
- private Response processQuery(String index, String content, HttpServletRequest request,
- HttpHeaders headers, DocumentStoreInterface documentStore) {
-
- try {
- ObjectMapper mapper = new ObjectMapper();
- mapper.setSerializationInclusion(Include.NON_EMPTY);
-
- // Make sure that we were supplied a payload before proceeding.
- if (content == null) {
- return handleError(request, content, Status.BAD_REQUEST);
- }
-
- // Validate that the request has the appropriate authorization.
- boolean isValid;
- try {
- isValid = searchService.validateRequest(headers, request, ApiUtils.Action.POST,
- ApiUtils.SEARCH_AUTH_POLICY_NAME);
-
- } catch (Exception e) {
- logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL,
- "processQuery",
- e.getMessage());
- return handleError(request, content, Status.FORBIDDEN);
- }
-
- if (!isValid) {
- return handleError(request, content, Status.FORBIDDEN);
- }
-
- SearchStatement searchStatement;
-
- try {
- // Unmarshal the supplied request payload into a search statement
- // object.
- searchStatement = mapper.readValue(content, SearchStatement.class);
-
- } catch (Exception e) {
- return handleError(request, e.getMessage(), Status.BAD_REQUEST);
- }
-
- // Now, submit the search statement, translated into
- // ElasticSearch syntax, to the document store DAO.
- SearchOperationResult result = documentStore.searchWithPayload(index,
- searchStatement.toElasticSearch());
- String output = null;
- if (result.getResultCode() >= 200 && result.getResultCode() <= 299) {
- output = prepareOutput(mapper, result);
- } else {
- output = result.getError() != null
- ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError())
- : result.getFailureCause();
- }
- Response response = Response.status(result.getResultCode()).entity(output).build();
-
- // Clear the MDC context so that no other transaction inadvertently
- // uses our transaction id.
- ApiUtils.clearMdcContext();
-
- return response;
-
- } catch (Exception e) {
- return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR);
- }
- }
-
-
- /**
- * Checks the supplied HTTP headers to see if we should allow the underlying document
- * store to implicitly create the index referenced in a document PUT or POST if it
- * does not already exist in the data store.
- *
- * @param headers - The HTTP headers to examine.
- *
- * @return - true if the headers indicate that missing indices should be implicitly created,
- * false otherwise.
- */
- private boolean implicitlyCreateIndex(HttpHeaders headers) {
-
- boolean createIndexIfNotPresent = false;
- String implicitIndexCreationHeader =
- headers.getRequestHeaders().getFirst(REQUEST_HEADER_ALLOW_IMPLICIT_INDEX_CREATION);
-
- if ((implicitIndexCreationHeader != null) && implicitIndexCreationHeader.equals("true")) {
- createIndexIfNotPresent = true;
- }
-
- return createIndexIfNotPresent;
- }
-
-
- private String prepareOutput(ObjectMapper mapper, SearchOperationResult result)
- throws JsonProcessingException {
- StringBuilder output = new StringBuilder();
- output.append("{\r\n\"searchResult\":");
- output.append(mapper.writerWithDefaultPrettyPrinter()
- .writeValueAsString(result.getSearchResult()));
- AggregationResults aggs = result.getAggregationResult();
- if (aggs != null) {
- output.append(",\r\n\"aggregationResult\":");
- output.append(mapper.setSerializationInclusion(Include.NON_NULL)
- .writerWithDefaultPrettyPrinter().writeValueAsString(aggs));
- }
- output.append("\r\n}");
- return output.toString();
- }
-
- private Response handleError(HttpServletRequest request, String message, Status status) {
- logResult(request, status);
- return Response.status(status).entity(message).type(MediaType.APPLICATION_JSON).build();
- }
-
- void logResult(HttpServletRequest request, Response.Status status) {
-
- logger.info(SearchDbMsgs.PROCESS_REST_REQUEST, (request != null) ? request.getMethod() : "",
- (request != null) ? request.getRequestURL().toString() : "",
- (request != null) ? request.getRemoteHost() : "", Integer.toString(status.getStatusCode()));
-
- auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, status.getStatusCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, status.getReasonPhrase()),
- (request != null) ? request.getMethod() : "",
- (request != null) ? request.getRequestURL().toString() : "",
- (request != null) ? request.getRemoteHost() : "", Integer.toString(status.getStatusCode()));
-
- // Clear the MDC context so that no other transaction inadvertently
- // uses our transaction id.
- ApiUtils.clearMdcContext();
- }
-}
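
For reference, the document endpoints above expose an optimistic-locking contract: the caller sends the expected version in the If-Match request header, the service returns the stored version in the ETag response header, and an X-CreateIndex: true header lets the store create a missing index on the fly. A minimal client-side sketch of that contract follows; the base URL, index name, document id and payload are invented for illustration, and a real deployment may mount the REST paths differently.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class DocumentPutSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical endpoint following the /indexes/{index}/documents/{id} route;
    // host, port and context path are assumptions, not taken from this code base.
    URL url = new URL("http://localhost:9509/indexes/widgets/documents/widget-1");

    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("PUT");
    conn.setDoOutput(true);
    conn.setRequestProperty("Content-Type", "application/json");
    conn.setRequestProperty("If-Match", "3");         // expected resource version
    conn.setRequestProperty("X-CreateIndex", "true"); // allow implicit index creation

    try (OutputStream out = conn.getOutputStream()) {
      out.write("{\"name\": \"widget one\"}".getBytes(StandardCharsets.UTF_8));
    }

    // The stored version of the document is echoed back in the ETag header.
    System.out.println("HTTP " + conn.getResponseCode()
        + ", version: " + conn.getHeaderField("ETag"));
  }
}

Omitting If-Match makes processPut fall through to createDocument rather than updateDocument, which is why a plain PUT without the header behaves as an upsert.
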
diff --git a/src/main/java/org/openecomp/sa/rest/IndexApi.java b/src/main/java/org/openecomp/sa/rest/IndexApi.java
deleted file mode 100644
index 7968b1b..0000000
--- a/src/main/java/org/openecomp/sa/rest/IndexApi.java
+++ /dev/null
@@ -1,376 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.rest;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.openecomp.cl.api.LogFields;
-import org.openecomp.cl.api.LogLine;
-import org.openecomp.cl.api.Logger;
-import org.openecomp.cl.eelf.LoggerFactory;
-import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface;
-import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException;
-import org.openecomp.sa.searchdbabstraction.entity.OperationResult;
-import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.Response;
-
-
-/**
- * This class encapsulates the REST end points associated with manipulating
- * indexes in the document store.
- */
-public class IndexApi {
-
- protected SearchServiceApi searchService = null;
-
- /**
- * Configuration for the custom analyzers that will be used for indexing.
- */
- protected AnalysisConfiguration analysisConfig;
-
- // Set up the loggers.
- private static Logger logger = LoggerFactory.getInstance()
- .getLogger(IndexApi.class.getName());
- private static Logger auditLogger = LoggerFactory.getInstance()
- .getAuditLogger(IndexApi.class.getName());
-
-
- public IndexApi(SearchServiceApi searchService) {
- this.searchService = searchService;
- init();
- }
-
-
- /**
- * Initializes the end point by instantiating the analysis configuration.
- */
- public void init() {
-
- // Instantiate our analysis configuration object.
- analysisConfig = new AnalysisConfiguration();
- }
-
-
- /**
- * Processes client requests to create a new index and document type in the
- * document store.
- *
- * @param documentSchema - The contents of the request body, which is expected
- *                         to be a JSON structure corresponding to the schema
- *                         defined in document.schema.json.
- * @param index - The name of the index to create.
- * @return - A Standard REST response
- */
- public Response processCreateIndex(String documentSchema,
- HttpServletRequest request,
- HttpHeaders headers,
- String index,
- DocumentStoreInterface documentStore) {
-
- int resultCode = 500;
- String resultString = "Unexpected error";
-
- // Initialize the MDC Context for logging purposes.
- ApiUtils.initMdcContext(request, headers);
-
- // Validate that the request is correctly authenticated before going
- // any further.
- try {
-
- if (!searchService.validateRequest(headers, request,
- ApiUtils.Action.POST, ApiUtils.SEARCH_AUTH_POLICY_NAME)) {
- logger.warn(SearchDbMsgs.INDEX_CREATE_FAILURE, index, "Authentication failure.");
- return errorResponse(Response.Status.FORBIDDEN, "Authentication failure.", request);
- }
-
- } catch (Exception e) {
-
- logger.warn(SearchDbMsgs.INDEX_CREATE_FAILURE, index,
- "Unexpected authentication failure - cause: " + e.getMessage());
- return errorResponse(Response.Status.FORBIDDEN, "Authentication failure.", request);
- }
-
-
- // We expect a payload containing the document schema. Make sure
- // it is present.
- if (documentSchema == null) {
- logger.warn(SearchDbMsgs.INDEX_CREATE_FAILURE, index, "Missing document schema payload");
- return errorResponse(Response.Status.fromStatusCode(resultCode), "Missing payload", request);
- }
-
- try {
-
- // Unmarshal the supplied JSON string into a document schema object.
- ObjectMapper mapper = new ObjectMapper();
- DocumentSchema schema = mapper.readValue(documentSchema, DocumentSchema.class);
-
- // Now, ask the DAO to create the index.
- OperationResult result = documentStore.createIndex(index, schema);
-
- // Extract the result code and string from the OperationResult
- // object so that we can use them to generate a standard REST
- // response.
- // Note that we want to return a 201 result code on a successful
- // create, so if we get back a 200 from the document store,
- // translate that into a 201.
- resultCode = (result.getResultCode() == 200) ? 201 : result.getResultCode();
- resultString = (result.getFailureCause() == null)
- ? result.getResult() : result.getFailureCause();
-
- } catch (com.fasterxml.jackson.core.JsonParseException
- | com.fasterxml.jackson.databind.JsonMappingException e) {
-
- // We were unable to marshal the supplied json string into a valid
- // document schema, so return an appropriate error response.
- resultCode = javax.ws.rs.core.Response.Status.BAD_REQUEST.getStatusCode();
- resultString = "Malformed schema: " + e.getMessage();
-
- } catch (IOException e) {
-
- // We'll treat this as a general internal error.
- resultCode = javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR.getStatusCode();
- resultString = "IO Failure: " + e.getMessage();
- }
-
- Response response = Response.status(resultCode).entity(resultString).build();
-
- // Log the result.
- if ((response.getStatus() >= 200) && (response.getStatus() < 300)) {
- logger.info(SearchDbMsgs.CREATED_INDEX, index);
- } else {
- logger.warn(SearchDbMsgs.INDEX_CREATE_FAILURE, index, resultString);
- }
-
- // Generate our audit log.
- auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, resultCode)
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION,
- Response.Status.fromStatusCode(resultCode).toString()),
- (request != null) ? request.getMethod() : "Unknown",
- (request != null) ? request.getRequestURL().toString() : "Unknown",
- (request != null) ? request.getRemoteHost() : "Unknown",
- Integer.toString(response.getStatus()));
-
- // Clear the MDC context so that no other transaction inadvertently
- // uses our transaction id.
- ApiUtils.clearMdcContext();
-
- // Finally, return the response.
- return response;
- }
-
-
- /**
- * Processes a client request to remove an index from the document store.
- * Note that this implicitly deletes all documents contained within that index.
- *
- * @param index - The index to be deleted.
- * @return - A standard REST response.
- */
- public Response processDelete(String index,
- HttpServletRequest request,
- HttpHeaders headers,
- DocumentStoreInterface documentStore) {
-
- // Initialize the MDC Context for logging purposes.
- ApiUtils.initMdcContext(request, headers);
-
- // Set a default response in case something unexpected goes wrong.
- Response response = Response.status(javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR)
- .entity("Unknown")
- .build();
-
- // Validate that the request is correctly authenticated before going
- // any further.
- try {
-
- if (!searchService.validateRequest(headers, request, ApiUtils.Action.POST,
- ApiUtils.SEARCH_AUTH_POLICY_NAME)) {
- logger.warn(SearchDbMsgs.INDEX_DELETE_FAILURE, index, "Authentication failure.");
- return errorResponse(Response.Status.FORBIDDEN, "Authentication failure.", request);
- }
-
- } catch (Exception e) {
-
- logger.warn(SearchDbMsgs.INDEX_DELETE_FAILURE, index,
- "Unexpected authentication failure - cause: " + e.getMessage());
- return errorResponse(Response.Status.FORBIDDEN, "Authentication failure.", request);
- }
-
-
- try {
- // Send the request to the document store.
- response = responseFromOperationResult(documentStore.deleteIndex(index));
-
- } catch (DocumentStoreOperationException e) {
- response = Response.status(javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR)
- .entity(e.getMessage())
- .build();
- }
-
-
- // Log the result.
- if ((response.getStatus() >= 200) && (response.getStatus() < 300)) {
- logger.info(SearchDbMsgs.DELETED_INDEX, index);
- } else {
- logger.warn(SearchDbMsgs.INDEX_DELETE_FAILURE, index, (String) response.getEntity());
- }
-
- // Generate our audit log.
- auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, response.getStatus())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION,
- response.getStatusInfo().getReasonPhrase()),
- (request != null) ? request.getMethod() : "Unknown",
- (request != null) ? request.getRequestURL().toString() : "Unknown",
- (request != null) ? request.getRemoteHost() : "Unknown",
- Integer.toString(response.getStatus()));
-
- // Clear the MDC context so that no other transaction inadvertently
- // uses our transaction id.
- ApiUtils.clearMdcContext();
-
- return response;
- }
-
-
- /**
- * This method takes a JSON format document schema and produces a set of
- * field mappings in the form that Elastic Search expects.
- *
- * @param documentSchema - A document schema expressed as a JSON string.
- * @return - A JSON string expressing an Elastic Search mapping configuration.
- * @throws com.fasterxml.jackson.core.JsonParseException
- * @throws com.fasterxml.jackson.databind.JsonMappingException
- * @throws IOException
- */
- public String generateDocumentMappings(String documentSchema)
- throws com.fasterxml.jackson.core.JsonParseException,
- com.fasterxml.jackson.databind.JsonMappingException, IOException {
-
- // Unmarshal the json content into a document schema object.
- ObjectMapper mapper = new ObjectMapper();
- DocumentSchema schema = mapper.readValue(documentSchema, DocumentSchema.class);
-
- // Now, generate the Elastic Search mapping json and return it.
- StringBuilder sb = new StringBuilder();
- sb.append("{");
- sb.append("\"properties\": {");
-
- boolean first = true;
- for (DocumentFieldSchema field : schema.getFields()) {
-
- if (!first) {
- sb.append(",");
- } else {
- first = false;
- }
-
- sb.append("\"").append(field.getName()).append("\": {");
-
- // The field type is mandatory.
- sb.append("\"type\": \"").append(field.getDataType()).append("\"");
-
- // If the index field was specified, then append it.
- if (field.getSearchable() != null) {
- sb.append(", \"index\": \"").append(field.getSearchable()
- ? "analyzed" : "not_analyzed").append("\"");
- }
-
- // If a search analyzer was specified, then append it.
- if (field.getSearchAnalyzer() != null) {
- sb.append(", \"search_analyzer\": \"").append(field.getSearchAnalyzer()).append("\"");
- }
-
- // If an indexing analyzer was specified, then append it.
- if (field.getIndexAnalyzer() != null) {
- sb.append(", \"analyzer\": \"").append(field.getIndexAnalyzer()).append("\"");
- } else {
- sb.append(", \"analyzer\": \"").append("whitespace").append("\"");
- }
-
- sb.append("}");
- }
-
- sb.append("}");
- sb.append("}");
-
- logger.debug("Generated document mappings: " + sb.toString());
-
- return sb.toString();
- }
-
-
- /**
- * Converts an {@link OperationResult} to a standard REST {@link Response}
- * object.
- *
- * @param result - The {@link OperationResult} to be converted.
- * @return - The equivalent {@link Response} object.
- */
- public Response responseFromOperationResult(OperationResult result) {
-
- if ((result.getResultCode() >= 200) && (result.getResultCode() < 300)) {
- return Response.status(result.getResultCode()).entity(result.getResult()).build();
- } else {
- if (result.getFailureCause() != null) {
- return Response.status(result.getResultCode()).entity(result.getFailureCause()).build();
- } else {
- return Response.status(result.getResultCode()).entity(result.getResult()).build();
- }
- }
- }
-
- public Response errorResponse(Response.Status status, String msg, HttpServletRequest request) {
-
- // Generate our audit log.
- auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, status.getStatusCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, status.getReasonPhrase()),
- (request != null) ? request.getMethod() : "Unknown",
- (request != null) ? request.getRequestURL().toString() : "Unknown",
- (request != null) ? request.getRemoteHost() : "Unknown",
- Integer.toString(status.getStatusCode()));
-
- // Clear the MDC context so that no other transaction inadvertently
- // uses our transaction id.
- ApiUtils.clearMdcContext();
-
- return Response.status(status)
- .entity(msg)
- .build();
- }
-
-
-}
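
To make the string handling in generateDocumentMappings easier to follow, this is the shape of the Elasticsearch mapping it emits for a single searchable field. The field and analyzer names are invented for illustration, and the output is shown with line breaks that the method itself does not produce:

{"properties": {
  "firstName": {
    "type": "string",
    "index": "analyzed",
    "search_analyzer": "whitespace",
    "analyzer": "ngram_analyzer"
  }
}}

Fields without an explicit index analyzer fall back to the whitespace analyzer, and the index attribute is only emitted when the schema explicitly sets the searchable flag.
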
diff --git a/src/main/java/org/openecomp/sa/rest/SearchServiceApi.java b/src/main/java/org/openecomp/sa/rest/SearchServiceApi.java
deleted file mode 100644
index f6afefe..0000000
--- a/src/main/java/org/openecomp/sa/rest/SearchServiceApi.java
+++ /dev/null
@@ -1,257 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.rest;
-
-import org.openecomp.sa.auth.SearchDbServiceAuth;
-import org.openecomp.sa.rest.ApiUtils.Action;
-import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface;
-import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.ElasticSearchHttpController;
-
-import java.security.cert.X509Certificate;
-import javax.security.auth.x500.X500Principal;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-
-
-public class SearchServiceApi {
-
- /**
- * The Data Access Object that we will use to interact with the
- * document store.
- */
- protected DocumentStoreInterface documentStore = null;
- protected ApiUtils apiUtils = null;
-
-
- /**
- * Create a new instance of the end point.
- */
- public SearchServiceApi() {
-
- // Perform one-time initialization.
- init();
- }
-
-
- /**
- * Performs all one-time initialization required for the end point.
- */
- public void init() {
-
- // Instantiate our Document Store DAO.
- documentStore = ElasticSearchHttpController.getInstance();
-
- apiUtils = new ApiUtils();
- }
-
- @PUT
- @Path("/indexes/{index}")
- @Consumes({MediaType.APPLICATION_JSON})
- public Response processCreateIndex(String requestBody,
- @Context HttpServletRequest request,
- @Context HttpHeaders headers,
- @PathParam("index") String index) {
-
- // Forward the request to our index API to create the index.
- IndexApi indexApi = new IndexApi(this);
- return indexApi.processCreateIndex(requestBody, request, headers, index, documentStore);
- }
-
-
- @DELETE
- @Path("/indexes/{index}")
- @Consumes({MediaType.APPLICATION_JSON})
- public Response processDeleteIndex(String requestBody,
- @Context HttpServletRequest request,
- @Context HttpHeaders headers,
- @PathParam("index") String index) {
-
- // Forward the request to our index API to delete the index.
- IndexApi indexApi = new IndexApi(this);
- return indexApi.processDelete(index, request, headers, documentStore);
- }
-
-
- @GET
- @Path("/indexes/{index}/documents/{id}")
- @Consumes({MediaType.APPLICATION_JSON})
- public Response processGetDocument(String requestBody,
- @Context HttpServletRequest request,
- @Context HttpServletResponse httpResponse,
- @Context HttpHeaders headers,
- @PathParam("index") String index,
- @PathParam("id") String id) {
-
- // Forward the request to our document API to retrieve the document.
- DocumentApi documentApi = new DocumentApi(this);
- return documentApi.processGet(requestBody, request, headers, httpResponse,
- index, id, documentStore);
- }
-
- @POST
- @Path("/indexes/{index}/documents")
- @Consumes({MediaType.APPLICATION_JSON})
- public Response processCreateDocWithoutId(String requestBody,
- @Context HttpServletRequest request,
- @Context HttpServletResponse httpResponse,
- @Context HttpHeaders headers,
- @PathParam("index") String index) {
-
- // Forward the request to our document API to create the document.
- DocumentApi documentApi = new DocumentApi(this);
- return documentApi.processPost(requestBody, request, headers, httpResponse,
- index, documentStore);
- }
-
- @PUT
- @Path("/indexes/{index}/documents/{id}")
- @Consumes({MediaType.APPLICATION_JSON})
- public Response processUpsertDoc(String requestBody,
- @Context HttpServletRequest request,
- @Context HttpServletResponse httpResponse,
- @Context HttpHeaders headers,
- @PathParam("index") String index,
- @PathParam("id") String id) {
-
- // Forward the request to our document API to upsert the document.
- DocumentApi documentApi = new DocumentApi(this);
- return documentApi.processPut(requestBody, request, headers, httpResponse,
- index, id, documentStore);
- }
-
- @DELETE
- @Path("/indexes/{index}/documents/{id}")
- @Consumes({MediaType.APPLICATION_JSON})
- public Response processDeleteDoc(String requestBody,
- @Context HttpServletRequest request,
- @Context HttpServletResponse httpResponse,
- @Context HttpHeaders headers,
- @PathParam("index") String index,
- @PathParam("id") String id) {
-
- // Forward the request to our document API to delete the document.
- DocumentApi documentApi = new DocumentApi(this);
- return documentApi.processDelete(requestBody, request, headers, httpResponse,
- index, id, documentStore);
- }
-
-
- @GET
- @Path("/indexes/{index}/query/{queryText}")
- @Consumes({MediaType.APPLICATION_JSON})
- public Response processInlineQuery(String requestBody,
- @Context HttpServletRequest request,
- @Context HttpHeaders headers,
- @PathParam("index") String index,
- @PathParam("queryText") String queryText) {
-
- // Forward the request to our document API to perform the query.
- DocumentApi documentApi = new DocumentApi(this);
- return documentApi.processSearchWithGet(requestBody, request, headers,
- index, queryText, documentStore);
- }
-
-
- @GET
- @Path("/indexes/{index}/query")
- @Consumes({MediaType.APPLICATION_JSON})
- public Response processQueryWithGet(String requestBody,
- @Context HttpServletRequest request,
- @Context HttpHeaders headers,
- @PathParam("index") String index) {
-
- // Forward the request to our document API to run the query supplied in the GET payload.
- DocumentApi documentApi = new DocumentApi(this);
- return documentApi.queryWithGetWithPayload(requestBody, request, headers, index, documentStore);
- }
-
- @POST
- @Path("/indexes/{index}/query")
- @Consumes({MediaType.APPLICATION_JSON})
- public Response processQuery(String requestBody,
- @Context HttpServletRequest request,
- @Context HttpHeaders headers,
- @PathParam("index") String index) {
-
- // Forward the request to our document API to run the query supplied in the POST payload.
- DocumentApi documentApi = new DocumentApi(this);
- return documentApi.processSearchWithPost(requestBody, request, headers, index, documentStore);
- }
-
-
- @POST
- @Path("/bulk")
- @Consumes({MediaType.APPLICATION_JSON})
- public Response processBulkRequest(String requestBody,
- @Context HttpServletRequest request,
- @Context HttpHeaders headers,
- @PathParam("index") String index) {
-
- // Forward the request to our bulk API to process the bulk operations.
- BulkApi bulkApi = new BulkApi(this);
- return bulkApi.processPost(requestBody, request, headers, documentStore, apiUtils);
- }
-
- protected boolean validateRequest(HttpHeaders headers,
- HttpServletRequest req,
- Action action,
- String authPolicyFunctionName) throws Exception {
-
- SearchDbServiceAuth serviceAuth = new SearchDbServiceAuth();
-
- String cipherSuite = (String) req.getAttribute("javax.servlet.request.cipher_suite");
- String authUser = null;
- if (cipherSuite != null) {
- Object x509CertAttribute = req.getAttribute("javax.servlet.request.X509Certificate");
- if (x509CertAttribute != null) {
- X509Certificate[] certChain = (X509Certificate[]) x509CertAttribute;
- X509Certificate clientCert = certChain[0];
- X500Principal subjectDn = clientCert.getSubjectX500Principal();
- authUser = subjectDn.toString();
- }
- }
-
- if (authUser == null) {
- return false;
- }
-
- String status = serviceAuth.authUser(headers, authUser.toLowerCase(),
- action.toString() + ":" + authPolicyFunctionName);
- if (!status.equals("OK")) {
- return false;
- }
-
- return true;
- }
-}
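
validateRequest above only trusts a caller when the servlet container reports a TLS cipher suite and a client certificate chain on the request. The test-oriented sketch below shows how those servlet attributes line up; it assumes Mockito is available on the classpath, and the subject DN and cipher suite values are illustrative only.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.security.cert.X509Certificate;
import javax.security.auth.x500.X500Principal;
import javax.servlet.http.HttpServletRequest;

public class ValidateRequestSketch {

  // Stubs the two servlet attributes that validateRequest inspects. With these in
  // place the method lower-cases the certificate subject DN and asks
  // SearchDbServiceAuth for a "<ACTION>:<policy function>" permission.
  static HttpServletRequest tlsRequestWithClientCert() {
    X509Certificate cert = mock(X509Certificate.class);
    when(cert.getSubjectX500Principal())
        .thenReturn(new X500Principal("CN=search-client, O=example"));

    HttpServletRequest request = mock(HttpServletRequest.class);
    when(request.getAttribute("javax.servlet.request.cipher_suite"))
        .thenReturn("TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256");
    when(request.getAttribute("javax.servlet.request.X509Certificate"))
        .thenReturn(new X509Certificate[] {cert});
    return request;
  }
}

Requests that arrive without the cipher_suite attribute, or without a certificate chain, never reach the policy check: validateRequest returns false as soon as it fails to derive an authenticated user.
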
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/JaxrsEchoService.java b/src/main/java/org/openecomp/sa/searchdbabstraction/JaxrsEchoService.java
deleted file mode 100644
index c2726ca..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/JaxrsEchoService.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-
-
-/**
- * Exposes REST endpoints for a simple echo service.
- */
-@Path("/jaxrs-services")
-public class JaxrsEchoService {
-
- /**
- * REST endpoint for a simple echo service.
- *
- * @param input - The value to be echoed back.
- * @return - A message containing the echoed input value.
- */
- @GET
- @Path("/echo/{input}")
- @Produces("text/plain")
- public String ping(@PathParam("input") String input) {
- return "[Search Database Abstraction Micro Service] - Echo Service: " + input + ".";
- }
-
-}
\ No newline at end of file
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/JaxrsUserService.java b/src/main/java/org/openecomp/sa/searchdbabstraction/JaxrsUserService.java
deleted file mode 100644
index 5c80f05..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/JaxrsUserService.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction;
-
-import java.util.HashMap;
-import java.util.Map;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-
-@Path("/user")
-public class JaxrsUserService {
-
- private static final Map<String, String> userIdToNameMap;
-
- static {
- userIdToNameMap = new HashMap<String, String>();
- userIdToNameMap.put("dw113c", "Doug Wait");
- userIdToNameMap.put("so401q", "Stuart O'Day");
- }
-
- @GET
- @Path("/{userId}")
- @Produces("text/plain")
- public String lookupUser(@PathParam("userId") String userId) {
- String name = userIdToNameMap.get(userId);
- return name != null ? name : "unknown id";
- }
-
-}
\ No newline at end of file
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/config/ElasticSearchConfig.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/config/ElasticSearchConfig.java
deleted file mode 100644
index 1a3dd00..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/config/ElasticSearchConfig.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.elasticsearch.config;
-
-import java.util.Properties;
-
-public class ElasticSearchConfig {
- private String ipAddress;
- private String httpPort;
- private String javaApiPort;
- private String clusterName;
-
- public static final String ES_CLUSTER_NAME = "es.cluster-name";
- public static final String ES_IP_ADDRESS = "es.ip-address";
- public static final String ES_HTTP_PORT = "es.http-port";
-
- private static final String JAVA_API_PORT_DEFAULT = "9300";
-
- public ElasticSearchConfig(Properties props) {
-
- setClusterName(props.getProperty(ES_CLUSTER_NAME));
- setIpAddress(props.getProperty(ES_IP_ADDRESS));
- setHttpPort(props.getProperty(ES_HTTP_PORT));
- setJavaApiPort(JAVA_API_PORT_DEFAULT);
- }
-
- public String getIpAddress() {
- return ipAddress;
- }
-
- public void setIpAddress(String ipAddress) {
- this.ipAddress = ipAddress;
- }
-
- public String getHttpPort() {
- return httpPort;
- }
-
- public void setHttpPort(String httpPort) {
- this.httpPort = httpPort;
- }
-
- public String getJavaApiPort() {
- return javaApiPort;
- }
-
- public void setJavaApiPort(String javaApiPort) {
- this.javaApiPort = javaApiPort;
- }
-
- public String getClusterName() {
- return clusterName;
- }
-
- public void setClusterName(String clusterName) {
- this.clusterName = clusterName;
- }
-
- @Override
- public String toString() {
- return "ElasticSearchConfig [ipAddress=" + ipAddress + ", httpPort=" + httpPort
- + ", javaApiPort=" + javaApiPort + ", clusterName=" + clusterName + "]";
- }
-
-}
\ No newline at end of file
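
As a small usage sketch, the configuration object can be populated from a Properties instance keyed by the constants defined above. The cluster name, address and port values here are placeholders; in the running service the properties would come from its configuration rather than being set inline.

import java.util.Properties;

import org.openecomp.sa.searchdbabstraction.elasticsearch.config.ElasticSearchConfig;

public class ElasticSearchConfigSketch {
  public static void main(String[] args) {
    Properties props = new Properties();
    props.setProperty(ElasticSearchConfig.ES_CLUSTER_NAME, "search-cluster");
    props.setProperty(ElasticSearchConfig.ES_IP_ADDRESS, "127.0.0.1");
    props.setProperty(ElasticSearchConfig.ES_HTTP_PORT, "9200");

    ElasticSearchConfig config = new ElasticSearchConfig(props);

    // The Java API port is never read from the properties; it always defaults to 9300.
    System.out.println(config);
  }
}
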
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreDataEntity.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreDataEntity.java
deleted file mode 100644
index 87b38c1..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreDataEntity.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
-
-public interface DocumentStoreDataEntity {
-
- public String getId();
-
- public String getContentInJson();
-
- public String getVersion();
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreDataEntityImpl.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreDataEntityImpl.java
deleted file mode 100644
index 442b615..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreDataEntityImpl.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
-
-public class DocumentStoreDataEntityImpl implements DocumentStoreDataEntity {
-
- private String id;
- private String content;
- private String version;
-
- public String getContent() {
- return content;
- }
-
- public void setContent(String content) {
- this.content = content;
- }
-
- public void setId(String id) {
- this.id = id;
- }
-
- @Override
- public String getId() {
- return id;
- }
-
- @Override
- public String getContentInJson() {
- return content;
- }
-
- @Override
- public String getVersion() {
- return version;
- }
-
- public void setVersion(String version) {
- this.version = version;
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreInterface.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreInterface.java
deleted file mode 100644
index 73a5683..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreInterface.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
-
-
-import org.openecomp.sa.rest.BulkRequest;
-import org.openecomp.sa.rest.DocumentSchema;
-import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException;
-import org.openecomp.sa.searchdbabstraction.entity.DocumentOperationResult;
-import org.openecomp.sa.searchdbabstraction.entity.OperationResult;
-import org.openecomp.sa.searchdbabstraction.entity.SearchOperationResult;
-
-
-public interface DocumentStoreInterface {
-
- public OperationResult createIndex(String index, DocumentSchema documentSchema);
-
- public OperationResult deleteIndex(String indexName) throws DocumentStoreOperationException;
-
- public DocumentOperationResult createDocument(String indexName,
- DocumentStoreDataEntity document,
- boolean allowImplicitIndexCreation) throws DocumentStoreOperationException;
-
- public DocumentOperationResult updateDocument(String indexName,
- DocumentStoreDataEntity document,
- boolean allowImplicitIndexCreation) throws DocumentStoreOperationException;
-
- public DocumentOperationResult deleteDocument(String indexName, DocumentStoreDataEntity document)
- throws DocumentStoreOperationException;
-
- public DocumentOperationResult getDocument(String indexName, DocumentStoreDataEntity document)
- throws DocumentStoreOperationException;
-
- public SearchOperationResult search(String indexName, String queryText)
- throws DocumentStoreOperationException;
-
- public SearchOperationResult searchWithPayload(String indexName, String query)
- throws DocumentStoreOperationException;
-
-
- /**
- * Forwards a set of operations to the document store as a single, bulk
- * request.
- *
- * @param request - An array of bulk requests describing the set of operations to
- * be performed.
- * @return - An operation result.
- * @throws DocumentStoreOperationException
- */
- public OperationResult performBulkOperations(BulkRequest[] request)
- throws DocumentStoreOperationException;
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchBulkOperationResult.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchBulkOperationResult.java
deleted file mode 100644
index 6aff3b8..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchBulkOperationResult.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
-
-import java.util.Arrays;
-
-public class ElasticSearchBulkOperationResult {
-
- private Integer took;
- private Boolean errors;
- private ElasticSearchResultItem[] items;
-
- public ElasticSearchBulkOperationResult() {
-
- }
-
- public ElasticSearchResultItem[] getItems() {
- return items;
- }
-
- public void setItems(ElasticSearchResultItem[] items) {
- this.items = items;
- }
-
- public Integer getTook() {
- return took;
- }
-
- public void setTook(Integer took) {
- this.took = took;
- }
-
- public Boolean getErrors() {
- return errors;
- }
-
- public void setErrors(Boolean errors) {
- this.errors = errors;
- }
-
- @Override
- public String toString() {
- return "ElasticSearchOperationResult [took=" + took + ", errors="
- + errors + ", items=" + Arrays.toString(items) + "]";
- }
-
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchCause.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchCause.java
deleted file mode 100644
index ba5be26..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchCause.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
-
-public class ElasticSearchCause {
-
- private String type;
- private String reason;
-
- public String getType() {
- return type;
- }
-
- public void setType(String type) {
- this.type = type;
- }
-
- public String getReason() {
- return reason;
- }
-
- public void setReason(String reason) {
- this.reason = reason;
- }
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchError.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchError.java
deleted file mode 100644
index 7ebf48c..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchError.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
-
-import com.fasterxml.jackson.annotation.JsonAnyGetter;
-import com.fasterxml.jackson.annotation.JsonAnySetter;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class ElasticSearchError {
-
- private String type;
- private String reason;
- private ElasticSearchCause causedBy;
-
- private Map<String, Object> additionalProperties = new HashMap<String, Object>();
-
-
- public String getType() {
- return type;
- }
-
- public void setType(String type) {
- this.type = type;
- }
-
- public String getReason() {
- return reason;
- }
-
- public void setReason(String reason) {
- this.reason = reason;
- }
-
- public ElasticSearchCause getCausedBy() {
- return causedBy;
- }
-
- public void setCausedBy(ElasticSearchCause causedBy) {
- this.causedBy = causedBy;
- }
-
- @JsonAnyGetter
- public Map<String, Object> getAdditionalProperties() {
- return additionalProperties;
- }
-
- @JsonAnySetter
- public void setAdditionalProperties(String name, Object value) {
- additionalProperties.put(name, value);
- }
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchHttpController.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchHttpController.java
deleted file mode 100644
index cd07200..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchHttpController.java
+++ /dev/null
@@ -1,1634 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
-
-import com.att.aft.dme2.internal.google.common.base.Throwables;
-import com.fasterxml.jackson.annotation.JsonInclude.Include;
-import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.JsonMappingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import edu.emory.mathcs.backport.java.util.Arrays;
-import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-import org.json.simple.parser.JSONParser;
-import org.json.simple.parser.ParseException;
-import org.openecomp.cl.api.LogFields;
-import org.openecomp.cl.api.LogLine;
-import org.openecomp.cl.api.Logger;
-import org.openecomp.cl.eelf.LoggerFactory;
-import org.openecomp.cl.mdc.MdcContext;
-import org.openecomp.cl.mdc.MdcOverride;
-import org.openecomp.sa.rest.AnalysisConfiguration;
-import org.openecomp.sa.rest.ApiUtils;
-import org.openecomp.sa.rest.BulkRequest;
-import org.openecomp.sa.rest.BulkRequest.OperationType;
-import org.openecomp.sa.rest.DocumentSchema;
-import org.openecomp.sa.searchdbabstraction.elasticsearch.config.ElasticSearchConfig;
-import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException;
-import org.openecomp.sa.searchdbabstraction.entity.AggregationResult;
-import org.openecomp.sa.searchdbabstraction.entity.AggregationResults;
-import org.openecomp.sa.searchdbabstraction.entity.Document;
-import org.openecomp.sa.searchdbabstraction.entity.DocumentOperationResult;
-import org.openecomp.sa.searchdbabstraction.entity.ErrorResult;
-import org.openecomp.sa.searchdbabstraction.entity.OperationResult;
-import org.openecomp.sa.searchdbabstraction.entity.SearchHit;
-import org.openecomp.sa.searchdbabstraction.entity.SearchHits;
-import org.openecomp.sa.searchdbabstraction.entity.SearchOperationResult;
-import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
-import org.openecomp.sa.searchdbabstraction.util.AggregationParsingUtil;
-import org.openecomp.sa.searchdbabstraction.util.DocumentSchemaUtil;
-import org.openecomp.sa.searchdbabstraction.util.SearchDbConstants;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.net.HttpURLConnection;
-import java.net.MalformedURLException;
-import java.net.ProtocolException;
-import java.net.URL;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Properties;
-import java.util.concurrent.atomic.AtomicBoolean;
-import javax.ws.rs.core.Response.Status;
-
-
-/**
- * This class provides the ElasticSearch implementation of the document store
- * operations defined in DocumentStoreInterface.
- */
-public class ElasticSearchHttpController implements DocumentStoreInterface {
-
- private static final String BULK_CREATE_WITHOUT_INDEX_TEMPLATE =
- "{\"create\":{\"_index\" : \"%s\", \"_type\" : \"%s\"} }\n";
- private static final String BULK_CREATE_WITH_INDEX_TEMPLATE =
- "{\"create\":{\"_index\" : \"%s\", \"_type\" : \"%s\", \"_id\" : \"%s\" } }\n";
- private static final String BULK_IMPORT_INDEX_TEMPLATE =
- "{\"index\":{\"_index\":\"%s\",\"_type\":\"%s\",\"_id\":\"%s\", \"_version\":\"%s\"}}\n";
- private static final String BULK_DELETE_TEMPLATE =
- "{ \"delete\": { \"_index\": \"%s\", \"_type\": \"%s\", \"_id\": \"%s\", \"_version\":\"%s\"}}\n";
-
- private static final String INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT =
- "Internal Error: ElasticSearch operation fault occurred";
- private static final Logger logger = LoggerFactory.getInstance()
- .getLogger(ElasticSearchHttpController.class.getName());
- private static final Logger metricsLogger = LoggerFactory.getInstance()
- .getMetricsLogger(ElasticSearchHttpController.class.getName());
- private final ElasticSearchConfig config;
-
- private static final String DEFAULT_TYPE = "default";
-
- private static ElasticSearchHttpController instance = null;
-
- protected AnalysisConfiguration analysisConfig;
-
- public static ElasticSearchHttpController getInstance() {
-
- synchronized (ElasticSearchHttpController.class) {
-
- if (instance == null) {
-
- Properties properties = new Properties();
- File file = new File(SearchDbConstants.ES_CONFIG_FILE);
- try {
- properties.load(new FileInputStream(file));
- } catch (Exception e) {
- logger.error(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL,
- "ElasticSearchHTTPController.getInstance",
- e.getLocalizedMessage());
- }
-
- ElasticSearchConfig config = new ElasticSearchConfig(properties);
- instance = new ElasticSearchHttpController(config);
- }
- }
-
- return instance;
- }
-
- public ElasticSearchHttpController(ElasticSearchConfig config) {
- this.config = config;
- analysisConfig = new AnalysisConfiguration();
-
- try {
- logger.info(SearchDbMsgs.ELASTIC_SEARCH_CONNECTION_ATTEMPT, getFullUrl("", false));
- checkConnection();
- logger.info(SearchDbMsgs.ELASTIC_SEARCH_CONNECTION_SUCCESS, getFullUrl("", false));
- } catch (Exception e) {
- logger.error(SearchDbMsgs.ELASTIC_SEARCH_CONNECTION_FAILURE, null, e,
- getFullUrl("", false), e.getMessage());
- }
- }
-
-
- public AnalysisConfiguration getAnalysisConfig() {
- return analysisConfig;
- }
-
- @Override
- public OperationResult createIndex(String index, DocumentSchema documentSchema) {
-
- OperationResult result = new OperationResult();
- result.setResultCode(500);
-
- try {
-
- // Submit the request to ElasticSearch to create the index using a
- // default document type.
- result = createTable(index,
- DEFAULT_TYPE,
- analysisConfig.getEsIndexSettings(),
- DocumentSchemaUtil.generateDocumentMappings(documentSchema));
-
- // ElasticSearch will return us a 200 code on success when we
- // want to report a 201, so translate the result here.
- result.setResultCode((result.getResultCode() == 200) ? 201 : result.getResultCode());
- if (isSuccess(result)) {
- result.setResult("{\"url\": \"" + ApiUtils.buildIndexUri(index) + "\"}");
- //result.setResult("{\"index\": \"" + index + ", \"type\": \"" + DEFAULT_TYPE + "\"}");
- }
-
- } catch (DocumentStoreOperationException e) {
-
- result.setFailureCause("Document store operation failure. Cause: " + e.getMessage());
- }
-
- return result;
- }
-
-
- @Override
- public OperationResult deleteIndex(String indexName) throws DocumentStoreOperationException {
-
- // Initialize operation result with a failure code / fault string
- OperationResult opResult = new OperationResult();
- opResult.setResultCode(500);
- opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
-
- // Grab the current time so we can use it to generate a metrics log.
- MdcOverride override = getStartTime(new MdcOverride());
-
- String fullUrl = getFullUrl("/" + indexName + "/", false);
- HttpURLConnection conn = initializeConnection(fullUrl);
-
- logger.debug("\nSending 'DELETE' request to URL : " + conn.getURL());
-
- try {
- conn.setRequestMethod("DELETE");
- } catch (ProtocolException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to set HTTP request method to DELETE.", e);
- }
-
- handleResponse(conn, opResult);
-
- // Generate a metrics log so we can track how long the operation took.
- metricsLogger.info(SearchDbMsgs.DELETE_INDEX_TIME,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
- override,
- indexName);
-
- shutdownConnection(conn);
-
- return opResult;
- }
-
-
- private OperationResult checkConnection() throws Exception {
-
- String fullUrl = getFullUrl("/_cluster/health", false);
- URL url = null;
- HttpURLConnection conn = null;
-
- url = new URL(fullUrl);
- conn = (HttpURLConnection) url.openConnection();
- conn.setRequestMethod("GET");
- conn.setDoOutput(true);
- logger.debug("getClusterHealth(), Sending 'GET' request to URL : " + url);
-
- int resultCode = conn.getResponseCode();
- logger.debug("getClusterHealth() response Code : " + resultCode);
- OperationResult opResult = new OperationResult();
- opResult.setResultCode(resultCode);
-
- shutdownConnection(conn);
-
- return opResult;
- }
-
- private String getFullUrl(String resourceUrl, boolean isSecure) {
-
- final String host = config.getIpAddress();
- final String port = config.getHttpPort();
-
- if (isSecure) {
- return String.format("https://%s:%s%s", host, port, resourceUrl);
- } else {
- return String.format("http://%s:%s%s", host, port, resourceUrl);
- }
- }
-
- private void shutdownConnection(HttpURLConnection connection) {
- if (connection == null) {
- return;
- }
-
- InputStream inputstream = null;
- OutputStream outputstream = null;
-
- try {
- inputstream = connection.getInputStream();
- } catch (IOException e) {
- logger.debug(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL, "shutdownConnection", e.getLocalizedMessage());
- } finally {
- if (inputstream != null) {
- try {
- inputstream.close();
- } catch (IOException e) {
- logger.debug(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL, "shutdownConnection",
- e.getLocalizedMessage());
- }
- }
- }
-
- try {
- outputstream = connection.getOutputStream();
- } catch (IOException e) {
- logger.debug(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL, "shutdownConnection", e.getLocalizedMessage());
- } finally {
- if (outputstream != null) {
- try {
- outputstream.close();
- } catch (IOException e) {
- logger.debug(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL, "shutdownConnection",
- e.getLocalizedMessage());
- }
- }
- }
-
- connection.disconnect();
- }
-
- //@Override
- protected OperationResult createTable(String indexName, String typeName,
- String indexSettings, String indexMappings)
- throws DocumentStoreOperationException {
-
- if (indexSettings == null) {
- logger.debug("No settings provided.");
- }
-
- if (indexMappings == null) {
- logger.debug("No mappings provided.");
- }
-
- OperationResult opResult = new OperationResult();
-
- // Initialize operation result with a failure code / fault string
- opResult.setResultCode(500);
- opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
-
- // Grab the current time so we can use it to generate a metrics log.
- MdcOverride override = getStartTime(new MdcOverride());
-
- String fullUrl = getFullUrl("/" + indexName + "/", false);
- HttpURLConnection conn = initializeConnection(fullUrl);
-
- try {
- conn.setRequestMethod("PUT");
- } catch (ProtocolException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to set HTTP request method to PUT.", e);
- }
-
- StringBuilder sb = new StringBuilder(128);
- sb.append("{ \"settings\" : ");
- sb.append(indexSettings);
- sb.append(",");
-
- sb.append("\"mappings\" : {");
- sb.append("\"" + typeName + "\" :");
- sb.append(indexMappings);
- sb.append("}}");
-
- attachContent(conn, sb.toString());
-
- logger.debug("\ncreateTable(), Sending 'PUT' request to URL : " + conn.getURL());
- logger.debug("Request content: " + sb.toString());
-
- handleResponse(conn, opResult);
-
- shutdownConnection(conn);
-
- // Generate a metrics log so we can track how long the operation took.
- metricsLogger.info(SearchDbMsgs.CREATE_INDEX_TIME,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
- override,
- indexName);
-
- return opResult;
- }
-
- @Override
- public DocumentOperationResult createDocument(String indexName,
- DocumentStoreDataEntity document,
- boolean allowImplicitIndexCreation)
- throws DocumentStoreOperationException {
-
- if(!allowImplicitIndexCreation) {
-
- // Before we do anything, make sure that the specified index actually exists in the
- // document store - we don't want to rely on ElasticSearch to fail the document
- // create because it could be configured to implicitly create a non-existent index,
- // which can lead to hard-to-debug behaviour with queries down the road.
- OperationResult indexExistsResult = checkIndexExistence(indexName);
- if ((indexExistsResult.getResultCode() < 200) || (indexExistsResult.getResultCode() >= 300)) {
-
- DocumentOperationResult opResult = new DocumentOperationResult();
- opResult.setResultCode(Status.NOT_FOUND.getStatusCode());
- opResult.setResult("Document Index '" + indexName + "' does not exist.");
- opResult.setFailureCause("Document Index '" + indexName + "' does not exist.");
- return opResult;
- }
- }
-
- if (document.getId() == null || document.getId().isEmpty()) {
- return createDocumentWithoutId(indexName, document);
- } else {
- return createDocumentWithId(indexName, document);
- }
- }
-
- private DocumentOperationResult createDocumentWithId(String indexName,
- DocumentStoreDataEntity document)
- throws DocumentStoreOperationException {
- // check if the document already exists
- DocumentOperationResult opResult = checkDocumentExistence(indexName, document.getId());
-
-
- if (opResult.getResultCode() != Status.NOT_FOUND.getStatusCode()) {
- if (opResult.getResultCode() == Status.OK.getStatusCode()) {
- opResult.setFailureCause("A document with the same id already exists.");
- } else {
- opResult.setFailureCause("Failed to verify a document with the specified id does not already exist.");
- }
- opResult.setResultCode(Status.CONFLICT.getStatusCode());
- return opResult;
- }
-
- opResult = new DocumentOperationResult();
- // Initialize operation result with a failure code / fault string
- opResult.setResultCode(500);
- opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
-
- // Grab the current time so we can use it to generate a metrics log.
- MdcOverride override = getStartTime(new MdcOverride());
-
- String fullUrl = getFullUrl("/" + indexName + "/" + DEFAULT_TYPE
- + "/" + document.getId(), false);
- HttpURLConnection conn = initializeConnection(fullUrl);
-
- try {
- conn.setRequestMethod("PUT");
- } catch (ProtocolException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to set HTTP request method to PUT.", e);
- }
-
- attachDocument(conn, document);
-
- logger.debug("Sending 'PUT' request to: " + conn.getURL());
-
- handleResponse(conn, opResult);
- buildDocumentResult(opResult, indexName);
-
- // Generate a metrics log so we can track how long the operation took.
- metricsLogger.info(SearchDbMsgs.CREATE_DOCUMENT_TIME,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
- override,
- indexName);
-
- shutdownConnection(conn);
-
- return opResult;
-
- }
-
- private DocumentOperationResult createDocumentWithoutId(String indexName,
- DocumentStoreDataEntity document)
- throws DocumentStoreOperationException {
-
- DocumentOperationResult response = new DocumentOperationResult();
- // Initialize operation result with a failure code / fault string
- response.setResultCode(500);
- response.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
-
- // Grab the current time so we can use it to generate a metrics log.
- MdcOverride override = getStartTime(new MdcOverride());
-
- String fullUrl = getFullUrl("/" + indexName + "/" + DEFAULT_TYPE, false);
- HttpURLConnection conn = initializeConnection(fullUrl);
-
- try {
- conn.setRequestMethod("POST");
- } catch (ProtocolException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to set HTTP request method to POST.", e);
- }
-
- attachDocument(conn, document);
-
- logger.debug("Sending 'POST' request to: " + conn.getURL());
-
- handleResponse(conn, response);
- buildDocumentResult(response, indexName);
-
- // Generate a metrics log so we can track how long the operation took.
- metricsLogger.info(SearchDbMsgs.CREATE_DOCUMENT_TIME,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, response.getResultCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, response.getResult()),
- override,
- indexName);
-
- shutdownConnection(conn);
-
- return response;
- }
-
- private void attachDocument(HttpURLConnection conn, DocumentStoreDataEntity doc)
- throws DocumentStoreOperationException {
- conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
- conn.setRequestProperty("Connection", "Close");
-
- attachContent(conn, doc.getContentInJson());
- }
-
- private DocumentOperationResult checkDocumentExistence(String indexName,
- String docId)
- throws DocumentStoreOperationException {
- DocumentOperationResult opResult = new DocumentOperationResult();
-
- // Initialize operation result with a failure code / fault string
- opResult.setResultCode(500);
-
- // Grab the current time so we can use it to generate a metrics log.
- MdcOverride override = getStartTime(new MdcOverride());
-
- String fullUrl = getFullUrl("/" + indexName + "/" + DEFAULT_TYPE + "/" + docId, false);
- HttpURLConnection conn = initializeConnection(fullUrl);
-
- try {
- conn.setRequestMethod("HEAD");
- } catch (ProtocolException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to set HTTP request method to HEAD.", e);
- }
-
- logger.debug("Sending 'HEAD' request to: " + conn.getURL());
-
- int resultCode;
- try {
- resultCode = conn.getResponseCode();
- } catch (IOException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to get the response code from the connection.", e);
- }
-
- logger.debug("Response Code : " + resultCode);
-
- opResult.setResultCode(resultCode);
-
- // Generate a metrics log so we can track how long the operation took.
- metricsLogger.info(SearchDbMsgs.GET_DOCUMENT_TIME,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
- override,
- indexName,
- docId);
-
- shutdownConnection(conn);
-
- return opResult;
- }
-
- @Override
- public DocumentOperationResult updateDocument(String indexName,
- DocumentStoreDataEntity document,
- boolean allowImplicitIndexCreation)
- throws DocumentStoreOperationException {
-
- if(!allowImplicitIndexCreation) {
-
- // Before we do anything, make sure that the specified index actually exists in the
- // document store - we don't want to rely on ElasticSearch to fail the document
- // create because it could be configured to implicitly create a non-existent index,
- // which can lead to hard-to-debug behaviour with queries down the road.
- OperationResult indexExistsResult = checkIndexExistence(indexName);
- if ((indexExistsResult.getResultCode() < 200) || (indexExistsResult.getResultCode() >= 300)) {
-
- DocumentOperationResult opResult = new DocumentOperationResult();
- opResult.setResultCode(Status.NOT_FOUND.getStatusCode());
- opResult.setResult("Document Index '" + indexName + "' does not exist.");
- opResult.setFailureCause("Document Index '" + indexName + "' does not exist.");
- return opResult;
- }
- }
-
- DocumentOperationResult opResult = new DocumentOperationResult();
-
- // Initialize operation result with a failure code / fault string
- opResult.setResultCode(500);
- opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
-
- // Grab the current time so we can use it to generate a metrics log.
- MdcOverride override = getStartTime(new MdcOverride());
-
- String fullUrl = getFullUrl("/" + indexName + "/" + DEFAULT_TYPE + "/" + document.getId()
- + "?version=" + document.getVersion(), false);
- HttpURLConnection conn = initializeConnection(fullUrl);
-
- try {
- conn.setRequestMethod("PUT");
- } catch (ProtocolException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to set HTTP request method to PUT.", e);
- }
-
- attachDocument(conn, document);
-
- logger.debug("Sending 'PUT' request to: " + conn.getURL());
-
- handleResponse(conn, opResult);
- buildDocumentResult(opResult, indexName);
-
- // Generate a metrics log so we can track how long the operation took.
- metricsLogger.info(SearchDbMsgs.UPDATE_DOCUMENT_TIME,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
- override,
- indexName,
- document.getId());
-
- shutdownConnection(conn);
-
- return opResult;
- }
-
- @Override
- public DocumentOperationResult deleteDocument(String indexName, DocumentStoreDataEntity document)
- throws DocumentStoreOperationException {
- DocumentOperationResult opResult = new DocumentOperationResult();
-
- // Initialize operation result with a failure code / fault string
- opResult.setResultCode(500);
- opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
-
- // Grab the current time so we can use it to generate a metrics log.
- MdcOverride override = getStartTime(new MdcOverride());
-
- String fullUrl = getFullUrl("/" + indexName + "/" + DEFAULT_TYPE + "/" + document.getId()
- + "?version=" + document.getVersion(), false);
- HttpURLConnection conn = initializeConnection(fullUrl);
-
- try {
- conn.setRequestMethod("DELETE");
- } catch (ProtocolException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to set HTTP request method to DELETE.", e);
- }
-
- logger.debug("\nSending 'DELETE' request to " + conn.getURL());
-
- handleResponse(conn, opResult);
- buildDocumentResult(opResult, indexName);
- // Suppress the etag and url in the delete response, as they are not required.
- if (opResult.getDocument() != null) {
- opResult.getDocument().setEtag(null);
- opResult.getDocument().setUrl(null);
- }
-
- // Generate a metrics log so we can track how long the operation took.
- metricsLogger.info(SearchDbMsgs.DELETE_DOCUMENT_TIME,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
- override,
- indexName,
- document.getId());
-
- shutdownConnection(conn);
-
- return opResult;
- }
-
- @Override
- public DocumentOperationResult getDocument(String indexName, DocumentStoreDataEntity document)
- throws DocumentStoreOperationException {
- DocumentOperationResult opResult = new DocumentOperationResult();
-
- // Initialize operation result with a failure code / fault string
- opResult.setResultCode(500);
- opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
-
- // Grab the current time so we can use it to generate a metrics log.
- MdcOverride override = getStartTime(new MdcOverride());
-
- String fullUrl = null;
- if (document.getVersion() == null) {
- fullUrl = getFullUrl("/" + indexName + "/" + DEFAULT_TYPE + "/" + document.getId(), false);
- } else {
- fullUrl = getFullUrl("/" + indexName + "/" + DEFAULT_TYPE + "/" + document.getId()
- + "?version=" + document.getVersion(), false);
- }
- HttpURLConnection conn = initializeConnection(fullUrl);
-
- logger.debug("\nSending 'GET' request to: " + conn.getURL());
-
- handleResponse(conn, opResult);
- buildDocumentResult(opResult, indexName);
-
- // Generate a metrics log so we can track how long the operation took.
- metricsLogger.info(SearchDbMsgs.GET_DOCUMENT_TIME,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
- override,
- indexName,
- document.getId());
-
- shutdownConnection(conn);
-
- return opResult;
- }
-
- public SearchOperationResult search(String indexName, String queryString)
- throws DocumentStoreOperationException {
- SearchOperationResult opResult = new SearchOperationResult();
-
- // Initialize operation result with a failure code / fault string
- opResult.setResultCode(500);
- opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
-
- String fullUrl = getFullUrl("/" + indexName + "/_search" + "?" + queryString, false);
-
- // Grab the current time so we can use it to generate a metrics log.
- MdcOverride override = getStartTime(new MdcOverride());
-
- HttpURLConnection conn = initializeConnection(fullUrl);
-
- try {
- conn.setRequestMethod("GET");
- } catch (ProtocolException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to set HTTP request method to GET.", e);
- }
-
- logger.debug("\nsearch(), Sending 'GET' request to URL : " + conn.getURL());
-
- handleResponse(conn, opResult);
- buildSearchResult(opResult, indexName);
-
-
- metricsLogger.info(SearchDbMsgs.QUERY_DOCUMENT_TIME,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
- override,
- indexName,
- queryString);
-
- shutdownConnection(conn);
-
- return opResult;
- }
-
- public SearchOperationResult searchWithPayload(String indexName, String query)
- throws DocumentStoreOperationException {
- SearchOperationResult opResult = new SearchOperationResult();
-
- if (logger.isDebugEnabled()) {
- logger.debug("Querying index: " + indexName + " with query string: " + query);
- }
-
- // Initialize operation result with a failure code / fault string
- opResult.setResultCode(500);
- opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
-
- String fullUrl = getFullUrl("/" + indexName + "/_search", false);
-
- // Grab the current time so we can use it to generate a metrics log.
- MdcOverride override = getStartTime(new MdcOverride());
-
- HttpURLConnection conn = initializeConnection(fullUrl);
-
- try {
- conn.setRequestMethod("POST");
- } catch (ProtocolException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to set HTTP request method to POST.", e);
- }
-
- attachContent(conn, query);
-
- logger.debug("\nsearch(), Sending 'POST' request to URL : " + conn.getURL());
- logger.debug("Request body = Elasticsearch query = " + query);
-
- handleResponse(conn, opResult);
- buildSearchResult(opResult, indexName);
-
- metricsLogger.info(SearchDbMsgs.QUERY_DOCUMENT_TIME,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
- override,
- indexName,
- query);
-
- shutdownConnection(conn);
-
- return opResult;
- }
-
- private void attachContent(HttpURLConnection conn, String content)
- throws DocumentStoreOperationException {
- OutputStream outputStream = null;
- OutputStreamWriter out = null;
-
- try {
- outputStream = conn.getOutputStream();
- } catch (IOException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to get connection output stream.", e);
- }
-
- out = new OutputStreamWriter(outputStream);
-
- try {
- out.write(content);
- out.close();
- } catch (IOException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to write to the output stream.", e);
- }
- }
-
- private HttpURLConnection initializeConnection(String fullUrl)
- throws DocumentStoreOperationException {
- URL url = null;
- HttpURLConnection conn = null;
-
- try {
- url = new URL(fullUrl);
- } catch (MalformedURLException e) {
- throw new DocumentStoreOperationException("Error building a URL with " + url, e);
- }
-
- try {
- conn = (HttpURLConnection) url.openConnection();
- conn.setDoOutput(true);
- } catch (IOException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to open connection to URL " + url, e);
- }
-
- return conn;
- }
-
- private void handleResponse(HttpURLConnection conn, OperationResult opResult)
- throws DocumentStoreOperationException {
- int resultCode = 200;
-
- try {
- resultCode = conn.getResponseCode();
- } catch (IOException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to get the response code from the connection.", e);
- }
-
- logger.debug("Response Code : " + resultCode);
-
- InputStream inputStream = null;
-
- if (!(resultCode >= 200 && resultCode <= 299)) { // 2xx response indicates success
- inputStream = conn.getErrorStream();
- } else {
- try {
- inputStream = conn.getInputStream();
- } catch (IOException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to get the response input stream.", e);
- }
- }
-
- InputStreamReader inputstreamreader = new InputStreamReader(inputStream);
- BufferedReader bufferedreader = new BufferedReader(inputstreamreader);
-
- StringBuilder result = new StringBuilder(128);
- String string = null;
-
- try {
- while ((string = bufferedreader.readLine()) != null) {
- result.append(string).append("\n");
- }
- } catch (IOException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed getting the response body payload.", e);
- }
-
- if (resultCode == Status.CONFLICT.getStatusCode()) {
- opResult.setResultCode(Status.PRECONDITION_FAILED.getStatusCode());
- } else {
- opResult.setResultCode(resultCode);
- }
- if (logger.isDebugEnabled()) {
- logger.debug("Raw result string from ElasticSearch = " + result.toString());
- }
- opResult.setResult(result.toString());
- opResult.setResultVersion(extractVersion(result.toString()));
- }
-
- private String extractVersion(String result) throws DocumentStoreOperationException {
-
- JSONParser parser = new JSONParser();
- String version = null;
- try {
- JSONObject root = (JSONObject) parser.parse(result);
- if (root.get("_version") != null) {
- version = root.get("_version").toString();
- }
-
- } catch (ParseException e) {
-
- // Not all responses from ElasticSearch include a version, so
- // if we don't get one back, just return an empty string rather
- // than trigger a false failure.
- version = "";
- }
- return version;
- }
-
- /**
- * This convenience method gets the current system time and stores
- * it in an attribute in the supplied {@link MdcOverride} object so
- * that it can be used later by the metrics logger.
- *
- * @param override - The {@link MdcOverride} object to update.
- * @return - The supplied {@link MdcOverride} object.
- */
- private MdcOverride getStartTime(MdcOverride override) {
-
- // Grab the current time...
- long startTimeInMs = System.currentTimeMillis();
-
- // ...and add it as an attribute to the supplied MDC Override
- // object.
- SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
- override.addAttribute(MdcContext.MDC_START_TIME, formatter.format(startTimeInMs));
-
- // Return the MdcOverride object that we were passed.
- // This looks odd, but it allows us to do stuff like:
- //
- // MdcOverride ov = getStartTime(new MdcOverride())
- //
- // which is quite handy, but also allows us to pass in an existing
- // MdcOverride object which already has some attributes set.
- return override;
- }
-
- private boolean isSuccess(OperationResult result) {
-
- return isSuccessCode(result.getResultCode());
- }
-
-
- private boolean isSuccessCode(int statusCode) {
- return ((statusCode >= 200) && (statusCode < 300));
- }
-
-
- @Override
- public OperationResult performBulkOperations(BulkRequest[] requests)
- throws DocumentStoreOperationException {
-
- if (logger.isDebugEnabled()) {
- String dbgString = "ESController: performBulkOperations - Operations: ";
-
- for (BulkRequest request : requests) {
- dbgString += "[" + request.toString() + "] ";
- }
-
- logger.debug(dbgString);
- }
-
- // Grab the current time so we can use it to generate a metrics log.
- MdcOverride override = getStartTime(new MdcOverride());
-
- // Parse the supplied set of operations: iterate over the list of operations
- // which we were provided and translate them into a format that ElasticSearch
- // understands.
- int opCount = 0;
- StringBuilder esOperationSet = new StringBuilder(128);
- List<ElasticSearchResultItem> rejected = new ArrayList<ElasticSearchResultItem>();
- for (BulkRequest request : requests) {
-
- // Convert the request to the syntax ElasticSearch likes.
- if (buildEsOperation(request, esOperationSet, rejected)) {
- opCount++;
- }
- }
-
- ElasticSearchBulkOperationResult opResult = null;
- if (opCount > 0) {
-
- // Open an HTTP connection to the ElasticSearch back end.
- String fullUrl = getFullUrl("/_bulk", false);
- URL url;
- HttpURLConnection conn;
- try {
-
- url = new URL(fullUrl);
- conn = (HttpURLConnection) url.openConnection();
- conn.setRequestMethod("PUT");
- conn.setDoOutput(true);
- conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
- conn.setRequestProperty("Connection", "Close");
-
- } catch (IOException e) {
-
- logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, e.getMessage());
- if (logger.isDebugEnabled()) {
- logger.debug(Throwables.getStackTraceAsString(e));
- }
-
- throw new DocumentStoreOperationException("Failed to open connection to document store. Cause: "
- + e.getMessage(), e);
- }
-
- StringBuilder bulkResult = new StringBuilder(128);
- try {
- // Create an output stream to write our request to.
- OutputStreamWriter out = new OutputStreamWriter(conn.getOutputStream());
-
- if (logger.isDebugEnabled()) {
- logger.debug("ESController: Sending 'BULK' request to " + conn.getURL());
- logger.debug("ESController: operations: " + esOperationSet.toString().replaceAll("\n",
- "\\n"));
- }
-
- // Write the resulting request string to our output stream to send it to ElasticSearch.
- out.write(esOperationSet.toString());
- out.close();
-
- // Open an input stream on our connection in order to read back the results.
- InputStream is = conn.getInputStream();
- InputStreamReader inputstreamreader = new InputStreamReader(is);
- BufferedReader bufferedreader = new BufferedReader(inputstreamreader);
-
- // Read the contents of the input stream into our result string...
- String esResponseString = null;
-
- while ((esResponseString = bufferedreader.readLine()) != null) {
- bulkResult.append(esResponseString).append("\n");
- }
-
- } catch (IOException e) {
-
- logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, e.getMessage());
- if (logger.isDebugEnabled()) {
- StringWriter sw = new StringWriter();
- e.printStackTrace(new PrintWriter(sw));
- logger.debug(sw.toString());
- }
-
- throw new DocumentStoreOperationException("Failure interacting with document store. Cause: "
- + e.getMessage(), e);
- }
-
- if (logger.isDebugEnabled()) {
- logger.debug("ESController: Received result string from ElasticSearch: = "
- + bulkResult.toString());
- }
-
- // ...and marshal the resulting string into a Java object.
- try {
- opResult = marshallEsBulkResult(bulkResult.toString());
-
- } catch (IOException e) {
-
- logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, e.getMessage());
- if (logger.isDebugEnabled()) {
- logger.debug(Throwables.getStackTraceAsString(e));
- }
-
- throw new DocumentStoreOperationException("Failed to marshal response body. Cause: "
- + e.getMessage(), e);
- }
- }
-
- // Finally, build the operation result and return it to the caller.
- OperationResult result = new OperationResult();
- result.setResultCode(207);
- result.setResult(buildGenericBulkResultSet(opResult, rejected));
-
- // In the success case we don't want the entire result string to be
- // dumped into the metrics log, so truncate it.
- String resultStringForMetricsLog = result.getResult();
- if ((result.getResultCode() >= 200) && (result.getResultCode() < 300)) {
- resultStringForMetricsLog = resultStringForMetricsLog.substring(0,
- Math.min(resultStringForMetricsLog.length(), 85)) + "...";
- }
-
- metricsLogger.info(SearchDbMsgs.BULK_OPERATIONS_TIME,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, result.getResultCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, resultStringForMetricsLog),
- override);
-
- return result;
- }
-
-
- /**
- * This method converts a {@link BulkRequest} object into the newline-delimited
- * json (NDJSON) structure which can be understood by ElasticSearch. A rendered
- * example is sketched after this method.
- *
- * @param request - The request to be performed.
- * @param sb - The string builder to append the json data to.
- * @param fails - The list to which rejected operations are appended.
- * @return - true if an operation was appended to the string builder, false if
- * the request was rejected.
- * @throws DocumentStoreOperationException
- */
- private boolean buildEsOperation(BulkRequest request, StringBuilder sb,
- List<ElasticSearchResultItem> fails)
- throws DocumentStoreOperationException {
-
- boolean retVal = true;
-
- // What kind of operation are we performing?
- switch (request.getOperationType()) {
-
- // Create a new document.
- case CREATE:
-
- // Make sure that we were supplied a document payload.
- if (request.getOperation().getDocument() == null) {
-
- fails.add(generateRejectionEntry(request.getOperationType(),
- "Missing document payload",
- request.getIndex(),
- request.getId(),
- 400,
- request.getOperation().getMetaData().getUrl()));
- return false;
- }
-
- // Make sure that the supplied document URL is formatted
- // correctly.
- if (!ApiUtils.validateDocumentUri(request.getOperation().getMetaData().getUrl(), false)) {
- fails.add(generateRejectionEntry(request.getOperationType(),
- "Invalid document URL: " + request.getOperation().getMetaData().getUrl(),
- request.getIndex(),
- "",
- 400,
- request.getOperation().getMetaData().getUrl()));
- return false;
- }
-
- // Validate that the specified index actually exists before we
- // try to perform the create.
- if (!indexExists(ApiUtils.extractIndexFromUri(request.getOperation().getMetaData().getUrl()))) {
-
- fails.add(generateRejectionEntry(request.getOperationType(),
- "Specified resource does not exist: "
- + request.getOperation().getMetaData().getUrl(),
- request.getIndex(),
- request.getId(),
- 404,
- request.getOperation().getMetaData().getUrl()));
- return false;
- }
-
- // If we were not supplied an id for the new document, then leave the id
- // off of the operation and ElasticSearch will generate one for us.
- if (request.getId() == null) {
-
- sb.append(String.format(BULK_CREATE_WITHOUT_INDEX_TEMPLATE,
- request.getIndex(),
- DEFAULT_TYPE));
-
- // Otherwise, include the supplied id in the bulk operation that we
- // send to ElasticSearch.
- } else {
- sb.append(String.format(BULK_CREATE_WITH_INDEX_TEMPLATE,
- request.getIndex(),
- DEFAULT_TYPE,
- request.getId()));
- }
-
- try {
- // Append the document that we want to create.
- sb.append(request.getOperation().getDocument().toJson()).append("\n");
- } catch (JsonProcessingException e) {
- throw new DocumentStoreOperationException("Failure parsing document to json", e);
- }
-
- break;
-
- // Update an existing document.
- case UPDATE:
-
- // Make sure that we were supplied a document payload.
- if (request.getOperation().getDocument() == null) {
-
- fails.add(generateRejectionEntry(request.getOperationType(),
- "Missing document payload",
- request.getIndex(),
- request.getId(),
- 400,
- request.getOperation().getMetaData().getUrl()));
- return false;
- }
-
- // Make sure that the supplied document URL is formatted
- // correctly.
- if (!ApiUtils.validateDocumentUri(request.getOperation().getMetaData().getUrl(), true)) {
- fails.add(generateRejectionEntry(request.getOperationType(),
- "Invalid document URL: " + request.getOperation().getMetaData().getUrl(),
- request.getIndex(),
- "",
- 400,
- request.getOperation().getMetaData().getUrl()));
- return false;
- }
-
- // Validate that the specified index actually exists before we
- // try to perform the update.
- if (!indexExists(request.getIndex())) {
-
- fails.add(generateRejectionEntry(request.getOperationType(),
- "Specified resource does not exist: "
- + request.getOperation().getMetaData().getUrl(),
- request.getIndex(),
- request.getId(),
- 404,
- request.getOperation().getMetaData().getUrl()));
- return false;
- }
-
- // Validate that the document we are trying to update actually
- // exists before we try to perform the update.
- if (!documentExists(request.getIndex(), request.getId())) {
-
- fails.add(generateRejectionEntry(request.getOperationType(),
- "Specified resource does not exist: "
- + request.getOperation().getMetaData().getUrl(),
- request.getIndex(),
- request.getId(),
- 404,
- request.getOperation().getMetaData().getUrl()));
- return false;
- }
-
- // It is mandatory that a version be supplied for an update operation,
- // so validate that now.
- if (request.getOperation().getMetaData().getEtag() == null) {
-
- fails.add(generateRejectionEntry(request.getOperationType(),
- "Missing mandatory ETag field",
- request.getIndex(),
- request.getId(),
- 400,
- request.getOperation().getMetaData().getUrl()));
- return false;
- }
-
- // Generate the update request...
- sb.append(String.format(BULK_IMPORT_INDEX_TEMPLATE,
- request.getIndex(),
- DEFAULT_TYPE,
- request.getId(),
- request.getOperation().getMetaData().getEtag()));
-
- // ...and append the document that we want to update.
- try {
- sb.append(request.getOperation().getDocument().toJson()).append("\n");
- } catch (JsonProcessingException e) {
- throw new DocumentStoreOperationException("Failure parsing document to json", e);
- }
- break;
-
- // Delete an existing document.
- case DELETE:
-
- // Make sure that the supplied document URL is formatted
- // correctly.
- if (!ApiUtils.validateDocumentUri(request.getOperation().getMetaData().getUrl(), true)) {
- fails.add(generateRejectionEntry(request.getOperationType(),
- "Invalid document URL: " + request.getOperation().getMetaData().getUrl(),
- request.getIndex(),
- "",
- 400,
- request.getOperation().getMetaData().getUrl()));
- return false;
- }
-
- // Validate that the specified index actually exists before we
- // try to perform the delete.
- if (!indexExists(request.getIndex())) {
-
- fails.add(generateRejectionEntry(request.getOperationType(),
- "Specified resource does not exist: "
- + request.getOperation().getMetaData().getUrl(),
- request.getIndex(),
- request.getId(),
- 404,
- request.getOperation().getMetaData().getUrl()));
- return false;
- }
-
- // Validate that the document we are trying to delete actually
- // exists before we try to perform the delete.
- if (!documentExists(request.getIndex(), request.getId())) {
-
- fails.add(generateRejectionEntry(request.getOperationType(),
- "Specified resource does not exist: "
- + request.getOperation().getMetaData().getUrl(),
- request.getIndex(),
- request.getId(),
- 404,
- request.getOperation().getMetaData().getUrl()));
- return false;
- }
-
- // It is mandatory that a version be supplied for a delete operation,
- // so validate that now.
- if (request.getOperation().getMetaData().getEtag() == null) {
-
- fails.add(generateRejectionEntry(request.getOperationType(),
- "Missing mandatory ETag field",
- request.getIndex(),
- request.getId(),
- 400,
- request.getOperation().getMetaData().getUrl()));
- return false;
- }
-
- // Generate the delete request.
- sb.append(String.format(BULK_DELETE_TEMPLATE,
- request.getIndex(),
- DEFAULT_TYPE,
- request.getId(),
- request.getOperation().getMetaData().getEtag()));
- break;
- default:
- }
-
- return retVal;
- }
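-
- // A minimal sketch of the NDJSON appended for an UPDATE request, assuming a
- // hypothetical index "widgets", id "doc-1", ETag "3" and document
- // {"name":"example widget"}:
- //
- //   {"index":{"_index":"widgets","_type":"default","_id":"doc-1", "_version":"3"}}
- //   {"name":"example widget"}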
-
- private boolean indexExists(String index) throws DocumentStoreOperationException {
-
- OperationResult indexExistsResult = checkIndexExistence(index);
-
- return ((indexExistsResult.getResultCode() >= 200)
- && (indexExistsResult.getResultCode() < 300));
- }
-
- private boolean documentExists(String index, String id) throws DocumentStoreOperationException {
-
- OperationResult docExistsResult = checkDocumentExistence(index, id);
-
- return ((docExistsResult.getResultCode() >= 200) && (docExistsResult.getResultCode() < 300));
- }
-
- /**
- * This method constructs a status entry for a bulk operation which has
- * been rejected before even sending it to the document store.
- *
- * @param opType - The type of operation (CREATE, UPDATE or DELETE) being rejected.
- * @param rejectReason - A message describing why the operation was rejected.
- * @param index - The index that the operation was targeting.
- * @param anId - The identifier associated with the document being
- * acted on.
- * @param statusCode - An HTTP status code.
- * @param originalUrl - The document URL supplied with the original operation.
- * @return - A result set item.
- */
- private ElasticSearchResultItem generateRejectionEntry(OperationType opType,
- String rejectReason,
- String index,
- String anId,
- int statusCode,
- String originalUrl) {
-
- ElasticSearchError err = new ElasticSearchError();
- err.setReason(rejectReason);
-
- ElasticSearchOperationStatus op = new ElasticSearchOperationStatus();
- op.setIndex(index);
- op.setId(anId);
- op.setStatus(statusCode);
- op.setError(err);
- op.setAdditionalProperties(ElasticSearchResultItem.REQUEST_URL, originalUrl);
-
- ElasticSearchResultItem rejectionResult = new ElasticSearchResultItem();
-
- switch (opType) {
- case CREATE:
- rejectionResult.setCreate(op);
- break;
- case UPDATE:
- rejectionResult.setIndex(op);
- break;
- case DELETE:
- rejectionResult.setDelete(op);
- break;
- default:
- }
-
- return rejectionResult;
- }
-
-
- /**
- * This method takes the json structure returned from ElasticSearch in
- * response to a bulk operations request and marshals it into a Java
- * object.
- *
- * @param jsonResult - The bulk operations response returned from
- * ElasticSearch.
- * @return - The marshalled response.
- * @throws JsonParseException
- * @throws JsonMappingException
- * @throws IOException
- */
- private ElasticSearchBulkOperationResult marshallEsBulkResult(String jsonResult)
- throws JsonParseException, JsonMappingException, IOException {
-
- if (jsonResult != null) {
- if (logger.isDebugEnabled()) {
- logger.debug("ESController: Marshalling ES result set from json: "
- + jsonResult.replaceAll("\n", ""));
- }
-
- ObjectMapper mapper = new ObjectMapper();
- mapper.setSerializationInclusion(Include.NON_EMPTY);
-
- return mapper.readValue(jsonResult, ElasticSearchBulkOperationResult.class);
- }
-
- return null;
- }
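-
- // A minimal sketch of the ElasticSearch _bulk response shape that is mapped onto
- // ElasticSearchBulkOperationResult above (values are hypothetical):
- //
- //   {"took": 12, "errors": false, "items": [
- //     {"create": {"_index": "widgets", "_type": "default", "_id": "doc-1",
- //                 "_version": 1, "status": 201}} ]}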
-
-
- /**
- * This method takes the marshalled ElasticSearch bulk response and
- * converts it into a generic response payload.
- *
- * @param esResult - ElasticSearch bulk operations response.
- * @return - A generic result set.
- */
- private String buildGenericBulkResultSet(ElasticSearchBulkOperationResult esResult,
- List<ElasticSearchResultItem> rejectedOps) {
-
- int totalOps = 0;
- int totalSuccess = 0;
- int totalFails = 0;
-
- if (logger.isDebugEnabled()) {
-
- logger.debug("ESController: Build generic result set. ES Results: "
- + ((esResult != null) ? esResult.toString() : "[]")
- + " Rejected Ops: " + rejectedOps.toString());
- }
-
- // Build a combined list of result items from the results returned
- // from ElasticSearch and the list of operations that we rejected
- // without sending to ElasticSearch.
- List<ElasticSearchResultItem> combinedResults = new ArrayList<ElasticSearchResultItem>();
- if (esResult != null) {
- combinedResults.addAll(Arrays.asList(esResult.getItems()));
- }
- combinedResults.addAll(rejectedOps);
-
- // Iterate over the individual results in the resulting result set.
- StringBuilder resultsBuilder = new StringBuilder();
- AtomicBoolean firstItem = new AtomicBoolean(true);
- for (ElasticSearchResultItem item : combinedResults) {
-
- // Increment the operation counts.
- totalOps++;
- if (isSuccessCode(item.operationStatus().getStatus())) {
- totalSuccess++;
- } else {
- totalFails++;
- }
-
- // Prepend a comma to our response string unless this is the
- // first result in the set.
- if (!firstItem.compareAndSet(true, false)) {
- resultsBuilder.append(", ");
- }
-
- // Append the current result as a generic json structure.
- resultsBuilder.append(item.toJson());
- }
-
- // Now, build the result string and return it.
- String responseBody = "{ \"total_operations\": " + totalOps + ", "
- + "\"total_success\": " + totalSuccess + ", "
- + "\"total_fails\": " + totalFails + ", "
- + "\"results\": ["
- + resultsBuilder.toString()
- + "]}";
-
- return responseBody;
- }
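-
- // A minimal sketch of the generic result set built above (counts and items are
- // hypothetical):
- //
- //   { "total_operations": 2, "total_success": 1, "total_fails": 1,
- //     "results": [ { ... }, { ... } ] }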
-
-
- /**
- * This method queries ElasticSearch to determine if the supplied
- * index is present in the document store.
- *
- * @param indexName - The index to look for.
- * @return - An operation result indicating the success or failure of
- * the check.
- * @throws DocumentStoreOperationException
- */
- public OperationResult checkIndexExistence(String indexName)
- throws DocumentStoreOperationException {
-
- // Initialize operation result with a failure code / fault string
- OperationResult opResult = new OperationResult();
- opResult.setResultCode(500);
-
- // Grab the current time so we can use it to generate a metrics log.
- MdcOverride override = getStartTime(new MdcOverride());
-
- String fullUrl = getFullUrl("/" + indexName, false);
- HttpURLConnection conn = initializeConnection(fullUrl);
-
- try {
- conn.setRequestMethod("HEAD");
-
- } catch (ProtocolException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to set HTTP request method to HEAD.", e);
- }
-
- logger.debug("Sending 'HEAD' request to: " + conn.getURL());
-
- int resultCode;
- try {
- resultCode = conn.getResponseCode();
- } catch (IOException e) {
- shutdownConnection(conn);
- throw new DocumentStoreOperationException("Failed to get the response code from the connection.", e);
- }
- logger.debug("Response Code : " + resultCode);
-
- opResult.setResultCode(resultCode);
-
- // Generate a metrics log so we can track how long the operation took.
- metricsLogger.info(SearchDbMsgs.CHECK_INDEX_TIME,
- new LogFields()
- .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
- .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
- override,
- indexName);
-
- shutdownConnection(conn);
-
- return opResult;
- }
-
-
- private void buildDocumentResult(DocumentOperationResult result, String index)
- throws DocumentStoreOperationException {
-
- JSONParser parser = new JSONParser();
- JSONObject root;
- try {
- root = (JSONObject) parser.parse(result.getResult());
-
- if (result.getResultCode() >= 200 && result.getResultCode() <= 299) {
- // Success response object
- Document doc = new Document();
- doc.setEtag(result.getResultVersion());
- doc.setUrl(buildDocumentResponseUrl(index, root.get("_id").toString()));
-
- doc.setContent((JSONObject) root.get("_source"));
- result.setDocument(doc);
-
- } else {
- // Error response object
- JSONObject error = (JSONObject) root.get("error");
- if (error != null) {
- result.setError(new ErrorResult(error.get("type").toString(),
- error.get("reason").toString()));
- }
-
- }
- } catch (Exception e) {
- throw new DocumentStoreOperationException("Failed to parse Elastic Search response."
- + result.getResult());
- }
-
-
- }
-
- private String buildDocumentResponseUrl(String index, String id) {
- return ApiUtils.buildDocumentUri(index, id);
- }
-
- private void buildSearchResult(SearchOperationResult result, String index)
- throws DocumentStoreOperationException {
-
- JSONParser parser = new JSONParser();
- JSONObject root;
-
- try {
- root = (JSONObject) parser.parse(result.getResult());
- if (result.getResultCode() >= 200 && result.getResultCode() <= 299) {
- JSONObject hits = (JSONObject) root.get("hits");
- JSONArray hitArray = (JSONArray) hits.get("hits");
- SearchHits searchHits = new SearchHits();
- searchHits.setTotalHits(hits.get("total").toString());
- ArrayList<SearchHit> searchHitArray = new ArrayList<SearchHit>();
-
- for (int i = 0; i < hitArray.size(); i++) {
- JSONObject hit = (JSONObject) hitArray.get(i);
- SearchHit searchHit = new SearchHit();
- searchHit.setScore((hit.get("_score") != null) ? hit.get("_score").toString() : "");
- Document doc = new Document();
- if (hit.get("_version") != null) {
- doc.setEtag((hit.get("_version") != null) ? hit.get("_version").toString() : "");
- }
-
- doc.setUrl(buildDocumentResponseUrl(index, (hit.get("_id") != null)
- ? hit.get("_id").toString() : ""));
- doc.setContent((JSONObject) hit.get("_source"));
- searchHit.setDocument(doc);
- searchHitArray.add(searchHit);
- }
- searchHits.setHits(searchHitArray.toArray(new SearchHit[searchHitArray.size()]));
- result.setSearchResult(searchHits);
-
- JSONObject aggregations = (JSONObject) root.get("aggregations");
- if (aggregations != null) {
- AggregationResult[] aggResults =
- AggregationParsingUtil.parseAggregationResults(aggregations);
- AggregationResults aggs = new AggregationResults();
- aggs.setAggregations(aggResults);
- result.setAggregationResult(aggs);
- }
-
- // success
- } else {
- JSONObject error = (JSONObject) root.get("error");
- if (error != null) {
- result.setError(new ErrorResult(error.get("type").toString(),
- error.get("reason").toString()));
- }
- }
- } catch (Exception e) {
- throw new DocumentStoreOperationException("Failed to parse Elastic Search response."
- + result.getResult());
- }
-
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchOperationStatus.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchOperationStatus.java
deleted file mode 100644
index c463110..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchOperationStatus.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
-
-import com.fasterxml.jackson.annotation.JsonAnyGetter;
-import com.fasterxml.jackson.annotation.JsonAnySetter;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class ElasticSearchOperationStatus {
-
- private String index;
- private String type;
- private String id;
- private String version;
- private ElasticSearchShardStatus shards;
- private Integer status;
- private ElasticSearchError error;
-
- private Map<String, Object> additionalProperties = new HashMap<String, Object>();
-
-
- public ElasticSearchError getError() {
- return error;
- }
-
- public void setError(ElasticSearchError error) {
- this.error = error;
- }
-
- public Integer getStatus() {
- return status;
- }
-
- public void setStatus(Integer status) {
- this.status = status;
- }
-
- public ElasticSearchShardStatus getShards() {
- return shards;
- }
-
- public void setShards(ElasticSearchShardStatus shards) {
- this.shards = shards;
- }
-
- public String getIndex() {
- return index;
- }
-
- public void setIndex(String index) {
- this.index = index;
- }
-
- public String getType() {
- return type;
- }
-
- public void setType(String type) {
- this.type = type;
- }
-
- public String getId() {
- return id;
- }
-
- public void setId(String id) {
- this.id = id;
- }
-
- public String getVersion() {
- return version;
- }
-
- public void setVersion(String version) {
- this.version = version;
- }
-
- @JsonAnyGetter
- public Map<String, Object> getAdditionalProperties() {
- return additionalProperties;
- }
-
- @JsonAnySetter
- public void setAdditionalProperties(String name, Object value) {
- additionalProperties.put(name, value);
- }
-
- @Override
- public String toString() {
- return "ElasticSearchIndexStatus [index=" + index + ", type=" + type + ", id="
- + id + ", version=" + version + ", shards=" + shards + ", status=" + status + "]";
- }
-
-
-}
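The @JsonAnyGetter/@JsonAnySetter pair above lets Jackson capture response keys that have no matching field; the bulk-result code elsewhere in this tree reads the synthetic REQUEST_URL entry back out of that map. A small standalone sketch, assuming Jackson's ObjectMapper and an illustrative JSON literal:

    import com.fasterxml.jackson.databind.ObjectMapper;

    public class OperationStatusBindingSketch {

      public static void main(String[] args) throws Exception {
        // "index", "id", "version" and "status" bind to the typed setters;
        // REQUEST_URL has no setter and falls through to the any-setter map.
        String json = "{\"index\": \"widgets\", \"id\": \"doc-1\", \"version\": \"2\","
            + " \"status\": 201, \"REQUEST_URL\": \"/indexes/widgets/documents/doc-1\"}";

        ElasticSearchOperationStatus status =
            new ObjectMapper().readValue(json, ElasticSearchOperationStatus.class);

        System.out.println(status.getStatus());                                  // 201
        System.out.println(status.getAdditionalProperties().get("REQUEST_URL")); // captured key
      }
    }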
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchResultItem.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchResultItem.java
deleted file mode 100644
index 9b24c5d..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchResultItem.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
-
-import org.openecomp.sa.rest.ApiUtils;
-
-public class ElasticSearchResultItem {
-
- public static final String REQUEST_URL = "REQUEST_URL";
-
- private ElasticSearchOperationStatus create;
- private ElasticSearchOperationStatus index;
- private ElasticSearchOperationStatus delete;
-
- public ElasticSearchOperationStatus getCreate() {
- return create;
- }
-
-  public void setCreate(ElasticSearchOperationStatus create) {
-    this.create = create;
-  }
-
- public ElasticSearchOperationStatus getIndex() {
- return index;
- }
-
- public void setIndex(ElasticSearchOperationStatus index) {
- this.index = index;
- }
-
- public ElasticSearchOperationStatus getDelete() {
- return delete;
- }
-
- public void setDelete(ElasticSearchOperationStatus delete) {
- this.delete = delete;
- }
-
- public String operationType() {
-
- if (create != null) {
- return "create";
- }
- if (index != null) {
- return "update";
- }
- if (delete != null) {
- return "delete";
- }
-
- return "unknown";
- }
-
- public ElasticSearchOperationStatus operationStatus() {
-
- if (create != null) {
- return create;
- }
- if (index != null) {
- return index;
- }
- if (delete != null) {
- return delete;
- }
-
- return null;
- }
-
-
- public String toJson() {
- StringBuilder sb = new StringBuilder();
-
- sb.append("{");
-
- sb.append("\"operation\": \"").append(operationType()).append("\", ");
-
- if (operationStatus().getAdditionalProperties().containsKey(REQUEST_URL)) {
- sb.append("\"url\": \"").append(operationStatus().getAdditionalProperties()
- .get(REQUEST_URL)).append("\", ");
- } else {
- sb.append("\"url\": \"").append(ApiUtils.buildDocumentUri(operationStatus()
- .getIndex(), operationStatus().getId())).append("\", ");
- }
-
- // We don't want to include an etag field in the response in
- // the case of a delete, since that would imply that the client
- // could still access that version of the file in some manner
- // (which we are not supporting).
- if (!operationType().equals("delete")) {
- sb.append("\"etag\": \"").append(operationStatus().getVersion()).append("\", ");
- }
- sb.append("\"status-code\": \"").append(operationStatus().getStatus()).append("\", ");
-
- sb.append("\"status-message\": \"");
-
- if ((operationStatus().getStatus() >= 200) && (operationStatus().getStatus() < 300)) {
- sb.append("OK");
- } else {
- // Sometimes the error object doesn't get populated, so check
- // before we try to reference it...
-      if (operationStatus().getError() != null) {
-        sb.append(operationStatus().getError().getReason());
-      }
- }
- sb.append("\"");
- sb.append("}");
-
- return sb.toString();
- }
-
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder();
-
- sb.append("ElasticSearchItemStatus [");
- if (create != null) {
- sb.append("create " + create);
- } else if (index != null) {
- sb.append("index " + index);
- } else if (delete != null) {
- sb.append("delete " + index);
- }
- sb.append("]");
- return sb.toString();
- }
-
-}
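A short usage sketch of ElasticSearchResultItem.toJson() above. It is standalone and not part of the sources; the status values are made up, and the printed URLs depend on ApiUtils.buildDocumentUri(), which is defined elsewhere in this tree.

    public class ResultItemToJsonSketch {

      public static void main(String[] args) {
        ElasticSearchOperationStatus status = new ElasticSearchOperationStatus();
        status.setIndex("widgets");
        status.setId("doc-1");
        status.setVersion("4");
        status.setStatus(200);

        ElasticSearchResultItem updated = new ElasticSearchResultItem();
        updated.setIndex(status);
        // "operation" is reported as "update", the etag carries the document
        // version, and the 2xx status yields an "OK" status-message.
        System.out.println(updated.toJson());

        ElasticSearchResultItem deleted = new ElasticSearchResultItem();
        deleted.setDelete(status);
        // Same shape, but toJson() deliberately omits the "etag" field for deletes.
        System.out.println(deleted.toJson());
      }
    }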
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchShardStatus.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchShardStatus.java
deleted file mode 100644
index 158e295..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchShardStatus.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
-
-public class ElasticSearchShardStatus {
-
- private int total;
- private int successful;
- private int failed;
-
- public int getTotal() {
- return total;
- }
-
- public void setTotal(int total) {
- this.total = total;
- }
-
- public int getSuccessful() {
- return successful;
- }
-
- public void setSuccessful(int successful) {
- this.successful = successful;
- }
-
- public int getFailed() {
- return failed;
- }
-
- public void setFailed(int failed) {
- this.failed = failed;
- }
-
- @Override
- public String toString() {
- return "ElasticSearchShardStatus [total=" + total + ", successful=" + successful
- + ", failed=" + failed + "]";
- }
-
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/exception/DocumentStoreOperationException.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/exception/DocumentStoreOperationException.java
deleted file mode 100644
index 441f1d1..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/exception/DocumentStoreOperationException.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.elasticsearch.exception;
-
-public class DocumentStoreOperationException extends Exception {
-
- private static final long serialVersionUID = -7689309913743200670L;
-
- public DocumentStoreOperationException(String message, Exception ex) {
- super(message, ex);
- }
-
- public DocumentStoreOperationException(String message) {
- super(message);
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationBucket.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationBucket.java
deleted file mode 100644
index d02e526..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationBucket.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.entity;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import edu.emory.mathcs.backport.java.util.Arrays;
-
-public class AggregationBucket {
- private Object key;
-
- @JsonProperty("formatted-key")
- private String formattedKey;
-
- private Number count;
-
- @JsonProperty("sub-aggregations")
- private AggregationResult[] subAggregationResult;
-
- public Object getKey() {
- return key;
- }
-
- public void setKey(Object key) {
- this.key = key;
- }
-
- public String getFormattedKey() {
- return formattedKey;
- }
-
- public void setFormattedKey(String formattedKey) {
- this.formattedKey = formattedKey;
- }
-
- public Number getCount() {
- return count;
- }
-
- public void setCount(Number count) {
- this.count = count;
- }
-
- public AggregationResult[] getSubAggregationResult() {
- return subAggregationResult;
- }
-
- public void setSubAggregationResult(AggregationResult[] subAggregationResult) {
- this.subAggregationResult = subAggregationResult;
- }
-
- @Override
- public String toString() {
- return "AggregationBucket [key=" + key + ", formattedKey=" + formattedKey + ", count=" + count
- + ", subAggregationResult=" + Arrays.toString(subAggregationResult) + "]";
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationResult.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationResult.java
deleted file mode 100644
index 0c57de4..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationResult.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.entity;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import edu.emory.mathcs.backport.java.util.Arrays;
-
-public class AggregationResult {
- private String name;
-
- private Number count;
-
- private AggregationBucket[] buckets;
-
- @JsonProperty("nested-aggregations")
- private AggregationResult[] nestedAggregations;
-
- public String getName() {
- return name;
- }
-
- public void setName(String name) {
- this.name = name;
- }
-
- public AggregationBucket[] getBuckets() {
- return buckets;
- }
-
- public void setBuckets(AggregationBucket[] buckets) {
- this.buckets = buckets;
- }
-
- public AggregationResult[] getNestedAggregations() {
- return nestedAggregations;
- }
-
- public void setNestedAggregations(AggregationResult[] nestedAggregations) {
- this.nestedAggregations = nestedAggregations;
- }
-
- public Number getCount() {
- return count;
- }
-
- public void setCount(Number count) {
- this.count = count;
- }
-
- @Override
- public String toString() {
- return "AggregationResult [name=" + name + ", count=" + count + ", buckets="
- + Arrays.toString(buckets) + ", nestedAggregations=" + Arrays.toString(nestedAggregations)
- + "]";
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationResults.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationResults.java
deleted file mode 100644
index 0ea1088..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationResults.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.entity;
-
-import java.util.Arrays;
-
-public class AggregationResults {
- private AggregationResult[] aggregations;
-
- public AggregationResult[] getAggregations() {
- return aggregations;
- }
-
- public void setAggregations(AggregationResult[] aggregations) {
- this.aggregations = aggregations;
- }
-
- @Override
- public String toString() {
- return "AggregationResults [aggregations=" + Arrays.toString(aggregations) + "]";
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/Document.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/Document.java
deleted file mode 100644
index 2634846..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/Document.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.entity;
-
-import org.json.simple.JSONObject;
-
-public class Document {
- private String etag;
- private String url;
-
- private JSONObject content;
-
- public String getEtag() {
- return etag;
- }
-
- public void setEtag(String etag) {
- this.etag = etag;
- }
-
- public String getUrl() {
- return url;
- }
-
- public void setUrl(String url) {
- this.url = url;
- }
-
- public JSONObject getContent() {
- return content;
- }
-
- public void setContent(JSONObject content) {
- this.content = content;
- }
-
- @Override
- public String toString() {
- return "Document [etag=" + etag + ", url=" + url + "]";
- }
-
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/DocumentOperationResult.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/DocumentOperationResult.java
deleted file mode 100644
index 8400700..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/DocumentOperationResult.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.entity;
-
-public class DocumentOperationResult extends OperationResult {
- private Document document;
-
- public Document getDocument() {
- return document;
- }
-
- public void setDocument(Document document) {
- this.document = document;
- }
-
- @Override
- public String toString() {
- return "DocumentOperationResult [document=" + document + "]";
- }
-
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/ErrorResult.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/ErrorResult.java
deleted file mode 100644
index c930072..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/ErrorResult.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.entity;
-
-public class ErrorResult {
-
- private String type;
- private String reason;
-
-
- public ErrorResult(String type, String reason) {
- super();
- this.type = type;
- this.reason = reason;
- }
-
- public String getType() {
- return type;
- }
-
- public void setType(String type) {
- this.type = type;
- }
-
- public String getReason() {
- return reason;
- }
-
- public void setReason(String reason) {
- this.reason = reason;
- }
-
- @Override
- public String toString() {
- return "ErrorResponse [type=" + type + ", reason=" + reason + "]";
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/OperationResult.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/OperationResult.java
deleted file mode 100644
index 20822d8..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/OperationResult.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.entity;
-
-public class OperationResult {
-
- private int resultCode;
-
- private String failureCause;
- private String resultVersion;
- private String result;
- private ErrorResult error;
-
- public int getResultCode() {
- return resultCode;
- }
-
- public void setResultCode(int resultCode) {
- this.resultCode = resultCode;
- }
-
- public String getFailureCause() {
- return failureCause;
- }
-
- public void setFailureCause(String failureCause) {
- this.failureCause = failureCause;
- }
-
- public String getResultVersion() {
- return resultVersion;
- }
-
- public void setResultVersion(String resultVersion) {
- this.resultVersion = resultVersion;
- }
-
- public String getResult() {
- return result;
- }
-
- public void setResult(String result) {
- this.result = result;
- }
-
- public ErrorResult getError() {
- return error;
- }
-
- public void setError(ErrorResult error) {
- this.error = error;
- }
-
- @Override
- public String toString() {
- return "OperationResult [resultCode=" + resultCode + ", failureCause=" + failureCause
- + ", resultVersion=" + resultVersion + ", result=" + result + ", error=" + error + "]";
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchHit.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchHit.java
deleted file mode 100644
index 5ee6b40..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchHit.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.entity;
-
-public class SearchHit {
- private String score;
-  private Document document;
-
- public String getScore() {
- return score;
- }
-
- public void setScore(String score) {
- this.score = score;
- }
-
- public Document getDocument() {
- return document;
- }
-
- public void setDocument(Document document) {
- this.document = document;
- }
-
- @Override
- public String toString() {
- return "SearchHit [score=" + score + ", document=" + document + "]";
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchHits.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchHits.java
deleted file mode 100644
index 8aa6f2b..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchHits.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.entity;
-
-import java.util.Arrays;
-
-public class SearchHits {
- private String totalHits;
- private SearchHit[] hits;
-
- public String getTotalHits() {
- return totalHits;
- }
-
- public void setTotalHits(String totalHits) {
- this.totalHits = totalHits;
- }
-
- public SearchHit[] getHits() {
- return hits;
- }
-
- public void setHits(SearchHit[] hits) {
- this.hits = hits;
- }
-
- @Override
- public String toString() {
- return "SearchHits [totalHits=" + totalHits + ", hits=" + Arrays.toString(hits) + "]";
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchOperationResult.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchOperationResult.java
deleted file mode 100644
index 581fc49..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchOperationResult.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.entity;
-
-public class SearchOperationResult extends OperationResult {
-
- private SearchHits searchResult;
- private AggregationResults aggregationResult;
-
- public SearchHits getSearchResult() {
- return searchResult;
- }
-
- public AggregationResults getAggregationResult() {
- return aggregationResult;
- }
-
- public void setAggregationResult(AggregationResults aggregations) {
- this.aggregationResult = aggregations;
- }
-
- public void setSearchResult(SearchHits hits) {
- this.searchResult = hits;
- }
-
- @Override
- public String toString() {
- return "SearchOperationResult [searchResult=" + searchResult
- + ", aggregationResult=" + aggregationResult;
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/logging/SearchDbMsgs.java b/src/main/java/org/openecomp/sa/searchdbabstraction/logging/SearchDbMsgs.java
deleted file mode 100644
index cbc6abf..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/logging/SearchDbMsgs.java
+++ /dev/null
@@ -1,224 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.logging;
-
-import com.att.eelf.i18n.EELFResourceManager;
-import org.openecomp.cl.eelf.LogMessageEnum;
-
-public enum SearchDbMsgs implements LogMessageEnum {
-
- /**
- * Arguments:
- * None
- */
- SERVICE_STARTED,
-
- /**
- * Arguments:
- * {0} = url
- */
- ELASTIC_SEARCH_CONNECTION_ATTEMPT,
-
- /**
- * Arguments:
- * {0} = url
- */
- ELASTIC_SEARCH_CONNECTION_SUCCESS,
-
- /**
- * Arguments:
- * {0} = url
- * {1} = failure cause
- */
- ELASTIC_SEARCH_CONNECTION_FAILURE,
-
- /**
- * Arguments:
- * {0} = Filter configuration file.
- * {1} = Failure cause.
- */
- FILTERS_CONFIG_FAILURE,
-
- /**
- * Arguments:
-   * {0} = Analysis configuration file.
-   * {1} = Failure cause.
- */
- ANALYSYS_CONFIG_FAILURE,
-
- /**
- * Arguments:
- * {0} = Index name
- */
- CREATED_INDEX,
-
- /**
- * Arguments:
- * {0} = Index name
- * {1} = Document type
- */
- CREATE_INDEX_TIME,
-
- /**
- * Arguments:
- * {0} = Index name
- */
- DELETED_INDEX,
-
- /**
- * Arguments:
- * {0} = Index name
- */
- DELETE_INDEX_TIME,
-
- /**
- * Arguments:
- * {0} = Index name
- */
- CHECK_INDEX_TIME,
-
- /**
- * Arguments:
- * {0} = Index name
- */
- CREATE_DOCUMENT_TIME,
-
- /**
- * Arguments:
- * {0} = Index name
- * {1} = Document id
- */
- UPDATE_DOCUMENT_TIME,
-
- /**
- * Arguments:
- * {0} = Index name
- * {1} = Document id
- */
- DELETE_DOCUMENT_TIME,
-
- /**
- * Arguments:
- * {0} = Index name
- * {1} = Document id
- */
- GET_DOCUMENT_TIME,
-
- /**
- * Arguments:
- * {0} = Index name
- * {1} = Query string
- */
- QUERY_DOCUMENT_TIME,
-
- /**
- * Arguments:
- */
- BULK_OPERATIONS_TIME,
-
- /**
- * Arguments:
- */
- PROCESSED_BULK_OPERATIONS,
-
- /**
- * Arguments:
- * {0} = Event
- * {1} = Result
- */
- PROCESS_EVENT,
-
- /**
- * Arguments:
- * {0} = URL.
- */
- PROCESS_INLINE_QUERY,
-
- /**
- * Arguments
- * {0} - Operation type (GET or POST)
- * {1} - URL.
- */
- PROCESS_PAYLOAD_QUERY,
-
- /**
- * Arguments:
- * {0} = Index
- * {1} = Error
- */
- INDEX_CREATE_FAILURE,
-
- /**
- * Arguments:
- * {0} = Index name
- * {1} = Error cause
- */
- INDEX_DELETE_FAILURE,
-
- /**
- * Arguments:
- * {0} = Failure cause.
- */
- GET_ANALYZERS_FAILURE,
-
- /**
- * Arguments:
- * {0} = Failure cause.
- */
- BULK_OPERATION_FAILURE,
-
- /**
- * Arguments:
- * {0} = Method
- * {1} = Exception
- */
- EXCEPTION_DURING_METHOD_CALL,
-
- /**
- * Received request {0} {1} from {2}. Sending response: {3}
- *
- * <p>Arguments:
- * {0} = operation
- * {1} = target URL
- * {2} = source
- * {3} = response code
- */
- PROCESS_REST_REQUEST,
-
-  /**
-   * Exception encountered during startup of search service: {0}
-   *
-   * <p>Arguments:
-   * {0} = exception
-   */
-  STARTUP_EXCEPTION;
-
- /**
- * Load message bundle (SearchDbMsgs.properties file)
- */
- static {
- EELFResourceManager.loadMessageBundle("logging/SearchDbMsgs");
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/AbstractAggregation.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/AbstractAggregation.java
deleted file mode 100644
index 02a57d8..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/AbstractAggregation.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-
-/**
- * This is the common parent from which all aggregation types inherit. It defines
- * the common fields that all aggregations must include.
- */
-public abstract class AbstractAggregation {
-
- /**
- * The name of the field to apply the aggregation against.
- */
- protected String field;
-
- /**
- * Optionally allows the number of buckets for the aggregation to be
- * specified.
- */
- protected Integer size;
-
- /**
- * Optionally sets the minimum number of matches that must occur before
- * a particular bucket is included in the aggregation result.
- */
- @JsonProperty("min-threshold")
- protected Integer minThreshold;
-
-
- public String getField() {
- return field;
- }
-
- public void setField(String field) {
- this.field = field;
- }
-
- public Integer getSize() {
- return size;
- }
-
- public void setSize(Integer size) {
- this.size = size;
- }
-
- public Integer getMinThreshold() {
- return minThreshold;
- }
-
- public void setMinThreshold(Integer minThreshold) {
- this.minThreshold = minThreshold;
- }
-
- public abstract String toElasticSearch();
-
- public abstract String toString();
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Aggregation.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Aggregation.java
deleted file mode 100644
index 3bd74a3..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Aggregation.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class Aggregation {
- private String name;
-
- @JsonProperty("aggregation")
- private AggregationStatement aggregation;
-
- public String getName() {
- return name;
- }
-
- public void setName(String name) {
- this.name = name;
- }
-
- public AggregationStatement getStatement() {
- return aggregation;
- }
-
- public void setStatement(AggregationStatement aggregation) {
- this.aggregation = aggregation;
- }
-
- public String toElasticSearch() {
- StringBuffer sb = new StringBuffer();
-
- sb.append("\"");
- sb.append(name);
- sb.append("\": ");
- sb.append(aggregation.toElasticSearch());
-
- return sb.toString();
- }
-
- @Override
- public String toString() {
- return "{name: " + name + ", aggregation: " + aggregation.toString();
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationStatement.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationStatement.java
deleted file mode 100644
index ba62f74..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationStatement.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import java.util.Arrays;
-
-public class AggregationStatement {
-
- @JsonProperty("group-by")
- private GroupByAggregation groupBy;
-
- @JsonProperty("date-range")
- private DateRangeAggregation dateRange;
-
- @JsonProperty("date-histogram")
- private DateHistogramAggregation dateHist;
-
- @JsonProperty("nested")
- private Aggregation[] nested;
-
- @JsonProperty("sub-aggregations")
- private Aggregation[] subAggregations;
-
- public GroupByAggregation getGroupBy() {
- return groupBy;
- }
-
- public void setGroupBy(GroupByAggregation groupBy) {
- this.groupBy = groupBy;
- }
-
- public DateRangeAggregation getDateRange() {
- return dateRange;
- }
-
- public void setDateRange(DateRangeAggregation dateRange) {
- this.dateRange = dateRange;
- }
-
- public DateHistogramAggregation getDateHist() {
- return dateHist;
- }
-
- public void setDateHist(DateHistogramAggregation dateHist) {
- this.dateHist = dateHist;
- }
-
- public Aggregation[] getNested() {
- return nested;
- }
-
- public void setNested(Aggregation[] nested) {
- this.nested = nested;
- }
-
- public Aggregation[] getSubAggregations() {
- return subAggregations;
- }
-
- public void setSubAggregations(Aggregation[] subAggregations) {
- this.subAggregations = subAggregations;
- }
-
- public String toElasticSearch() {
- StringBuffer sb = new StringBuffer();
-
- sb.append("{");
-
- if (nested != null && nested.length > 0) {
- sb.append("\"nested\": {\"path\": \"");
- if (nested[0].getStatement() != null) {
- sb.append(nested[0].getStatement().getNestedPath());
- }
- sb.append("\"}, \"aggs\": {");
- for (int i = 0; i < nested.length; i++) {
- if (i > 0) {
- sb.append(",");
- }
- sb.append(nested[i].toElasticSearch());
- }
-
- sb.append("}");
- } else {
- if (groupBy != null) {
- sb.append(groupBy.toElasticSearch());
- } else if (dateRange != null) {
- sb.append(dateRange.toElasticSearch());
- } else if (dateHist != null) {
- sb.append(dateHist.toElasticSearch());
- }
-
- if (subAggregations != null && subAggregations.length > 0) {
- sb.append(", \"aggs\": {");
- for (int i = 0; i < subAggregations.length; i++) {
- if (i > 0) {
- sb.append(",");
- }
- sb.append(subAggregations[i].toElasticSearch());
- }
- sb.append("}");
- }
- }
-
- sb.append("}");
-
- return sb.toString();
- }
-
- @Override
- public String toString() {
- StringBuffer sb = new StringBuffer();
-
- if (nested != null) {
- sb.append("{nested: ");
- sb.append(Arrays.toString(nested));
- } else if (groupBy != null) {
- sb.append(groupBy.toString());
- } else if (dateHist != null) {
- sb.append(dateHist.toString());
- } else if (dateRange != null) {
- sb.append(dateRange.toString());
- }
-
- if (subAggregations != null) {
- sb.append(", sub-aggregations: ");
- sb.append(Arrays.toString(subAggregations));
- }
-
- sb.append("}");
-
- return sb.toString();
- }
-
- public String getNestedPath() {
- String path = null;
- String fieldName = null;
-
- if (groupBy != null) {
- fieldName = groupBy.getField();
- } else if (dateRange != null) {
- fieldName = dateRange.getField();
- } else if (dateHist != null) {
- fieldName = dateHist.getField();
- }
-
- if (fieldName != null && fieldName.contains(".")) {
- // we have nested field
- path = fieldName.substring(0, fieldName.indexOf("."));
- }
-
- return path;
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateHistogramAggregation.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateHistogramAggregation.java
deleted file mode 100644
index 2edad68..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateHistogramAggregation.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-/**
- * An example of elasticsearch date_histogram aggregation:
- *
- * <p><pre>
- * {
- * "aggs": {
- * "my_group": {
- * "date_histogram" : {
- * "field" : "date",
- * "interval" : "month"
- * }
- * }
- * }
- * }
- * </pre>
- */
-
-public class DateHistogramAggregation extends AbstractAggregation {
-
- private String interval;
-
- private String format;
-
- @JsonProperty("time-zone")
- private String timeZone;
-
-
- public String getInterval() {
- return interval;
- }
-
- public void setInterval(String interval) {
- this.interval = interval;
- }
-
- public String getTimeZone() {
- return timeZone;
- }
-
- public String getFormat() {
- return format;
- }
-
- public void setFormat(String format) {
- this.format = format;
- }
-
- public void setTimeZone(String timeZone) {
- this.timeZone = timeZone;
- }
-
- @Override
- public String toElasticSearch() {
- StringBuilder sb = new StringBuilder();
-
- sb.append("\"date_histogram\": {\"field\": \"");
- sb.append(field);
- sb.append("\"");
- if (interval != null) {
- sb.append(", \"interval\": \"");
- sb.append(interval);
- sb.append("\"");
- }
- if (format != null) {
- sb.append(", \"format\": \"");
- sb.append(format);
- sb.append("\"");
- }
- if (timeZone != null) {
- sb.append(", \"time_zone\": \"");
- sb.append(timeZone);
- sb.append("\"");
- }
- if (size != null) {
- sb.append(", \"size\": ");
- sb.append(size);
- }
- if (minThreshold != null) {
- sb.append(", \"min_doc_count\": ").append(minThreshold);
- }
- sb.append("}");
-
- return sb.toString();
- }
-
- @Override
- public String toString() {
- return "DateHistogramAggregation: [field=" + field + ", interval=" + interval + ", format="
- + format + ", timeZone=" + timeZone + ", size=" + size + " minThreshold=" + minThreshold;
- }
-}
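A standalone sketch (not part of the sources above) that wires Aggregation, AggregationStatement and DateHistogramAggregation together and prints the Elasticsearch clause they emit; the aggregation name and field are taken from the Javadoc example above.

    public class DateHistogramSketch {

      public static void main(String[] args) {
        DateHistogramAggregation histogram = new DateHistogramAggregation();
        histogram.setField("date");
        histogram.setInterval("month");

        AggregationStatement statement = new AggregationStatement();
        statement.setDateHist(histogram);

        Aggregation aggregation = new Aggregation();
        aggregation.setName("my_group");
        aggregation.setStatement(statement);

        // Prints:
        //   "my_group": {"date_histogram": {"field": "date", "interval": "month"}}
        // i.e. the body of the "aggs" block shown in the DateHistogramAggregation Javadoc.
        System.out.println(aggregation.toElasticSearch());
      }
    }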
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRange.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRange.java
deleted file mode 100644
index 69e795d..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRange.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-/**
- * This class represents the ranges specification in a date_range statement.
- * <p>
- * The expected JSON structure for a ranges is as follows:
- * <p>
- * <pre>
- * {
- * "from": <from-date>
- * }
- * </pre>
- * <p>
- * or
- * <p>
- * <pre>
- * {
- * "to": <to-date>
- * }
- * </pre>
- * <p>
- * or
- * <p>
- * <pre>
- * {
- * "from": <from-date>,
- * "to": <to-date>
- * }
- * </pre>
- *
- * @author sye
- */
-public class DateRange {
-
- @JsonProperty("from")
- private String fromDate;
-
- @JsonProperty("to")
- private String toDate;
-
- public String getFromDate() {
- return fromDate;
- }
-
- public void setFromDate(String fromDate) {
- this.fromDate = fromDate;
- }
-
- public String getToDate() {
- return toDate;
- }
-
- public void setToDate(String toDate) {
- this.toDate = toDate;
- }
-
- public String toElasticSearch() {
- StringBuilder sb = new StringBuilder();
-
- sb.append("{");
-
-    if (fromDate != null) {
-      sb.append("\"from\": \"");
-      sb.append(fromDate);
-      sb.append("\"");
-    }
-
-    if (toDate != null) {
-      if (fromDate != null) {
-        sb.append(", ");
-      }
-      sb.append("\"to\": \"");
-      sb.append(toDate);
-      sb.append("\"");
-    }
-
- sb.append("}");
-
- return sb.toString();
- }
-
-  @Override
-  public String toString() {
- return "{from: " + fromDate + ", to: " + toDate + "}";
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeAggregation.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeAggregation.java
deleted file mode 100644
index f893905..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeAggregation.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-/**
- * An example of a date_range aggregation:
- *
- * <p><pre>
- * {
- * "aggs": {
- * "range": {
- * "date_range": {
- * "field": "date",
- * "format": "MM-yyy",
- * "ranges": [
- * { "to": "now-10M/M" },
- * { "from": "now-10M/M" }
- * ]
- * }
- * }
- * }
- * }
- * </pre>
- *
- * @author sye
- */
-public class DateRangeAggregation extends AbstractAggregation {
-
-
- private String format;
-
- @JsonProperty("ranges")
- private DateRange[] dateRanges;
-
-
- public String getFormat() {
- return format;
- }
-
- public void setFormat(String format) {
- this.format = format;
- }
-
- public DateRange[] getDateRanges() {
- return dateRanges;
- }
-
- public void setDateRanges(DateRange[] dateRanges) {
- this.dateRanges = dateRanges;
- }
-
- @Override
- public String toElasticSearch() {
- StringBuilder sb = new StringBuilder();
-
- sb.append("\"date_range\": {\"field\": \"");
- sb.append(field);
- sb.append("\"");
-
- if (format != null) {
- sb.append(", \"format\": \"");
- sb.append(format);
- sb.append("\"");
- }
-
- if (dateRanges != null && dateRanges.length > 0) {
- sb.append(", \"ranges\": [");
-
- for (int i = 0; i < dateRanges.length; i++) {
- if (i > 0) {
- sb.append(",");
- }
- sb.append(dateRanges[i].toElasticSearch());
- }
-
- sb.append("]");
- }
-
- if (size != null) {
- sb.append(", \"size\": ");
- sb.append(size);
- }
-
- if (minThreshold != null) {
- sb.append(", \"min_doc_count\": ").append(minThreshold);
- }
-
- sb.append("}");
-
- return sb.toString();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder();
- sb.append("date-range: {field: " + field + ", format: " + format + ", size: " + size
- + ", minThreshold: " + minThreshold + ", ranges: [");
- if (dateRanges != null) {
- for (int i = 0; i < dateRanges.length; i++) {
- if (i > 0) {
- sb.append(",");
- }
- sb.append(dateRanges[i].toString());
- }
- }
- sb.append("]}");
-
- return sb.toString();
- }
-
-}
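A hedged sketch of how this class is exercised (not from the original source): the searchapi classes are populated by Jackson from the REST payload, so assuming the inherited 'field' property of AbstractAggregation binds under the name shown in the Javadoc example, a date_range aggregation can be deserialized and rendered like this (checked exceptions omitted for brevity):

    ObjectMapper mapper = new ObjectMapper();
    String json = "{\"field\": \"date\", \"format\": \"MM-yyy\", "
        + "\"ranges\": [{\"to\": \"now-10M/M\"}, {\"from\": \"now-10M/M\"}]}";

    DateRangeAggregation agg = mapper.readValue(json, DateRangeAggregation.class);

    // Emits the aggregation body, e.g.:
    // "date_range": {"field": "date", "format": "MM-yyy",
    //                "ranges": [{"to": "now-10M/M"}, {"from": "now-10M/M"}]}
    String es = agg.toElasticSearch();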
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Filter.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Filter.java
deleted file mode 100644
index bfed378..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Filter.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * This class represents the filter stanza in a search statement.
- *
- * <p>The expected JSON structure for a filter stanza is as follows:
- * <pre>
- * {
- * "filter": {
- * "all": [ {query structure}, {query structure}, ... {query structure} ],
- * "any": [ {query structure}, {query structure}, ... {query structure} ]
- * }
- * }
- * </pre>
- */
-public class Filter {
-
- /**
- * All queries in this list must evaluate to true for the filter to pass.
- */
- private QueryStatement[] all;
-
- /**
- * Any one of the queries in this list must evaluate to true for the
- * filter to pass.
- */
- private QueryStatement[] any;
-
-
- public QueryStatement[] getAll() {
- return all;
- }
-
- public void setAll(QueryStatement[] all) {
- this.all = all;
- }
-
- public QueryStatement[] getAny() {
- return any;
- }
-
- public void setAny(QueryStatement[] any) {
- this.any = any;
- }
-
- /**
- * This method returns a string which represents this filter in syntax
- * that is understandable by ElasticSearch and is suitable for inclusion
- * in an ElasticSearch query string.
- *
- * @return - ElasticSearch syntax string.
- */
- public String toElasticSearch() {
-
- StringBuilder sb = new StringBuilder();
-
- List<QueryStatement> notMatchQueries = new ArrayList<QueryStatement>();
- sb.append("{");
- sb.append("\"bool\": {");
-
- // Add the queries from our 'all' list.
- int matchQueriesCount = 0;
- int notMatchQueriesCount = 0;
- if (all != null) {
- sb.append("\"must\": [");
-
- for (QueryStatement query : all) {
- if (query.isNotMatch()) {
- notMatchQueries.add(query);
- } else {
- if (matchQueriesCount > 0) {
- sb.append(", ");
- }
- sb.append(query.toElasticSearch());
- matchQueriesCount++;
- }
- }
- sb.append("],");
-
-
- sb.append("\"must_not\": [");
- for (QueryStatement query : notMatchQueries) {
- if (notMatchQueriesCount > 0) {
- sb.append(", ");
- }
- sb.append(query.toElasticSearch());
- notMatchQueriesCount++;
- }
- sb.append("]");
- }
-
- // Add the queries from our 'any' list.
- notMatchQueries.clear();
- if (any != null) {
- if (all != null) {
- sb.append(",");
- }
- sb.append("\"should\": [");
-
- matchQueriesCount = 0;
- for (QueryStatement query : any) {
- if (query.isNotMatch()) {
- notMatchQueries.add(query);
- } else {
- if (matchQueriesCount > 0) {
- sb.append(", ");
- }
- sb.append(query.toElasticSearch());
- matchQueriesCount++;
- }
- }
- sb.append("],");
-
- //firstQuery.set(true);
- notMatchQueriesCount = 0;
- sb.append("\"must_not\": [");
- for (QueryStatement query : notMatchQueries) {
- //if(!firstQuery.compareAndSet(true, false)) {
- if (notMatchQueriesCount > 0) {
- sb.append(", ");
- }
- sb.append(query.toElasticSearch());
- notMatchQueriesCount++;
- }
- sb.append("]");
- }
- sb.append("}");
- sb.append("}");
-
- return sb.toString();
- }
-
- @Override
- public String toString() {
-
- StringBuilder sb = new StringBuilder();
-
- sb.append("{");
-
- sb.append("all: [");
- if (all != null) {
- for (QueryStatement query : all) {
- sb.append(query.toString());
- }
- }
- sb.append("], ");
-
- sb.append("any: [");
- if (any != null) {
- for (QueryStatement query : any) {
- sb.append(query.toString());
- }
- }
- sb.append("] ");
-
- sb.append("}");
-
- return sb.toString();
- }
-}
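A minimal usage sketch (added for illustration, not part of the original change set), assuming the TermQuery and QueryStatement classes from this same package; the field name and value are hypothetical:

    TermQuery active = new TermQuery();
    active.setField("status");
    active.setValue("active");

    QueryStatement mustBeActive = new QueryStatement();
    mustBeActive.setMatch(active);

    Filter filter = new Filter();
    filter.setAll(new QueryStatement[] {mustBeActive});

    // Emits a bool clause along the lines of:
    // {"bool": {"must": [{"term": {"status" : "active"}}],"must_not": []}}
    String esFilter = filter.toElasticSearch();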
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/GroupByAggregation.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/GroupByAggregation.java
deleted file mode 100644
index fbd120c..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/GroupByAggregation.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-/**
- * An example of a group-by (terms) aggregation:
- *
- * <p><pre>
- * {
- * "aggs": {
- * "my_group": {
- * "term": {
- * "field": "group"
- * }
- * }
- * }
- * }
- * </pre>
- *
- * @author sye
- */
-public class GroupByAggregation extends AbstractAggregation {
-
- @Override
- public String toElasticSearch() {
- StringBuilder sb = new StringBuilder();
-
- sb.append("\"terms\": {\"field\": \"");
- sb.append(field);
- sb.append("\"");
- if (size != null) {
- sb.append(", \"size\": ");
- sb.append(size);
- }
-
- if (minThreshold != null) {
- sb.append(", \"min_doc_count\": ").append(minThreshold);
- }
-
- sb.append("}");
-
- return sb.toString();
- }
-
- @Override
- public String toString() {
- return "{group-by: {field: " + field + ", size: " + size
- + " minThreshold: " + minThreshold + "}}";
- }
-
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/ParsedQuery.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/ParsedQuery.java
deleted file mode 100644
index bacb2ef..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/ParsedQuery.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-/**
- * This class represents a simple parsed query statement.
- *
- * <p>A 'parsed query' specifies a document field to inspect and a query
- * string which will be parsed by the document store to generate the
- * exact query to be performed.
- *
- * <p>The query string will be tokenized into 'terms' and 'operators' where:
- *
- * <p>Terms may be any of the following:
- * <ul>
- * <li> single words </li>
- * <li> exact phrases (denoted by surrounding the phrase with '"' characters) </li>
- * <li> regular expressions (denoted by surrounding the phrase with '/' characters) </li>
- * </ul>
- *
- * <p>Operators may be any of the following:
- * <ul>
- * <li> + -- The term to the right of the operator MUST be present to produce a match. </li>
- * <li> - -- The term to the right of the operator MUST NOT be present to produce a match. </li>
- * <li> AND -- Both the terms to the left and right of the operator MUST be present to produce a match. </li>
- * <li> OR -- Either the term to the left or right of the operator MUST be present to produce a match. </li>
- * <li> NOT -- The term to the right of the operator MUST NOT be present to produce a match. </li>
- * </ul>
- *
- * <p>The expected JSON structure for a parsed query is as follows:
- * <pre>
- * {
- * "parsed-query": {
- * "field": "fieldname",
- * "query-string": "string"
- * }
- * }
- * </pre>
- */
-public class ParsedQuery {
-
- /**
- * The name of the field which the query is to be applied to.
- */
- private String field;
-
- /**
- * The string to be parsed to generate the full query.
- */
- @JsonProperty("query-string")
- private String queryString;
-
-
- public String getField() {
- return field;
- }
-
- public void setField(String field) {
- this.field = field;
- }
-
- public String getQueryString() {
- return queryString;
- }
-
- public void setQueryString(String queryString) {
- this.queryString = queryString;
- }
-
-
- /**
- * This method returns a string which represents this query in syntax
- * that is understandable by ElasticSearch and is suitable for inclusion
- * in an ElasticSearch query string.
- *
- * @return - ElasticSearch syntax string.
- */
- public String toElasticSearch() {
-
- StringBuilder sb = new StringBuilder();
-
- sb.append("{");
- sb.append("\"query_string\": {");
- sb.append("\"default_field\": \"").append(field).append("\", ");
- sb.append("\"query\": \"").append(queryString).append("\"");
- sb.append("}");
- sb.append("}");
-
- return sb.toString();
- }
-
- @Override
- public String toString() {
- return "{field:" + field + ", query-string: '" + queryString + "'}";
- }
-}
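For illustration (not part of the original change), a parsed query maps directly onto an ElasticSearch query_string clause; the field name below is hypothetical:

    ParsedQuery pq = new ParsedQuery();
    pq.setField("address");
    pq.setQueryString("Main AND Street");

    // Emits: {"query_string": {"default_field": "address", "query": "Main AND Street"}}
    String es = pq.toElasticSearch();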
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Query.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Query.java
deleted file mode 100644
index 991b50c..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Query.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-public class Query {
-
- private QueryStatement may;
- private QueryStatement must;
-
- public QueryStatement getMay() {
- return may;
- }
-
- public void setMay(QueryStatement may) {
- this.may = may;
- }
-
- public QueryStatement getMust() {
- return must;
- }
-
- public void setMust(QueryStatement must) {
- this.must = must;
- }
-
- public QueryStatement getQueryStatement() {
- if (isMust()) {
- return must;
- } else if (isMay()) {
- return may;
- } else {
- return null;
- }
- }
-
- public boolean isMust() {
- return must != null;
- }
-
- public boolean isMay() {
- return may != null;
- }
-
- public String toElasticSearch() {
-
- if (isMust()) {
- return must.toElasticSearch();
- } else if (isMay()) {
- return may.toElasticSearch();
- } else {
- return ""; // throw an exception?
- }
- }
-
- @Override
- public String toString() {
-
- StringBuilder sb = new StringBuilder();
-
- sb.append("Query:[");
- if (isMust()) {
- sb.append("must: ").append(must.toString());
- } else if (isMay()) {
- sb.append("may: ").append(may.toString());
- } else {
- sb.append("INVALID");
- }
- sb.append("]");
-
- return sb.toString();
- }
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/QueryStatement.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/QueryStatement.java
deleted file mode 100644
index e8687b3..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/QueryStatement.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class QueryStatement {
-
- private TermQuery match;
-
- @JsonProperty("not-match")
- private TermQuery notMatch;
-
- @JsonProperty("parsed-query")
- private ParsedQuery parsedQuery;
-
- private RangeQuery range;
-
- public TermQuery getMatch() {
- return match;
- }
-
- public void setMatch(TermQuery match) {
- this.match = match;
- }
-
- public TermQuery getNotMatch() {
- return notMatch;
- }
-
- public void setNotMatch(TermQuery notMatch) {
- this.notMatch = notMatch;
- }
-
- public ParsedQuery getParsedQuery() {
- return parsedQuery;
- }
-
- public void setParsedQuery(ParsedQuery parsedQuery) {
- this.parsedQuery = parsedQuery;
- }
-
- public RangeQuery getRange() {
- return range;
- }
-
- public void setRange(RangeQuery range) {
- this.range = range;
- }
-
- public boolean isNotMatch() {
- return (notMatch != null);
- }
-
- public String toElasticSearch() {
-
- if (match != null) {
- return match.toElasticSearch();
-
- } else if (notMatch != null) {
- return notMatch.toElasticSearch();
-
- } else if (parsedQuery != null) {
-
- // We need some special wrapping if this query is against a nested field.
- if (fieldIsNested(parsedQuery.getField())) {
- return "{\"nested\": { \"path\": \"" + pathForNestedField(parsedQuery.getField())
- + "\", \"query\": " + parsedQuery.toElasticSearch() + "}}";
- } else {
- return parsedQuery.toElasticSearch();
- }
-
- } else if (range != null) {
-
- // We need some special wrapping if this query is against a nested field.
- if (fieldIsNested(range.getField())) {
- return "{\"nested\": { \"path\": \"" + pathForNestedField(range.getField())
- + "\", \"query\": " + range.toElasticSearch() + "}}";
- } else {
- return range.toElasticSearch();
- }
-
- } else {
- // throw an exception?
- return null;
- }
- }
-
- private boolean fieldIsNested(String field) {
- return field.contains(".");
- }
-
- private String pathForNestedField(String field) {
- int index = field.lastIndexOf('.');
- return field.substring(0, index);
- }
-
- @Override
- public String toString() {
-
- StringBuilder sb = new StringBuilder();
-
- sb.append("{");
-
- if (match != null) {
- sb.append("TERM QUERY: { match: {").append(match.toString()).append("}}");
- } else if (notMatch != null) {
- sb.append("TERM QUERY: { not-match: {").append(match.toString()).append("}}");
- } else if (parsedQuery != null) {
- sb.append("PARSED QUERY: { ").append(parsedQuery.toString()).append("}");
- } else if (range != null) {
- sb.append("RANGE QUERY: { ").append(range.toString()).append("}");
- } else {
- sb.append("UNDEFINED");
- }
-
- sb.append("}");
- return sb.toString();
- }
-}
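A short sketch (not from the original source) of the nested-field wrapping described above, using the RangeQuery class from this package; the field name is hypothetical:

    RangeQuery totalAtLeast100 = new RangeQuery();
    totalAtLeast100.setField("order.total");
    totalAtLeast100.setGte(100);

    QueryStatement statement = new QueryStatement();
    statement.setRange(totalAtLeast100);

    // Because 'order.total' contains a '.', the range query is wrapped in a
    // nested clause with path "order":
    // {"nested": { "path": "order", "query": {"range": {"order.total": {"gte": 100}}}}}
    String es = statement.toElasticSearch();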
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/RangeQuery.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/RangeQuery.java
deleted file mode 100644
index 9b8bc08..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/RangeQuery.java
+++ /dev/null
@@ -1,346 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-/**
- * This class represents a simple range query.
- *
- * <p>A range query is composed of one or more operator/value pairs which define
- * the upper and lower bounds of the range, and a field to apply the query to.
- *
- * <p>Operators may be one of the following:
- * <ul>
- * <li>gt - Greater than. </li>
- * <li>gte - Greater than or equal to. </li>
- * <li>lt - Less than. </li>
- * <li>lte - Less than or equal to. </li>
- * </ul>
- * Values may be either numeric values (Integer or Double) or Strings representing
- * dates.
- *
- * <p>The following examples illustrate a couple of variants of the range query:
- *
- * <p><pre>
- * // A simple numeric range query:
- * {
- * "range": {
- * "field": "fieldname",
- * "gte": 5,
- * "lte": 10
- * }
- * }
- *
- * // A simple date range query:
- * {
- * "range": {
- * "field": "fieldname",
- * "gt": "2016-10-06T00:00:00.558+03:00",
- * "lt": "2016-10-06T23:59:59.558+03:00"
- * }
- * }
- * </pre>
- */
-public class RangeQuery {
-
- /**
- * The name of the field to apply the range query against.
- */
- private String field;
-
- /**
- * The value of the field must be greater than this value to be a match.<br>
- * NOTE: Only one of 'gt' or 'gte' should be set on any single {@link RangeQuery}
- * instance.
- */
- private Object gt;
-
- /**
- * The value of the field must be greater than or equal to this value to be a match.<br>
- * NOTE: Only one of 'gt' or 'gte' should be set on any single {@link RangeQuery}
- * instance.
- */
- private Object gte;
-
- /**
- * The value of the field must be less than this value to be a match.<br>
- * NOTE: Only one of 'lt' or 'lte' should be set on any single {@link RangeQuery}
- * instance.
- */
- private Object lt;
-
- /**
- * The value of the field must be less than or equal to this value to be a match.<br>
- * NOTE: Only one of 'lt' or 'lte' should be set on any single {@link RangeQuery}
- * instance.
- */
- private Object lte;
-
- private String format;
-
- @JsonProperty("time-zone")
- private String timeZone;
-
- public String getField() {
- return field;
- }
-
- public void setField(String field) {
- this.field = field;
- }
-
- public Object getGt() {
- return gt;
- }
-
- public void setGt(Object gt) {
-
- // It does not make sense to assign a value to both the 'greater than'
- // and 'greater than or equal' operations, so make sure we are not
- // trying to do that.
- if (gte == null) {
-
- // Make sure that we are not trying to mix both numeric and date
- // type values in the same queries.
- if (((lt != null) && !typesMatch(gt, lt))
- || ((lte != null) && !typesMatch(gt, lte))) {
- throw new IllegalArgumentException("Cannot mix date and numeric values in the same ranged query");
- }
-
- // If we made it here, then we're all good. Store the value.
- this.gt = gt;
- } else {
- throw new IllegalArgumentException("Cannot assign both 'gt' and 'gte' fields in the same ranged query");
- }
- }
-
-
- public Object getGte() {
- return gte;
- }
-
- public void setGte(Object gte) {
-
- // It does not make sense to assign a value to both the 'greater than'
- // and 'greater than or equal' operations, so make sure we are not
- // trying to do that.
- if (gt == null) {
-
- // Make sure that we are not trying to mix both numeric and date
- // type values in the same queries.
- if (((lt != null) && !typesMatch(gte, lt))
- || ((lte != null) && !typesMatch(gte, lte))) {
- throw new IllegalArgumentException("Cannot mix date and numeric values in the same ranged query");
- }
-
- // If we made it here, then we're all good. Store the value.
- this.gte = gte;
-
- } else {
- throw new IllegalArgumentException("Cannot assign both 'gt' and 'gte' fields in the same ranged query");
- }
- }
-
- public Object getLt() {
- return lt;
- }
-
- public void setLt(Object lt) {
-
- // It does not make sense to assign a value to both the 'less than'
- // and 'less than or equal' operations, so make sure we are not
- // trying to do that.
- if (lte == null) {
-
- // Make sure that we are not trying to mix both numeric and date
- // type values in the same queries.
- if (((gt != null) && !typesMatch(lt, gt))
- || ((gte != null) && !typesMatch(lt, gte))) {
- throw new IllegalArgumentException("Cannot mix date and numeric values in the same ranged query");
- }
-
- // If we made it here, then we're all good. Store the value.
-
- this.lt = lt;
- } else {
- throw new IllegalArgumentException("Cannot assign both 'lt' and 'lte' fields in the same ranged query");
- }
- }
-
- public Object getLte() {
- return lte;
- }
-
- public void setLte(Object lte) {
-
- // It does not make sense to assign a value to both the 'less than'
- // and 'less than or equal' operations, so make sure we are not
- // trying to do that.
- if (lt == null) {
-
- // Make sure that we are not trying to mix both numeric and date
- // type values in the same queries.
- if (((gt != null) && !typesMatch(lte, gt))
- || ((gte != null) && !typesMatch(lte, gte))) {
- throw new IllegalArgumentException("Cannot mix date and numeric values in the same ranged query");
- }
-
- // If we made it here, then we're all good. Store the value.
-
- this.lte = lte;
- } else {
- throw new IllegalArgumentException("Cannot assign both 'lt' and 'lte' fields in the same ranged query");
- }
- }
-
- public String getFormat() {
- return format;
- }
-
- public void setFormat(String format) {
- this.format = format;
- }
-
- public String getTimeZone() {
- return timeZone;
- }
-
- public void setTimeZone(String timeZone) {
- this.timeZone = timeZone;
- }
-
- /**
- * This convenience method formats the supplied value for inclusion in
- * ElasticSearch compatible syntax, enclosing it in '"' characters if it
- * is a String and leaving numeric values unquoted.
- *
- * @param val - The value to format.
- * @return - A string representation of the value for inclusion
- * in an ElasticSearch syntax string.
- */
- private String formatStringOrNumericVal(Object val) {
-
- if (val instanceof String) {
- return "\"" + val.toString() + "\"";
- } else {
- return val.toString();
- }
- }
-
-
- /**
- * This convenience method verifies that the supplied objects are
- * of classes considered to be compatible for a ranged query.
- *
- * @param value1 - The first value to check.
- * @param value2 - The second value to check.
- * @return - True if the two objects are compatible for inclusion in the
- * same ranged query, False, otherwise.
- */
- boolean typesMatch(Object value1, Object value2) {
-
- return ((value1 instanceof String) && (value2 instanceof String))
- || (!(value1 instanceof String) && !(value2 instanceof String));
- }
-
-
- /**
- * This method returns a string which represents this query in syntax
- * that is understandable by ElasticSearch and is suitable for inclusion
- * in an ElasticSearch query string.
- *
- * @return - ElasticSearch syntax string.
- */
- public String toElasticSearch() {
-
- StringBuilder sb = new StringBuilder();
-
- sb.append("{");
- sb.append("\"range\": {");
- sb.append("\"").append(field).append("\": {");
-
- // We may have one or zero of 'greater than' or 'greater
- // than or equal'
- boolean needComma = false;
- if (gte != null) {
- sb.append("\"gte\": ").append(formatStringOrNumericVal(gte));
- needComma = true;
- } else if (gt != null) {
- sb.append("\"gt\": ").append(formatStringOrNumericVal(gt));
- needComma = true;
- }
-
- // We may have one or zero of 'less than' or 'less
- // than or equal'
- if (lte != null) {
- if (needComma) {
- sb.append(", ");
- }
- sb.append("\"lte\": ").append(formatStringOrNumericVal(lte));
- } else if (lt != null) {
- if (needComma) {
- sb.append(", ");
- }
- sb.append("\"lt\": ").append(formatStringOrNumericVal(lt));
- }
-
- // Append the format specifier if one was provided.
- if (format != null) {
- sb.append(", \"format\": \"").append(format).append("\"");
- }
-
- // Append the time zone specifier if one was provided.
- if (timeZone != null) {
- sb.append(", \"time_zone\": \"").append(timeZone).append("\"");
- }
-
- sb.append("}");
- sb.append("}");
- sb.append("}");
-
- return sb.toString();
- }
-
- @Override
- public String toString() {
-
- String str = "{ field: " + field + ", ";
-
- if (gt != null) {
- str += "gt: " + gt;
- } else if (gte != null) {
- str += "gte: " + gte;
- }
-
- if (lt != null) {
- str += (((gt != null) || (gte != null)) ? ", " : "") + "lt: " + lt;
- } else if (lte != null) {
- str += (((gt != null) || (gte != null)) ? ", " : "") + "lte: " + lte;
- }
-
- str += "}";
-
- return str;
- }
-}
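An illustrative fragment (not in the original source) showing a date-bounded range; the field name and bounds are hypothetical, and the setters enforce the constraints described in the Javadoc above:

    RangeQuery createdToday = new RangeQuery();
    createdToday.setField("timestamp");
    createdToday.setGte("2016-10-06T00:00:00.558+03:00");
    createdToday.setLt("2016-10-06T23:59:59.558+03:00");

    // Setting both 'gt' and 'gte', or mixing a numeric bound with the date
    // strings above, would throw IllegalArgumentException.

    // Emits: {"range": {"timestamp": {"gte": "2016-10-06T00:00:00.558+03:00",
    //                                 "lt": "2016-10-06T23:59:59.558+03:00"}}}
    String es = createdToday.toElasticSearch();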
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/SearchStatement.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/SearchStatement.java
deleted file mode 100644
index 38ea80d..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/SearchStatement.java
+++ /dev/null
@@ -1,323 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import org.radeox.util.logging.Logger;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-/**
- * This class represents the structure of a search statement.
- *
- * <p>The expected JSON structure to represent a search statement is as follows:
- *
- * <p><pre>
- * {
- * "results-start": int, - Optional: index of starting point in result set.
- * "results-size": int, - Optional: maximum number of documents to include in result set.
- *
- * "filter": {
- * { filter structure - see {@link Filter} }
- * },
- *
- * "queries": [
- * { query structure - see {@link QueryStatement} },
- * { query structure - see {@link QueryStatement} },
- * .
- * .
- * { query structure - see {@link QueryStatement} },
- * ],
- *
- * "aggregations": [
- * { aggregation structure - see {@link AggregationStatement} },
- * { aggregation structure - see {@link AggregationStatement} },
- * .
- * .
- * { aggregation structure - see {@link AggregationStatement} },
- * ]
- * }
- * </pre>
- */
-public class SearchStatement {
-
- /**
- * Defines the filters that should be applied before running the
- * actual queries. This is optional.
- */
- private Filter filter;
-
- /**
- * The list of queries to be applied to the document store.
- */
- private Query[] queries;
-
- /**
- * The list of aggregations to be applied to the search
- */
- private Aggregation[] aggregations;
-
- /**
- * Defines the sort criteria to apply to the query result set.
- * This is optional.
- */
- private Sort sort;
-
- @JsonProperty("results-start")
- private Integer resultsStart;
-
- @JsonProperty("results-size")
- private Integer size;
-
- public Filter getFilter() {
- return filter;
- }
-
- public void setFilter(Filter filter) {
- this.filter = filter;
- }
-
- public Query[] getQueries() {
- return queries;
- }
-
- public void setQueries(Query[] queries) {
- this.queries = queries;
- }
-
- public Sort getSort() {
- return sort;
- }
-
- public void setSort(Sort sort) {
- this.sort = sort;
- }
-
- public boolean isFiltered() {
- return filter != null;
- }
-
- public Aggregation[] getAggregations() {
- return aggregations;
- }
-
- public void setAggregations(Aggregation[] aggregations) {
- this.aggregations = aggregations;
- }
-
- public boolean hasAggregations() {
- return aggregations != null && aggregations.length > 0;
- }
-
- public Integer getFrom() {
- return resultsStart;
- }
-
- public void setFrom(Integer from) {
- this.resultsStart = from;
- }
-
- public Integer getSize() {
- return size;
- }
-
- public void setSize(Integer size) {
- this.size = size;
- }
-
- /**
- * This method returns a string which represents this statement in syntax
- * that is understandable by ElasticSearch and is suitable for inclusion
- * in an ElasticSearch query string.
- *
- * @return - ElasticSearch syntax string.
- */
- public String toElasticSearch() {
-
- StringBuilder sb = new StringBuilder();
- List<QueryStatement> notMatchQueries = new ArrayList<QueryStatement>();
- List<QueryStatement> mustQueries = new ArrayList<QueryStatement>();
- List<QueryStatement> shouldQueries = new ArrayList<QueryStatement>();
-
- createQueryLists(queries, mustQueries, shouldQueries, notMatchQueries);
-
- sb.append("{");
-
- sb.append("\"version\": true,");
-
- // If the client has specified an index into the results for the first
- // document in the result set then include that in the ElasticSearch
- // query.
- if (resultsStart != null) {
- sb.append("\"from\": ").append(resultsStart).append(", ");
- }
-
- // If the client has specified a maximum number of documents to be returned
- // in the result set then include that in the ElasticSearch query.
- if (size != null) {
- sb.append("\"size\": ").append(size).append(", ");
- }
-
- sb.append("\"query\": {");
- sb.append("\"bool\": {");
-
- sb.append("\"must\": [");
- AtomicBoolean firstQuery = new AtomicBoolean(true);
- for (QueryStatement query : mustQueries) {
-
- if (!firstQuery.compareAndSet(true, false)) {
- sb.append(", ");
- }
-
- sb.append(query.toElasticSearch());
- }
- sb.append("], ");
-
- sb.append("\"should\": [");
-
- firstQuery = new AtomicBoolean(true);
- for (QueryStatement query : shouldQueries) {
-
- if (!firstQuery.compareAndSet(true, false)) {
- sb.append(", ");
- }
-
- sb.append(query.toElasticSearch());
- }
-
- sb.append("],"); // close should list
-
- sb.append("\"must_not\": [");
- firstQuery.set(true);
- for (QueryStatement query : notMatchQueries) {
- sb.append(query.toElasticSearch());
- }
- sb.append("]");
-
- // Add the filter stanza, if one is required.
- if (isFiltered()) {
- sb.append(", \"filter\": ").append(filter.toElasticSearch());
- }
-
- sb.append("}"); // close bool clause
- sb.append("}"); // close query clause
-
- // Add the sort directive, if one is required.
- if (sort != null) {
- sb.append(", \"sort\": ").append(sort.toElasticSearch());
- }
-
- // Add aggregations
- if (hasAggregations()) {
- sb.append(", \"aggs\": {");
-
- for (int i = 0; i < aggregations.length; i++) {
- if (i > 0) {
- sb.append(",");
- }
- sb.append(aggregations[i].toElasticSearch());
- }
-
- sb.append("}");
- }
-
- sb.append("}");
-
- Logger.debug("Generated raw ElasticSearch query statement: " + sb.toString());
- return sb.toString();
- }
-
- private void createQueryLists(Query[] queries, List<QueryStatement> mustList,
- List<QueryStatement> mayList, List<QueryStatement> mustNotList) {
-
- for (Query query : queries) {
-
- if (query.isMust()) {
-
- if (query.getQueryStatement().isNotMatch()) {
- mustNotList.add(query.getQueryStatement());
- } else {
- mustList.add(query.getQueryStatement());
- }
- } else {
-
- if (query.getQueryStatement().isNotMatch()) {
- mustNotList.add(query.getQueryStatement());
- } else {
- mayList.add(query.getQueryStatement());
- }
- }
- }
- }
-
-
- @Override
- public String toString() {
-
- StringBuilder sb = new StringBuilder();
-
- sb.append("SEARCH STATEMENT: {");
-
- if (size != null) {
- sb.append("from: ").append(resultsStart).append(", size: ").append(size).append(", ");
- }
-
- if (filter != null) {
- sb.append("filter: ").append(filter.toString()).append(", ");
- }
-
- sb.append("queries: [");
- AtomicBoolean firstQuery = new AtomicBoolean(true);
- if (queries != null) {
- for (Query query : queries) {
-
- if (!firstQuery.compareAndSet(true, false)) {
- sb.append(", ");
- }
- sb.append(query.toString());
- }
- }
- sb.append("]");
-
- sb.append("aggregations: [");
- firstQuery = new AtomicBoolean(true);
-
- if (aggregations != null) {
- for (Aggregation agg : aggregations) {
-
- if (!firstQuery.compareAndSet(true, false)) {
- sb.append(", ");
- }
- sb.append(agg.toString());
- }
- }
- sb.append("]");
-
- sb.append("]}");
-
- return sb.toString();
- }
-
-}
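An end-to-end sketch (added for illustration, not from the original change set) of assembling a search statement from the classes in this package; the field and value reuse the examples from the TermQuery Javadoc:

    TermQuery match = new TermQuery();
    match.setField("entityType");
    match.setValue("pserver");

    QueryStatement statement = new QueryStatement();
    statement.setMatch(match);

    Query mustQuery = new Query();
    mustQuery.setMust(statement);

    SearchStatement search = new SearchStatement();
    search.setQueries(new Query[] {mustQuery});
    search.setSize(10);

    // Emits a complete request body along the lines of:
    // {"version": true,"size": 10, "query": {"bool":
    //   {"must": [{"term": {"entityType" : "pserver"}}], "should": [],"must_not": []}}}
    String es = search.toElasticSearch();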
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Sort.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Sort.java
deleted file mode 100644
index 3e98621..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Sort.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-public class Sort {
-
- private String field;
- private SortDirection order = null;
-
- public enum SortDirection {
- ascending,
- descending
- }
-
- public String getField() {
- return field;
- }
-
- public void setField(String field) {
- this.field = field;
- }
-
- public SortDirection getOrder() {
- return order;
- }
-
- public void setOrder(SortDirection order) {
- this.order = order;
- }
-
- public String toElasticSearch() {
-
- StringBuilder sb = new StringBuilder();
-
- sb.append("{ \"").append(field).append("\": { \"order\": ");
-
- // If a sort order wasn't explicitly supplied, default to 'ascending'.
- if (order != null) {
- switch (order) {
- case ascending:
- sb.append("\"asc\"}}");
- break;
- case descending:
- sb.append("\"desc\"}}");
- break;
- default:
- }
- } else {
- sb.append("\"asc\"}}");
- }
-
- return sb.toString();
- }
-}
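For completeness, a small usage sketch (not from the original source); the field name is hypothetical:

    Sort byTimestamp = new Sort();
    byTimestamp.setField("timestamp");
    byTimestamp.setOrder(Sort.SortDirection.descending);

    // Emits: { "timestamp": { "order": "desc"}}
    String es = byTimestamp.toElasticSearch();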
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/TermQuery.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/TermQuery.java
deleted file mode 100644
index 109d88c..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/TermQuery.java
+++ /dev/null
@@ -1,347 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.searchapi;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import edu.emory.mathcs.backport.java.util.Arrays;
-
-import java.util.List;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-/**
- * This class represents a simple term query.
- *
- * <p>A term query takes an operator, a field to apply the query to and a value to match
- * against the query contents.
- *
- * <p>Valid operators include:
- * <ul>
- * <li> match - Field must contain the supplied value to produce a match. </li>
- * <li> not-match - Field must NOT contain the supplied value to produce a match. </li>
- * </ul>
- * The following examples illustrate the structure of a few variants of the
- * term query:
- *
- * <p><pre>
- * // Single Field Match Query:
- * {
- * "match": {"field": "searchTags", "value": "abcd"}
- * }
- *
- * // Single Field Not-Match query:
- * {
- * "not-match": {"field": "searchTags", "value": "efgh"}
- * }
- * </pre>
- *
- * <p><pre>
- * // Multi Field Match Query With A Single Value:
- * {
- * "match": {"field": "entityType searchTags", "value": "pserver"}
- * }
- *
- * // Multi Field Match Query With Multiple Values:
- * {
- * "match": {"field": "entityType searchTags", "value": "pserver tenant"}
- * }
- * </pre>
- */
-public class TermQuery {
-
- /**
- * The name of the field to apply the term query to.
- */
- private String field;
-
- /**
- * The value which the field must contain in order to have a match.
- */
- private Object value;
-
- /**
- * For multi field queries only. Determines the rules for whether or not a document matches
- * the query, as follows:
- *
- * <p>"and" - At least one occurrence of every supplied value must be present in any of the
- * supplied fields.
- *
- * <p>"or" - At least one occurrence of any of the supplied values must be present in any of
- * the supplied fields.
- */
- private String operator;
-
- @JsonProperty("analyzer")
- private String searchAnalyzer;
-
-
- public String getField() {
- return field;
- }
-
- public void setField(String field) {
- this.field = field;
- }
-
- public Object getValue() {
- return value;
- }
-
- public void setValue(Object value) {
- this.value = value;
- }
-
- private boolean isNumericValue() {
- return ((value instanceof Integer) || (value instanceof Double));
- }
-
- public String getOperator() {
- return operator;
- }
-
- public void setOperator(String operator) {
- this.operator = operator;
- }
-
- public String getSearchAnalyzer() {
- return searchAnalyzer;
- }
-
- public void setSearchAnalyzer(String searchAnalyzer) {
- this.searchAnalyzer = searchAnalyzer;
- }
-
- /**
- * This method returns a string which represents this query in syntax
- * that is understandable by ElasticSearch and is suitable for inclusion
- * in an ElasticSearch query string.
- *
- * @return - ElasticSearch syntax string.
- */
- public String toElasticSearch() {
-
- StringBuilder sb = new StringBuilder();
-
- sb.append("{");
-
- // Are we generating a multi field query?
- if (isMultiFieldQuery()) {
-
- // For multi field queries, we have to be careful about how we handle
- // nested fields, so check to see if any of the specified fields are
- // nested.
- if (field.contains(".")) {
-
- // Build the equivalent of a multi match query across one or more nested fields.
- toElasticSearchNestedMultiMatchQuery(sb);
-
- } else {
-
- // Build a real multi match query, since we don't need to worry about nested fields.
- toElasticSearchMultiFieldQuery(sb);
- }
- } else {
-
- // Single field query.
-
- // Add the necessary wrapping if this is a query against a nested field.
- if (fieldIsNested(field)) {
- sb.append("{\"nested\": { \"path\": \"").append(pathForNestedField(field))
- .append("\", \"query\": ");
- }
-
- // Build the query.
- toElasticSearchSingleFieldQuery(sb);
-
- if (fieldIsNested(field)) {
- sb.append("}}");
- }
- }
-
- sb.append("}");
-
- return sb.toString();
- }
-
-
- /**
- * Determines whether or not the client has specified a term query with
- * multiple fields.
- *
- * @return - true if the query is referencing multiple fields, false, otherwise.
- */
- private boolean isMultiFieldQuery() {
-
- return (field.split(" ").length > 1);
- }
-
-
- /**
- * Constructs a single field term query in ElasticSearch syntax.
- *
- * @param sb - The string builder to assemble the query string with.
- */
- private void toElasticSearchSingleFieldQuery(StringBuilder sb) {
-
- sb.append("\"term\": {\"").append(field).append("\" : ");
-
- // For numeric values, don't enclose the value in quotes.
- if (!isNumericValue()) {
- sb.append("\"").append(value).append("\"");
- } else {
- sb.append(value);
- }
-
- sb.append("}");
- }
-
-
- /**
- * Constructs a multi field query in ElasticSearch syntax.
- *
- * @param sb - The string builder to assemble the query string with.
- */
- private void toElasticSearchMultiFieldQuery(StringBuilder sb) {
-
- sb.append("\"multi_match\": {");
-
- sb.append("\"query\": \"").append(value).append("\", ");
- sb.append("\"type\": \"cross_fields\",");
- sb.append("\"fields\": [");
-
- List<String> fields = Arrays.asList(field.split(" "));
- AtomicBoolean firstField = new AtomicBoolean(true);
- for (String f : fields) {
- if (!firstField.compareAndSet(true, false)) {
- sb.append(", ");
- }
- sb.append("\"").append(f.trim()).append("\"");
- }
- sb.append("],");
-
- sb.append("\"operator\": \"").append((operator != null)
- ? operator.toLowerCase() : "and").append("\"");
-
- if (searchAnalyzer != null) {
- sb.append(", \"analyzer\": \"").append(searchAnalyzer).append("\"");
- }
-
- sb.append("}");
- }
-
-
- /**
- * Constructs the equivalent of an ElasticSearch multi match query across
- * multiple nested fields.
- *
- * <p>Since ElasticSearch doesn't really let you do that, we have to be clever
- * and construct an equivalent query using boolean operators to produce
- * the same result.
- *
- * @param sb - The string builder to use to build the query.
- */
- public void toElasticSearchNestedMultiMatchQuery(StringBuilder sb) {
-
- // Break out our whitespace delimited lists of fields and values into actual lists.
- List<String> fields = Arrays.asList(field.split(" "));
- List<String> values = Arrays.asList(((String) value).split(" ")); // GDF: revisit this cast.
-
- sb.append("\"bool\": {");
-
- if (operator != null) {
-
- if (operator.toLowerCase().equals("and")) {
- sb.append("\"must\": [");
- } else if (operator.toLowerCase().equals("or")) {
- sb.append("\"should\": [");
- }
-
- } else {
- sb.append("\"must\": [");
- }
-
- AtomicBoolean firstField = new AtomicBoolean(true);
- for (String f : fields) {
-
- if (!firstField.compareAndSet(true, false)) {
- sb.append(", ");
- }
-
- sb.append("{ ");
-
- // Is this a nested field?
- if (fieldIsNested(f)) {
-
- sb.append("\"nested\": {");
- sb.append("\"path\": \"").append(pathForNestedField(f)).append("\", ");
- sb.append("\"query\": ");
- }
-
- sb.append("{\"bool\": {");
- sb.append("\"should\": [");
-
- AtomicBoolean firstValue = new AtomicBoolean(true);
- for (String v : values) {
- if (!firstValue.compareAndSet(true, false)) {
- sb.append(", ");
- }
- sb.append("{\"match\": { \"");
- sb.append(f).append("\": {\"query\": \"").append(v).append("\"");
-
- if (searchAnalyzer != null) {
- sb.append(", \"analyzer\": \"").append(searchAnalyzer).append("\"");
- }
- sb.append("}}}");
- }
-
- sb.append("]");
- sb.append("}");
-
- if (fieldIsNested(f)) {
- sb.append("}");
- sb.append("}");
- }
-
- sb.append("}");
- }
-
- sb.append("]");
- sb.append("}");
- }
-
-
- @Override
- public String toString() {
- return "field: " + field + ", value: " + value + " (" + value.getClass().getName() + ")";
- }
-
- public boolean fieldIsNested(String field) {
- return field.contains(".");
- }
-
- public String pathForNestedField(String field) {
- int index = field.lastIndexOf('.');
- return field.substring(0, index);
- }
-}
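An illustrative multi-field fragment (not part of the original change), reusing the field and value names from the Javadoc above:

    TermQuery multi = new TermQuery();
    multi.setField("entityType searchTags");
    multi.setValue("pserver tenant");
    multi.setOperator("OR");

    // Neither field is nested, so this becomes a cross_fields multi_match query:
    // {"multi_match": {"query": "pserver tenant", "type": "cross_fields",
    //                  "fields": ["entityType", "searchTags"],"operator": "or"}}
    String es = multi.toElasticSearch();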
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/service/SearchService.java b/src/main/java/org/openecomp/sa/searchdbabstraction/service/SearchService.java
deleted file mode 100644
index c31307c..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/service/SearchService.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.service;
-
-import org.openecomp.cl.api.Logger;
-import org.openecomp.cl.eelf.LoggerFactory;
-import org.openecomp.sa.searchdbabstraction.elasticsearch.config.ElasticSearchConfig;
-import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.ElasticSearchHttpController;
-import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
-import org.openecomp.sa.searchdbabstraction.util.SearchDbConstants;
-
-import java.io.FileInputStream;
-import java.util.Properties;
-
-
-public class SearchService {
- private ElasticSearchHttpController esController = null;
-
- static Logger logger = LoggerFactory.getInstance().getLogger(SearchService.class.getName());
-
- public SearchService() {
- try {
- start();
- } catch (Exception e) {
- logger.error(SearchDbMsgs.STARTUP_EXCEPTION, e.getLocalizedMessage());
- }
- }
-
- protected void start() throws Exception {
- Properties configProperties = new Properties();
- configProperties.load(new FileInputStream(SearchDbConstants.ES_CONFIG_FILE));
- ElasticSearchConfig esConfig = new ElasticSearchConfig(configProperties);
-
- esController = new ElasticSearchHttpController(esConfig);
- logger.info(SearchDbMsgs.SERVICE_STARTED);
- }
-}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/util/AggregationParsingUtil.java b/src/main/java/org/openecomp/sa/searchdbabstraction/util/AggregationParsingUtil.java
deleted file mode 100644
index 3f61e28..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/util/AggregationParsingUtil.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.util;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-import org.openecomp.sa.searchdbabstraction.entity.AggregationBucket;
-import org.openecomp.sa.searchdbabstraction.entity.AggregationResult;
-
-import java.util.Iterator;
-import java.util.Set;
-
-public class AggregationParsingUtil {
- public static AggregationResult[] parseAggregationResults(JSONObject aggregations)
- throws JsonProcessingException {
-
- // Obtain the set of aggregation names
- Set keySet = aggregations.keySet();
- AggregationResult[] aggResults = new AggregationResult[keySet.size()];
-
- int index = 0;
- for (Iterator it = keySet.iterator(); it.hasNext(); ) {
- String key = (String) it.next();
- AggregationResult aggResult = new AggregationResult();
- aggResult.setName(key);
-
- JSONObject bucketsOrNested = (JSONObject) aggregations.get(key);
- Object buckets = bucketsOrNested.get("buckets");
- if (buckets == null) {
- // we have a nested
- Number count = (Number) bucketsOrNested.remove("doc_count");
- aggResult.setCount(count);
- AggregationResult[] nestedResults = parseAggregationResults(bucketsOrNested);
- aggResult.setNestedAggregations(nestedResults);
- } else {
- AggregationBucket[] aggBuckets = parseAggregationBuckets((JSONArray) buckets);
- aggResult.setBuckets(aggBuckets);
- }
-
- aggResults[index] = aggResult;
- index++;
- }
-
- return aggResults;
-
- }
-
- private static AggregationBucket[] parseAggregationBuckets(JSONArray buckets)
- throws JsonProcessingException {
- AggregationBucket[] aggBuckets = new AggregationBucket[buckets.size()];
- for (int i = 0; i < buckets.size(); i++) {
- AggregationBucket aggBucket = new AggregationBucket();
- JSONObject bucketContent = (JSONObject) buckets.get(i);
- Object key = bucketContent.remove("key");
- aggBucket.setKey(key);
- Object formatted = bucketContent.remove("key_as_string");
- if (formatted != null) {
- aggBucket.setFormattedKey((String) formatted);
- }
- Object count = bucketContent.remove("doc_count");
- if (count != null) {
- aggBucket.setCount((Number) count);
- }
- bucketContent.remove("from");
- bucketContent.remove("from_as_string");
- bucketContent.remove("to");
- bucketContent.remove("to_as_string");
-
-
- if (!bucketContent.entrySet().isEmpty()) {
- // we have results from sub-aggregation
- AggregationResult[] subResult = parseAggregationResults(bucketContent);
- if (subResult != null) {
- aggBucket.setSubAggregationResult(subResult);
- }
- }
- aggBuckets[i] = aggBucket;
- }
-
- return aggBuckets;
- }
-
-}
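A hedged sketch (not from the original source) of how this utility consumes the 'aggregations' portion of an ElasticSearch response, using the json-simple types already imported by the class; the aggregation name and bucket contents are hypothetical, and the JSONParser import plus checked exceptions are omitted for brevity:

    String aggs = "{\"my_group\": {\"buckets\": ["
        + "{\"key\": \"pserver\", \"doc_count\": 12},"
        + "{\"key\": \"tenant\", \"doc_count\": 3}]}}";

    // org.json.simple.parser.JSONParser
    JSONObject aggregations = (JSONObject) new JSONParser().parse(aggs);

    // One AggregationResult per top-level key; each "buckets" entry becomes an
    // AggregationBucket carrying its key and doc_count.
    AggregationResult[] results =
        AggregationParsingUtil.parseAggregationResults(aggregations);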
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/util/DocumentSchemaUtil.java b/src/main/java/org/openecomp/sa/searchdbabstraction/util/DocumentSchemaUtil.java
deleted file mode 100644
index 34d1837..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/util/DocumentSchemaUtil.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.util;
-
-import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.databind.JsonMappingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.openecomp.sa.rest.DocumentFieldSchema;
-import org.openecomp.sa.rest.DocumentSchema;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-public class DocumentSchemaUtil {
-
- public static String generateDocumentMappings(String documentSchema)
- throws JsonParseException, JsonMappingException, IOException {
-
- // Unmarshal the JSON content into a document schema object.
- ObjectMapper mapper = new ObjectMapper();
- DocumentSchema schema = mapper.readValue(documentSchema, DocumentSchema.class);
-
- return generateDocumentMappings(schema);
- }
-
- public static String generateDocumentMappings(DocumentSchema schema) {
-
- // Now, generate the Elasticsearch mapping JSON and return it.
- StringBuilder sb = new StringBuilder();
- sb.append("{");
- sb.append("\"properties\": {");
-
- generateFieldMappings(schema.getFields(), sb);
-
- sb.append("}");
- sb.append("}");
-
- return sb.toString();
- }
-
-
- private static void generateFieldMappings(List<DocumentFieldSchema> fields, StringBuilder sb) {
-
- AtomicBoolean firstField = new AtomicBoolean(true);
-
- for (DocumentFieldSchema field : fields) {
-
- // If this isn't the first field in the list, prepend it with a ','
- if (!firstField.compareAndSet(true, false)) {
- sb.append(", ");
- }
-
- // Now, append the translated field contents.
- generateFieldMapping(field, sb);
- }
- }
-
- private static void generateFieldMapping(DocumentFieldSchema fieldSchema, StringBuilder sb) {
-
- sb.append("\"").append(fieldSchema.getName()).append("\": {");
-
- // The field type is mandatory.
- sb.append("\"type\": \"").append(fieldSchema.getDataType()).append("\"");
-
- // For date type fields we may optionally supply a format specifier.
- if (fieldSchema.getDataType().equals("date")) {
- if (fieldSchema.getFormat() != null) {
- sb.append(", \"format\": \"").append(fieldSchema.getFormat()).append("\"");
- }
- }
-
- // If the searchable flag was specified, then append the corresponding index setting.
- if (fieldSchema.getSearchable() != null) {
- sb.append(", \"index\": \"").append(fieldSchema.getSearchable()
- ? "analyzed" : "not_analyzed").append("\"");
- }
-
- // If a search analyzer was specified, then append it.
- if (fieldSchema.getSearchAnalyzer() != null) {
- sb.append(", \"search_analyzer\": \"").append(fieldSchema.getSearchAnalyzer()).append("\"");
- }
-
- // If an indexing analyzer was specified, then append it.
- if (fieldSchema.getIndexAnalyzer() != null) {
- sb.append(", \"analyzer\": \"").append(fieldSchema.getIndexAnalyzer()).append("\"");
- }
-
-
- if (fieldSchema.getDataType().equals("nested")) {
-
- sb.append(", \"properties\": {");
- generateFieldMappings(fieldSchema.getSubFields(), sb);
- sb.append("}");
- }
-
- sb.append("}");
- }
-
-}
-
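generateDocumentMappings() above turns a DocumentSchema into the Elasticsearch mapping JSON for an index: a "properties" object with one entry per field, carrying the mandatory type plus any optional format, index, analyzer, and nested sub-field settings. The standalone sketch below shows that output shape for a single date field; it reproduces the string-building pattern rather than calling the utility itself, and the field name and format are illustrative only.

    // Illustrative only: mirrors the output shape of DocumentSchemaUtil for one field.
    public class MappingShapeExample {
      public static void main(String[] args) {
        StringBuilder sb = new StringBuilder();
        sb.append("{\"properties\": {");
        // A "date" field with an explicit format and an analyzed index setting.
        sb.append("\"timestamp\": {")
          .append("\"type\": \"date\"")
          .append(", \"format\": \"yyyy-MM-dd HH:mm:ss\"")
          .append(", \"index\": \"analyzed\"")
          .append("}");
        sb.append("}}");
        // Prints: {"properties": {"timestamp": {"type": "date", "format": "yyyy-MM-dd HH:mm:ss", "index": "analyzed"}}}
        System.out.println(sb.toString());
      }
    }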
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/util/SearchDbConstants.java b/src/main/java/org/openecomp/sa/searchdbabstraction/util/SearchDbConstants.java
deleted file mode 100644
index 457980e..0000000
--- a/src/main/java/org/openecomp/sa/searchdbabstraction/util/SearchDbConstants.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Copyright © 2017 Amdocs
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.openecomp.sa.searchdbabstraction.util;
-
-public class SearchDbConstants {
- public static final String SDB_FILESEP = (System.getProperty("file.separator") == null) ? "/"
- : System.getProperty("file.separator");
- public static final String SDB_BUNDLECONFIG_NAME =
- (System.getProperty("BUNDLECONFIG_DIR") == null)
- ? "bundleconfig" : System.getProperty("BUNDLECONFIG_DIR");
-
- public static final String SDB_HOME_BUNDLECONFIG = (System.getProperty("AJSC_HOME") == null)
- ? SDB_FILESEP + "opt" + SDB_FILESEP + "app" + SDB_FILESEP + "searchdb"
- + SDB_FILESEP + SDB_BUNDLECONFIG_NAME
- : System.getProperty("AJSC_HOME") + SDB_FILESEP + SDB_BUNDLECONFIG_NAME;
-
- public static final String SDB_HOME_ETC =
- SDB_HOME_BUNDLECONFIG + SDB_FILESEP + "etc" + SDB_FILESEP;
- public static final String SDB_CONFIG_APP_LOCATION = SDB_HOME_ETC + "appprops" + SDB_FILESEP;
-
- // Elastic Search related
- public static final String SDB_SPECIFIC_CONFIG = (System.getProperty("CONFIG_HOME") == null)
- ? SDB_CONFIG_APP_LOCATION : System.getProperty("CONFIG_HOME") + SDB_FILESEP;
- public static final String ES_CONFIG_FILE =
- SDB_SPECIFIC_CONFIG + "elastic-search.properties";
- public static final String SDB_AUTH = SDB_SPECIFIC_CONFIG + "auth" + SDB_FILESEP;
- public static final String SDB_AUTH_CONFIG_FILENAME = SDB_AUTH + "search_policy.json";
- public static final String SDB_FILTER_CONFIG_FILE = SDB_SPECIFIC_CONFIG + "filter-config.json";
- public static final String SDB_ANALYSIS_CONFIG_FILE =
- SDB_SPECIFIC_CONFIG + "analysis-config.json";
-
- // Logging related
- public static final String SDB_SERVICE_NAME = "SearchDataService";
-}
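SearchDbConstants resolves every configuration path from system properties with hard-coded fallbacks, so the same constants work whether or not AJSC_HOME and CONFIG_HOME are set. The sketch below shows that fallback pattern for locating the Elasticsearch properties file; it ignores AJSC_HOME and BUNDLECONFIG_DIR for brevity, and the class name and printout are illustrative only, not part of the original sources.

    // Hypothetical example class showing the property-fallback pattern used by SearchDbConstants.
    public class ConfigLocationExample {
      public static void main(String[] args) {
        String sep = (System.getProperty("file.separator") == null)
            ? "/" : System.getProperty("file.separator");
        // Default appprops location used when neither AJSC_HOME nor CONFIG_HOME is set.
        String defaultAppProps = sep + "opt" + sep + "app" + sep + "searchdb"
            + sep + "bundleconfig" + sep + "etc" + sep + "appprops" + sep;
        String specificConfig = (System.getProperty("CONFIG_HOME") == null)
            ? defaultAppProps : System.getProperty("CONFIG_HOME") + sep;
        System.out.println("elastic-search.properties resolved to: "
            + specificConfig + "elastic-search.properties");
      }
    }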