From c75a08a749718fc5ef25f8c2f826939be657c0bf Mon Sep 17 00:00:00 2001 From: Daniel Silverthorn Date: Thu, 4 May 2017 13:08:13 -0400 Subject: Initial search service commit Changing common logging dep Change-Id: I454697a9df0ee63f43d7b7d2a3818fe2d9b7bcf2 Signed-off-by: Daniel Silverthorn --- .../openecomp/sa/rest/AnalysisConfiguration.java | 243 ++++++++++ .../java/org/openecomp/sa/rest/AnalyzerApi.java | 160 +++++++ src/main/java/org/openecomp/sa/rest/ApiUtils.java | 180 ++++++++ src/main/java/org/openecomp/sa/rest/BulkApi.java | 257 +++++++++++ .../java/org/openecomp/sa/rest/BulkMetaData.java | 53 +++ .../java/org/openecomp/sa/rest/BulkOperation.java | 65 +++ .../java/org/openecomp/sa/rest/BulkRequest.java | 115 +++++ src/main/java/org/openecomp/sa/rest/Document.java | 65 +++ .../java/org/openecomp/sa/rest/DocumentApi.java | 505 +++++++++++++++++++++ src/main/java/org/openecomp/sa/rest/IndexApi.java | 378 +++++++++++++++ .../org/openecomp/sa/rest/SearchServiceApi.java | 259 +++++++++++ 11 files changed, 2280 insertions(+) create mode 100644 src/main/java/org/openecomp/sa/rest/AnalysisConfiguration.java create mode 100644 src/main/java/org/openecomp/sa/rest/AnalyzerApi.java create mode 100644 src/main/java/org/openecomp/sa/rest/ApiUtils.java create mode 100644 src/main/java/org/openecomp/sa/rest/BulkApi.java create mode 100644 src/main/java/org/openecomp/sa/rest/BulkMetaData.java create mode 100644 src/main/java/org/openecomp/sa/rest/BulkOperation.java create mode 100644 src/main/java/org/openecomp/sa/rest/BulkRequest.java create mode 100644 src/main/java/org/openecomp/sa/rest/Document.java create mode 100644 src/main/java/org/openecomp/sa/rest/DocumentApi.java create mode 100644 src/main/java/org/openecomp/sa/rest/IndexApi.java create mode 100644 src/main/java/org/openecomp/sa/rest/SearchServiceApi.java (limited to 'src/main/java/org/openecomp/sa/rest') diff --git a/src/main/java/org/openecomp/sa/rest/AnalysisConfiguration.java b/src/main/java/org/openecomp/sa/rest/AnalysisConfiguration.java new file mode 100644 index 0000000..6218d9c --- /dev/null +++ b/src/main/java/org/openecomp/sa/rest/AnalysisConfiguration.java @@ -0,0 +1,243 @@ +/** + * ============LICENSE_START======================================================= + * Search Data Service + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License ati + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.openecomp.sa.rest; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sa.rest.AnalyzerSchema; +import org.openecomp.sa.rest.FilterSchema; +import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs; +import org.openecomp.sa.searchdbabstraction.util.SearchDbConstants; + +import java.io.File; +import java.io.IOException; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * This class encapsulates the configuration of the predefined + * Analyzer and Filter behaviours that help to tell the document + * store how to index the documents that are provided to it. + */ +public class AnalysisConfiguration { + + /** + * Contains all of the predefined indexing filters. + */ + private FilterSchema[] customFilters; + + /** + * Contains all of the predefined indexing analyzers. + */ + private AnalyzerSchema[] customAnalysers; + + /** + * Indicates whether or not we have imported the filter and + * analyzer configurations. + */ + private AtomicBoolean configured = new AtomicBoolean(false); + + /** + * A json format string which is readable by Elastic Search and defines + * all of the custom filters and analyzers that we need Elastic Search + * to know about. + */ + private static String esSettings = null; + + private static Logger logger = LoggerFactory.getInstance() + .getLogger(AnalysisConfiguration.class.getName()); + + + /** + * Imports the filter and analyzer configuration files and + * builds an Elastic Search readable settings file from the + * contents. + * + * @param filterConfigFile - Location of filter configuration json file + * @param analyzerConfigFile - Location of analyzer configuration json file + */ + public void init(String filterConfigFile, String analyzerConfigFile) { + + if (configured.compareAndSet(false, true)) { + ObjectMapper mapper = new ObjectMapper(); + + File filtersConfig = new File(filterConfigFile); + try { + customFilters = mapper.readValue(filtersConfig, FilterSchema[].class); + } catch (IOException e) { + + // generate log + logger.warn(SearchDbMsgs.FILTERS_CONFIG_FAILURE, filterConfigFile, e.getMessage()); + } + + File analysersConfig = new File(analyzerConfigFile); + try { + customAnalysers = mapper.readValue(analysersConfig, AnalyzerSchema[].class); + } catch (IOException e) { + + // generate log + logger.warn(SearchDbMsgs.ANALYSYS_CONFIG_FAILURE, analyzerConfigFile, e.getMessage()); + } + + esSettings = buildEsIndexSettings(); + } + } + + + /** + * Returns the set of pre-configured filters. + * + * @return - An array of filters. + */ + public FilterSchema[] getFilters() { + return customFilters; + } + + + /** + * Returns the set of pre-configured analyzers. + * + * @return - An array of analyzers. + */ + public AnalyzerSchema[] getAnalyzers() { + init(SearchDbConstants.SDB_FILTER_CONFIG_FILE, SearchDbConstants.SDB_ANALYSIS_CONFIG_FILE); + return customAnalysers; + } + + + /** + * Imports the filter and analyzer configurations and translates those + * into a settings string that will be parseable by Elastic Search. + * + * @return - Elastic Search formatted settings string. + */ + public String getEsIndexSettings() { + + // Generate the es-settings string from our filter and analyzer + // configurations if we have not already done so. + init(SearchDbConstants.SDB_FILTER_CONFIG_FILE, SearchDbConstants.SDB_ANALYSIS_CONFIG_FILE); + + // Now, return the es-settings string. 
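+    // As a rough illustration of what buildEsIndexSettings() produces (the
+    // filter and analyzer names here are hypothetical, not taken from any
+    // shipped configuration), the settings string has the shape:
+    //
+    //   {"analysis": {
+    //     "filter": {"my_filter": { ...filter configuration... }},
+    //     "analyzer": {"my_analyzer": {"type": "custom",
+    //                                  "tokenizer": "whitespace",
+    //                                  "filter": ["my_filter"]}}}}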
+ return esSettings; + } + + + /** + * Constructs a settings string that is readable by Elastic Search based + * on the contents of the filter and analyzer configuration files. + * + * @return Elastic Search formatted settings string. + */ + public String buildEsIndexSettings() { + + StringBuilder sb = new StringBuilder(); + + sb.append("{"); + sb.append("\"analysis\": {"); + + // Define the custom filters. + boolean atLeastOneFilter = false; + sb.append("\"filter\": {"); + AtomicBoolean firstFilter = new AtomicBoolean(true); + for (FilterSchema filter : customFilters) { + + // Append a comma before the next entry, unless it is the + // first one. + if (!firstFilter.compareAndSet(true, false)) { + sb.append(", "); + } + + // Now, build the filter entry. + buildFilterEntry(filter, sb); + atLeastOneFilter = true; + } + sb.append((atLeastOneFilter) ? "}," : "}"); + + // Define the custom analyzers. + sb.append("\"analyzer\": {"); + AtomicBoolean firstAnalyzer = new AtomicBoolean(true); + for (AnalyzerSchema analyzer : customAnalysers) { + + // Prepend a comma before the entry, unless it is the + // first one. + if (!firstAnalyzer.compareAndSet(true, false)) { + sb.append(","); + } + + // Now, construct the entry for this analyzer. + buildAnalyzerEntry(analyzer, sb); + } + sb.append("}"); + + sb.append("}"); + sb.append("}"); + + return sb.toString(); + } + + + /** + * Constructs an ElasticSearch friendly custom filter definition. + * + * @param filter - The filter to generate ElasticSearch json for. + * @param sb - The string builder to append the filter definition + * to. + */ + private void buildFilterEntry(FilterSchema filter, StringBuilder sb) { + + sb.append("\"" + filter.getName()).append("\": {"); + + sb.append(filter.getConfiguration()); + + sb.append("}"); + } + + + /** + * Constructs an ElasticSearch friendly custom analyzer definition. + * + * @param analyzer - The analyzer to generate ElasticSearch json for. + * @param sb - The string builder to append the analyzer definition + * to. + */ + private void buildAnalyzerEntry(AnalyzerSchema analyzer, StringBuilder sb) { + + sb.append("\"").append(analyzer.getName()).append("\": {"); + sb.append("\"type\": \"custom\","); + sb.append("\"tokenizer\": ").append("\"").append(analyzer.getTokenizer()).append("\","); + sb.append("\"filter\": ["); + boolean firstFilter = true; + for (String filter : analyzer.getFilters()) { + if (!firstFilter) { + sb.append(","); + } else { + firstFilter = false; + } + sb.append("\"").append(filter).append("\""); + } + sb.append("]"); + sb.append("}"); + } +} diff --git a/src/main/java/org/openecomp/sa/rest/AnalyzerApi.java b/src/main/java/org/openecomp/sa/rest/AnalyzerApi.java new file mode 100644 index 0000000..430b7a6 --- /dev/null +++ b/src/main/java/org/openecomp/sa/rest/AnalyzerApi.java @@ -0,0 +1,160 @@ +/** + * ============LICENSE_START======================================================= + * Search Data Service + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License ati + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sa.rest; + +import org.openecomp.cl.api.LogFields; +import org.openecomp.cl.api.LogLine; +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.ElasticSearchHttpController; +import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs; + +import java.util.concurrent.atomic.AtomicBoolean; +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.HttpHeaders; +import javax.ws.rs.core.Response; + +@Path("/analyzers") +public class AnalyzerApi { + + private SearchServiceApi searchService = null; + + // Set up the loggers. + private static Logger logger = LoggerFactory.getInstance().getLogger(IndexApi.class.getName()); + private static Logger auditLogger = LoggerFactory.getInstance() + .getAuditLogger(IndexApi.class.getName()); + + public AnalyzerApi(SearchServiceApi searchService) { + this.searchService = searchService; + } + + @GET + public Response processGet(@Context HttpServletRequest request, + @Context HttpHeaders headers, + ApiUtils apiUtils) { + + Response.Status responseCode = Response.Status.INTERNAL_SERVER_ERROR; + String responseString = "Undefined error"; + + // Initialize the MDC Context for logging purposes. + ApiUtils.initMdcContext(request, headers); + + // Validate that the request is correctly authenticated before going + // any further. + try { + + if (!searchService.validateRequest(headers, request, + ApiUtils.Action.GET, ApiUtils.SEARCH_AUTH_POLICY_NAME)) { + logger.warn(SearchDbMsgs.GET_ANALYZERS_FAILURE, "Authentication failure."); + return Response.status(Response.Status.FORBIDDEN).entity("Authentication failure.").build(); + } + + } catch (Exception e) { + + logger.warn(SearchDbMsgs.GET_ANALYZERS_FAILURE, + "Unexpected authentication failure - cause: " + e.getMessage()); + return Response.status(Response.Status.FORBIDDEN).entity("Authentication failure.").build(); + } + + + // Now, build the list of analyzers. + try { + responseString = buildAnalyzerList(ElasticSearchHttpController.getInstance() + .getAnalysisConfig()); + responseCode = Response.Status.OK; + + } catch (Exception e) { + + logger.warn(SearchDbMsgs.GET_ANALYZERS_FAILURE, + "Unexpected failure retrieving analysis configuration - cause: " + e.getMessage()); + responseString = "Failed to retrieve analysis configuration. Cause: " + e.getMessage(); + } + + // Build the HTTP response. + Response response = Response.status(responseCode).entity(responseString).build(); + + // Generate our audit log. + auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST, + new LogFields() + .setField(LogLine.DefinedFields.RESPONSE_CODE, responseCode.getStatusCode()) + .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, responseCode.getStatusCode()), + (request != null) ? 
request.getMethod() : "Unknown", + (request != null) ? request.getRequestURL().toString() : "Unknown", + (request != null) ? request.getRemoteHost() : "Unknown", + Integer.toString(response.getStatus())); + + // Clear the MDC context so that no other transaction inadvertently + // uses our transaction id. + ApiUtils.clearMdcContext(); + + return response; + } + + + /** + * This method takes a list of analyzer objects and generates a simple json + * structure to enumerate them. + * + *
<p>
Note, this includes only the aspects of the analyzer object that we want + * to make public to an external client. + * + * @param analysisConfig - The analysis configuration object to extract the + * analyzers from. + * @return - A json string enumerating the defined analyzers. + */ + private String buildAnalyzerList(AnalysisConfiguration analysisConfig) { + + StringBuilder sb = new StringBuilder(); + + sb.append("{"); + AtomicBoolean firstAnalyzer = new AtomicBoolean(true); + for (AnalyzerSchema analyzer : analysisConfig.getAnalyzers()) { + + if (!firstAnalyzer.compareAndSet(true, false)) { + sb.append(", "); + } + + sb.append("{"); + sb.append("\"name\": \"").append(analyzer.getName()).append("\", "); + sb.append("\"description\": \"").append(analyzer.getDescription()).append("\", "); + sb.append("\"behaviours\": ["); + AtomicBoolean firstBehaviour = new AtomicBoolean(true); + for (String behaviour : analyzer.getBehaviours()) { + if (!firstBehaviour.compareAndSet(true, false)) { + sb.append(", "); + } + sb.append("\"").append(behaviour).append("\""); + } + sb.append("]"); + sb.append("}"); + } + sb.append("}"); + + return sb.toString(); + } +} diff --git a/src/main/java/org/openecomp/sa/rest/ApiUtils.java b/src/main/java/org/openecomp/sa/rest/ApiUtils.java new file mode 100644 index 0000000..4c043d4 --- /dev/null +++ b/src/main/java/org/openecomp/sa/rest/ApiUtils.java @@ -0,0 +1,180 @@ +/** + * ============LICENSE_START======================================================= + * Search Data Service + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License ati + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sa.rest; + +import org.openecomp.cl.mdc.MdcContext; +import org.openecomp.sa.searchdbabstraction.util.SearchDbConstants; +import org.slf4j.MDC; + +import java.util.UUID; +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.core.HttpHeaders; +import javax.ws.rs.core.Response; + + +public class ApiUtils { + + public static final String SEARCH_AUTH_POLICY_NAME = "search"; + public static final String URL_PREFIX = "services/search-data-service/v1/search"; + + public enum Action { + POST, GET, PUT, DELETE + } + + ; + + + /** + * This method uses the contents of the supplied HTTP headers and request + * structures to populate the MDC Context used for logging purposes. + * + * @param httpReq - HTTP request structure. + * @param headers - HTTP headers + */ + protected static void initMdcContext(HttpServletRequest httpReq, HttpHeaders headers) { + + // Auto generate a transaction if we were not provided one. 
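+    // For example, a client may supply its own id in the X-TransactionId
+    // header (the UUID shown is illustrative):
+    //
+    //   curl -H "X-TransactionId: 123e4567-e89b-12d3-a456-426614174000" ...
+    //
+    // Requests that omit the header are assigned a freshly generated UUID.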
+ String transId = null; + if (headers != null) { + transId = headers.getRequestHeaders().getFirst("X-TransactionId"); + + if ((transId == null) || (transId.equals(""))) { + transId = UUID.randomUUID().toString(); + } + } + + String fromIp = (httpReq != null) ? httpReq.getRemoteAddr() : ""; + String fromApp = (headers != null) ? headers.getRequestHeaders().getFirst("X-FromAppId") : ""; + + MdcContext.initialize(transId, SearchDbConstants.SDB_SERVICE_NAME, "", fromApp, fromIp); + } + + + protected static void clearMdcContext() { + MDC.clear(); + } + + public static String buildIndexUri(String index) { + + return (URL_PREFIX + "/indexes/") + index; + } + + public static String buildDocumentUri(String index, String documentId) { + + return buildIndexUri(index) + "/documents/" + documentId; + } + + public static boolean validateIndexUri(String uri) { + + // If the URI starts with a leading '/' character, remove it. + uri = uri.startsWith("/") ? uri.substring(1) : uri; + + // Now, tokenize the URI string. + String[] tokens = uri.split("/"); + + return (tokens.length == 6) && (tokens[4].equals("indexes")); + + } + + public static boolean validateDocumentUri(String uri, boolean requireId) { + + // If the URI starts with a leading '/' character, remove it. + uri = uri.startsWith("/") ? uri.substring(1) : uri; + + // Now, tokenize the URI string. + String[] tokens = uri.split("/"); + + if (requireId) { + return (tokens.length == 8) && (tokens[4].equals("indexes") + && (tokens[6].equals("documents"))); + } else { + return ((tokens.length == 8) || (tokens.length == 7)) + && (tokens[4].equals("indexes") && (tokens[6].equals("documents"))); + } + } + + public static String extractIndexFromUri(String uri) { + + // If the URI starts with a leading '/' character, remove it. + uri = uri.startsWith("/") ? uri.substring(1) : uri; + + // Now, tokenize the URI string. + String[] tokens = uri.split("/"); + + int i = 0; + for (String token : tokens) { + if (token.equals("indexes")) { + if (i + 1 < tokens.length) { + return tokens[i + 1]; + } + } + i++; + } + + return null; + } + + public static String extractIdFromUri(String uri) { + + // If the URI starts with a leading '/' character, remove it. + uri = uri.startsWith("/") ? uri.substring(1) : uri; + + // Now, tokenize the URI string. + String[] tokens = uri.split("/"); + + int i = 0; + for (String token : tokens) { + if (token.equals("documents")) { + if (i + 1 < tokens.length) { + return tokens[i + 1]; + } + } + i++; + } + + return null; + } + + public static String getHttpStatusString(int httpStatusCode) { + // Some of the status codes we use are still in draft state in the standards, and are not + // recognized in the javax library. We need to manually translate these to human-readable + // strings. 
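+    // For example, getHttpStatusString(200) resolves to "OK" through
+    // Response.Status, while getHttpStatusString(207) falls through to the
+    // manual "Multi Status" mapping below, since javax.ws.rs defines no
+    // constant for 207.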
+ String statusString = "Unknown"; + Response.Status status = Response.Status.fromStatusCode(httpStatusCode); + + if (status == null) { + switch (httpStatusCode) { + case 207: + statusString = "Multi Status"; + break; + default: + } + } else { + statusString = status.toString(); + } + + return statusString; + } +} diff --git a/src/main/java/org/openecomp/sa/rest/BulkApi.java b/src/main/java/org/openecomp/sa/rest/BulkApi.java new file mode 100644 index 0000000..f74a3c2 --- /dev/null +++ b/src/main/java/org/openecomp/sa/rest/BulkApi.java @@ -0,0 +1,257 @@ +/** + * ============LICENSE_START======================================================= + * Search Data Service + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License ati + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sa.rest; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.github.fge.jsonschema.main.JsonSchema; +import com.github.fge.jsonschema.main.JsonSchemaFactory; +import org.openecomp.cl.api.LogFields; +import org.openecomp.cl.api.LogLine; +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface; +import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException; +import org.openecomp.sa.searchdbabstraction.entity.OperationResult; +import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs; + +import java.io.IOException; +import java.util.concurrent.atomic.AtomicBoolean; +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.Path; +import javax.ws.rs.core.HttpHeaders; +import javax.ws.rs.core.Response; + + +/** + * This class encapsulates the REST end points associated with performing + * bulk operations against the document store. + */ +@Path("/bulk") +public class BulkApi { + + /** + * Indicates whether or not we have performed the one-time static + * initialization required for performing schema validation. + */ + protected static AtomicBoolean validationInitialized = new AtomicBoolean(false); + + /** + * Factory used for importing our payload schema for validation purposes. + */ + protected static JsonSchemaFactory schemaFactory = null; + + /** + * Imported payload schema that will be used by our validation methods. + */ + protected static JsonSchema schema = null; + + protected SearchServiceApi searchService = null; + + // Instantiate the loggers. 
+ private static Logger logger = LoggerFactory.getInstance().getLogger(BulkApi.class.getName()); + private static Logger auditLogger = LoggerFactory.getInstance() + .getAuditLogger(BulkApi.class.getName()); + + + /** + * Create a new instance of the BulkApi end point. + */ + public BulkApi(SearchServiceApi searchService) { + this.searchService = searchService; + } + + + /** + * Processes client requests containing a set of operations to be + * performed in bulk. + * + *
<p>
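+   * The payload is a JSON array of operation objects, each keyed by its
+   * operation type. A hypothetical two-operation example (index name,
+   * document id, etag and document fields are all illustrative):
+   * <pre>
+   *   [ {"create": {"metaData": {"url": "/indexes/my-index/documents/"},
+   *                 "document": {"name": "smith"}}},
+   *     {"delete": {"metaData": {"url": "/indexes/my-index/documents/doc-1",
+   *                              "etag": "3"}}} ]
+   * </pre>
+   *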
Method: POST + * + * @param operations - JSON structure enumerating the operations to be + * performed. + * @param request - Raw HTTP request. + * @param headers - HTTP headers. + * @return - A standard REST response structure. + */ + public Response processPost(String operations, + HttpServletRequest request, + HttpHeaders headers, + DocumentStoreInterface documentStore, + ApiUtils apiUtils) { + + + // Initialize the MDC Context for logging purposes. + ApiUtils.initMdcContext(request, headers); + + // Set a default result code and entity string for the request. + int resultCode = 500; + String resultString = "Unexpected error"; + + if (logger.isDebugEnabled()) { + logger.debug("SEARCH: Process Bulk Request - operations = [" + + operations.replaceAll("\n", "") + " ]"); + } + + try { + + // Validate that the request is correctly authenticated before going + // any further. + if (!searchService.validateRequest(headers, request, + ApiUtils.Action.POST, ApiUtils.SEARCH_AUTH_POLICY_NAME)) { + logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, "Authentication failure."); + + return buildResponse(Response.Status.FORBIDDEN.getStatusCode(), + "Authentication failure.", request, apiUtils); + } + + } catch (Exception e) { + + // This is a catch all for any unexpected failure trying to perform + // the authentication. + logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, + "Unexpected authentication failure - cause: " + e.getMessage()); + if (logger.isDebugEnabled()) { + logger.debug("Stack Trace:\n" + e.getStackTrace()); + } + + return buildResponse(Response.Status.FORBIDDEN.getStatusCode(), + "Authentication failure - cause " + e.getMessage(), + request, + apiUtils); + } + + // We expect a payload containing a JSON structure enumerating the + // operations to be performed. + if (operations == null) { + logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, "Missing operations list payload"); + + return buildResponse(resultCode, "Missing payload", request, apiUtils); + } + + + // Marshal the supplied json string into a Java object. + ObjectMapper mapper = new ObjectMapper(); + BulkRequest[] requests = null; + try { + requests = mapper.readValue(operations, BulkRequest[].class); + + } catch (IOException e) { + + logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, + "Failed to marshal operations list: " + e.getMessage()); + if (logger.isDebugEnabled()) { + logger.debug("Stack Trace:\n" + e.getStackTrace()); + } + + // Populate the result code and entity string for our HTTP response + // and return the response to the client.. + return buildResponse(javax.ws.rs.core.Response.Status.BAD_REQUEST.getStatusCode(), + "Unable to marshal operations: " + e.getMessage(), + request, + apiUtils); + } + + // Verify that our parsed operations list actually contains some valid + // operations. + if (requests.length == 0) { + logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, "Empty operations list in bulk request"); + + + // Populate the result code and entity string for our HTTP response + // and return the response to the client.. + return buildResponse(javax.ws.rs.core.Response.Status.BAD_REQUEST.getStatusCode(), + "Empty operations list in bulk request", + request, + apiUtils); + } + try { + + // Now, forward the set of bulk operations to the DAO for processing. + OperationResult result = documentStore.performBulkOperations(requests); + + // Populate the result code and entity string for our HTTP response. + resultCode = result.getResultCode(); + resultString = (result.getFailureCause() == null) + ? 
result.getResult() : result.getFailureCause(); + + } catch (DocumentStoreOperationException e) { + + logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, + "Unexpected failure communicating with document store: " + e.getMessage()); + if (logger.isDebugEnabled()) { + logger.debug("Stack Trace:\n" + e.getStackTrace()); + } + + // Populate the result code and entity string for our HTTP response. + resultCode = javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(); + resultString = "Unexpected failure processing bulk operations: " + e.getMessage(); + } + + // Build our HTTP response. + Response response = Response.status(resultCode).entity(resultString).build(); + + // Log the result. + if ((response.getStatus() >= 200) && (response.getStatus() < 300)) { + logger.info(SearchDbMsgs.PROCESSED_BULK_OPERATIONS); + } else { + logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, (String) response.getEntity()); + } + + // Finally, return the HTTP response to the client. + return buildResponse(resultCode, resultString, request, apiUtils); + } + + + /** + * This method generates an audit log and returns an HTTP response object. + * + * @param resultCode - The result code to report. + * @param resultString - The result string to report. + * @param request - The HTTP request to extract data from for the audit log. + * @return - An HTTP response object. + */ + private Response buildResponse(int resultCode, String resultString, + HttpServletRequest request, ApiUtils apiUtils) { + + Response response = Response.status(resultCode).entity(resultString).build(); + + // Generate our audit log. + auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST, + new LogFields() + .setField(LogLine.DefinedFields.RESPONSE_CODE, resultCode) + .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, + ApiUtils.getHttpStatusString(resultCode)), + (request != null) ? request.getMethod() : "Unknown", + (request != null) ? request.getRequestURL().toString() : "Unknown", + (request != null) ? request.getRemoteHost() : "Unknown", + Integer.toString(response.getStatus())); + + // Clear the MDC context so that no other transaction inadvertently + // uses our transaction id. + ApiUtils.clearMdcContext(); + + return response; + } +} diff --git a/src/main/java/org/openecomp/sa/rest/BulkMetaData.java b/src/main/java/org/openecomp/sa/rest/BulkMetaData.java new file mode 100644 index 0000000..8a1a6d6 --- /dev/null +++ b/src/main/java/org/openecomp/sa/rest/BulkMetaData.java @@ -0,0 +1,53 @@ +/** + * ============LICENSE_START======================================================= + * Search Data Service + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License ati + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sa.rest; + +public class BulkMetaData { + + private String url; + private String etag; + + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getEtag() { + return etag; + } + + public void setEtag(String anEtag) { + this.etag = anEtag; + } + + @Override + public String toString() { + return "MetaData: [url=" + url + ", etag=" + etag + "]"; + } +} diff --git a/src/main/java/org/openecomp/sa/rest/BulkOperation.java b/src/main/java/org/openecomp/sa/rest/BulkOperation.java new file mode 100644 index 0000000..cf71ed5 --- /dev/null +++ b/src/main/java/org/openecomp/sa/rest/BulkOperation.java @@ -0,0 +1,65 @@ +/** + * ============LICENSE_START======================================================= + * Search Data Service + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License ati + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sa.rest; + +/** + * This class represents a single instance of an operation to be performed + * as part of a bulk request. + */ +public class BulkOperation { + + /** + * Contains meta data to be passed to the document store for it + * to perform the operation. + */ + private BulkMetaData metaData; + + /** + * Contains the contents of the document to be acted on. + */ + private Document document; + + + public void setMetaData(BulkMetaData metaData) { + this.metaData = metaData; + } + + public BulkMetaData getMetaData() { + return metaData; + } + + public Document getDocument() { + return document; + } + + public void setDocument(Document document) { + this.document = document; + } + + @Override + public String toString() { + return "Operation: [" + metaData.toString() + ", " + document + "]"; + } +} diff --git a/src/main/java/org/openecomp/sa/rest/BulkRequest.java b/src/main/java/org/openecomp/sa/rest/BulkRequest.java new file mode 100644 index 0000000..261f238 --- /dev/null +++ b/src/main/java/org/openecomp/sa/rest/BulkRequest.java @@ -0,0 +1,115 @@ +/** + * ============LICENSE_START======================================================= + * Search Data Service + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License ati + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sa.rest; + + +/** + * This class represents a single instance of a request from the search client + * that would be part of a bundle of such operations sent in a single bulk + * request. + */ +public class BulkRequest { + + public enum OperationType { + CREATE, + UPDATE, + DELETE + } + + private BulkOperation create; + private BulkOperation update; + private BulkOperation delete; + + public BulkOperation getCreate() { + return create; + } + + public void setCreate(BulkOperation create) { + this.create = create; + } + + public BulkOperation getUpdate() { + return update; + } + + public void setUpdate(BulkOperation update) { + this.update = update; + } + + public BulkOperation getDelete() { + return delete; + } + + public void setDelete(BulkOperation delete) { + this.delete = delete; + } + + public OperationType getOperationType() { + + if (create != null) { + return OperationType.CREATE; + } else if (update != null) { + return OperationType.UPDATE; + } else if (delete != null) { + return OperationType.DELETE; + } else { + return null; + } + } + + public BulkOperation getOperation() { + if (create != null) { + return create; + } else if (update != null) { + return update; + } else if (delete != null) { + return delete; + } else { + return null; + } + } + + public String getIndex() { + return ApiUtils.extractIndexFromUri(getOperation().getMetaData().getUrl()); + } + + public String getId() { + return ApiUtils.extractIdFromUri(getOperation().getMetaData().getUrl()); + } + + @Override + public String toString() { + + if (create != null) { + return "create: [" + create.toString() + "]\n"; + } else if (update != null) { + return "update: [" + update.toString() + "]\n"; + } else if (delete != null) { + return "delete: [" + delete.toString() + "]\n"; + } else { + return "UNDEFINED"; + } + } +} diff --git a/src/main/java/org/openecomp/sa/rest/Document.java b/src/main/java/org/openecomp/sa/rest/Document.java new file mode 100644 index 0000000..a0be736 --- /dev/null +++ b/src/main/java/org/openecomp/sa/rest/Document.java @@ -0,0 +1,65 @@ +/** + * ============LICENSE_START======================================================= + * Search Data Service + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License ati + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sa.rest; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +import java.util.HashMap; +import java.util.Map; + +public class Document { + private Map fields = new HashMap(); + + @JsonAnyGetter + public Map getFields() { + return fields; + } + + @JsonAnySetter + public void setField(String name, Object value) { + fields.put(name, value); + } + + public String toJson() throws JsonProcessingException { + + ObjectMapper mapper = new ObjectMapper(); + return mapper.writeValueAsString(this); + } + + + @Override + public String toString() { + String str = "Document: ["; + for (String key : fields.keySet()) { + str += key + ": " + fields.get(key); + } + str += "]"; + + return str; + } +} diff --git a/src/main/java/org/openecomp/sa/rest/DocumentApi.java b/src/main/java/org/openecomp/sa/rest/DocumentApi.java new file mode 100644 index 0000000..e3c15a5 --- /dev/null +++ b/src/main/java/org/openecomp/sa/rest/DocumentApi.java @@ -0,0 +1,505 @@ +/** + * ============LICENSE_START======================================================= + * Search Data Service + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License ati + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.openecomp.sa.rest; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.openecomp.cl.api.LogFields; +import org.openecomp.cl.api.LogLine; +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreDataEntityImpl; +import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface; +import org.openecomp.sa.searchdbabstraction.entity.AggregationResults; +import org.openecomp.sa.searchdbabstraction.entity.DocumentOperationResult; +import org.openecomp.sa.searchdbabstraction.entity.SearchOperationResult; +import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs; +import org.openecomp.sa.searchdbabstraction.searchapi.SearchStatement; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.ws.rs.core.HttpHeaders; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.Response.Status; + +public class DocumentApi { + private static final String REQUEST_HEADER_RESOURCE_VERSION = "If-Match"; + private static final String RESPONSE_HEADER_RESOURCE_VERSION = "ETag"; + + protected SearchServiceApi searchService = null; + + private Logger logger = LoggerFactory.getInstance().getLogger(DocumentApi.class.getName()); + private Logger auditLogger = LoggerFactory.getInstance() + .getAuditLogger(DocumentApi.class.getName()); + + public DocumentApi(SearchServiceApi searchService) { + this.searchService = searchService; + } + + public Response processPost(String content, HttpServletRequest request, HttpHeaders headers, + HttpServletResponse httpResponse, String index, + DocumentStoreInterface documentStore) { + + // Initialize the MDC Context for logging purposes. + ApiUtils.initMdcContext(request, headers); + + try { + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_EMPTY); + if (content == null) { + return handleError(request, content, Status.BAD_REQUEST); + } + + boolean isValid; + try { + isValid = searchService.validateRequest(headers, request, ApiUtils.Action.POST, + ApiUtils.SEARCH_AUTH_POLICY_NAME); + } catch (Exception e) { + logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL, + "DocumentApi.processPost", + e.getMessage()); + return handleError(request, content, Status.FORBIDDEN); + } + + if (!isValid) { + return handleError(request, content, Status.FORBIDDEN); + } + + DocumentStoreDataEntityImpl document = new DocumentStoreDataEntityImpl(); + document.setContent(content); + + DocumentOperationResult result = documentStore.createDocument(index, document); + String output = null; + if (result.getResultCode() >= 200 && result.getResultCode() <= 299) { + output = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getDocument()); + } else { + output = result.getError() != null + ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError()) + : result.getFailureCause(); + } + + if (httpResponse != null) { + httpResponse.setHeader(RESPONSE_HEADER_RESOURCE_VERSION, result.getResultVersion()); + } + Response response = Response.status(result.getResultCode()).entity(output).build(); + logResult(request, Response.Status.fromStatusCode(response.getStatus())); + + // Clear the MDC context so that no other transaction inadvertently + // uses our transaction id. 
+ ApiUtils.clearMdcContext(); + + return response; + } catch (Exception e) { + return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR); + } + } + + public Response processPut(String content, HttpServletRequest request, HttpHeaders headers, + HttpServletResponse httpResponse, String index, + String id, DocumentStoreInterface documentStore) { + + // Initialize the MDC Context for logging purposes. + ApiUtils.initMdcContext(request, headers); + + try { + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_EMPTY); + if (content == null) { + return handleError(request, content, Status.BAD_REQUEST); + } + + boolean isValid; + try { + isValid = searchService.validateRequest(headers, request, ApiUtils.Action.PUT, + ApiUtils.SEARCH_AUTH_POLICY_NAME); + } catch (Exception e) { + logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL, + "DocumentApi.processPut", + e.getMessage()); + return handleError(request, content, Status.FORBIDDEN); + } + + if (!isValid) { + return handleError(request, content, Status.FORBIDDEN); + } + + String resourceVersion = headers.getRequestHeaders() + .getFirst(REQUEST_HEADER_RESOURCE_VERSION); + + DocumentStoreDataEntityImpl document = new DocumentStoreDataEntityImpl(); + document.setId(id); + document.setContent(content); + document.setVersion(resourceVersion); + + DocumentOperationResult result = null; + if (resourceVersion == null) { + result = documentStore.createDocument(index, document); + } else { + result = documentStore.updateDocument(index, document); + } + + String output = null; + if (result.getResultCode() >= 200 && result.getResultCode() <= 299) { + output = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getDocument()); + } else { + output = result.getError() != null + ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError()) + : result.getFailureCause(); + } + if (httpResponse != null) { + httpResponse.setHeader(RESPONSE_HEADER_RESOURCE_VERSION, result.getResultVersion()); + } + Response response = Response.status(result.getResultCode()).entity(output).build(); + logResult(request, Response.Status.fromStatusCode(response.getStatus())); + + // Clear the MDC context so that no other transaction inadvertently + // uses our transaction id. + ApiUtils.clearMdcContext(); + + return response; + } catch (Exception e) { + return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR); + } + } + + public Response processDelete(String content, HttpServletRequest request, HttpHeaders headers, + HttpServletResponse httpResponse, String index, String id, + DocumentStoreInterface documentStore) { + + // Initialize the MDC Context for logging purposes. 
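+    // Note that deletes rely on optimistic locking: the client must echo
+    // back, in the If-Match header, the ETag value it received when the
+    // document was created or last updated, otherwise the request is
+    // rejected with a 400. A hypothetical invocation (index, id and version
+    // are illustrative):
+    //
+    //   curl -X DELETE -H "If-Match: 3" \
+    //     .../search/indexes/my-index/documents/doc-1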
+ ApiUtils.initMdcContext(request, headers); + + try { + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_EMPTY); + boolean isValid; + try { + isValid = searchService.validateRequest(headers, request, ApiUtils.Action.DELETE, + ApiUtils.SEARCH_AUTH_POLICY_NAME); + } catch (Exception e) { + logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL, + "DocumentApi.processDelete", + e.getMessage()); + return handleError(request, content, Status.FORBIDDEN); + } + + if (!isValid) { + return handleError(request, content, Status.FORBIDDEN); + } + + String resourceVersion = headers.getRequestHeaders() + .getFirst(REQUEST_HEADER_RESOURCE_VERSION); + if (resourceVersion == null || resourceVersion.isEmpty()) { + return handleError(request, "Request header 'If-Match' missing", + javax.ws.rs.core.Response.Status.BAD_REQUEST); + } + + DocumentStoreDataEntityImpl document = new DocumentStoreDataEntityImpl(); + document.setId(id); + document.setVersion(resourceVersion); + + DocumentOperationResult result = documentStore.deleteDocument(index, document); + String output = null; + if (!(result.getResultCode() >= 200 && result.getResultCode() <= 299)) { // + output = result.getError() != null + ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError()) + : result.getFailureCause(); + } + + if (httpResponse != null) { + httpResponse.setHeader(RESPONSE_HEADER_RESOURCE_VERSION, result.getResultVersion()); + } + Response response; + if (output == null) { + response = Response.status(result.getResultCode()).build(); + } else { + response = Response.status(result.getResultCode()).entity(output).build(); + } + + logResult(request, Response.Status.fromStatusCode(response.getStatus())); + + // Clear the MDC context so that no other transaction inadvertently + // uses our transaction id. + ApiUtils.clearMdcContext(); + + return response; + } catch (Exception e) { + return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR); + } + } + + public Response processGet(String content, HttpServletRequest request, HttpHeaders headers, + HttpServletResponse httpResponse, String index, String id, + DocumentStoreInterface documentStore) { + + // Initialize the MDC Context for logging purposes. + ApiUtils.initMdcContext(request, headers); + + try { + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_EMPTY); + boolean isValid; + try { + isValid = searchService.validateRequest(headers, request, ApiUtils.Action.GET, + ApiUtils.SEARCH_AUTH_POLICY_NAME); + } catch (Exception e) { + logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL, + "DocumentApi.processGet", + e.getMessage()); + return handleError(request, content, Status.FORBIDDEN); + } + + if (!isValid) { + return handleError(request, content, Status.FORBIDDEN); + } + + String resourceVersion = headers.getRequestHeaders() + .getFirst(REQUEST_HEADER_RESOURCE_VERSION); + + DocumentStoreDataEntityImpl document = new DocumentStoreDataEntityImpl(); + document.setId(id); + document.setVersion(resourceVersion); + + DocumentOperationResult result = documentStore.getDocument(index, document); + String output = null; + if (result.getResultCode() >= 200 && result.getResultCode() <= 299) { + output = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getDocument()); + } else { + output = result.getError() != null + ? 
mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError()) + : result.getFailureCause(); + } + if (httpResponse != null) { + httpResponse.setHeader(RESPONSE_HEADER_RESOURCE_VERSION, result.getResultVersion()); + } + Response response = Response.status(result.getResultCode()).entity(output).build(); + logResult(request, Response.Status.fromStatusCode(response.getStatus())); + + // Clear the MDC context so that no other transaction inadvertently + // uses our transaction id. + ApiUtils.clearMdcContext(); + + return response; + } catch (Exception e) { + return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR); + } + } + + public Response processSearchWithGet(String content, HttpServletRequest request, + HttpHeaders headers, String index, + String queryText, DocumentStoreInterface documentStore) { + + // Initialize the MDC Context for logging purposes. + ApiUtils.initMdcContext(request, headers); + + try { + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_EMPTY); + + boolean isValid; + try { + isValid = searchService.validateRequest(headers, request, ApiUtils.Action.GET, + ApiUtils.SEARCH_AUTH_POLICY_NAME); + } catch (Exception e) { + logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL, + "processSearchWithGet", + e.getMessage()); + return handleError(request, content, Status.FORBIDDEN); + } + + if (!isValid) { + return handleError(request, content, Status.FORBIDDEN); + } + + SearchOperationResult result = documentStore.search(index, queryText); + String output = null; + if (result.getResultCode() >= 200 && result.getResultCode() <= 299) { + output = mapper.writerWithDefaultPrettyPrinter() + .writeValueAsString(result.getSearchResult()); + } else { + output = result.getError() != null + ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError()) + : result.getFailureCause(); + } + Response response = Response.status(result.getResultCode()).entity(output).build(); + + // Clear the MDC context so that no other transaction inadvertently + // uses our transaction id. + ApiUtils.clearMdcContext(); + + return response; + } catch (Exception e) { + return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR); + } + } + + public Response queryWithGetWithPayload(String content, HttpServletRequest request, + HttpHeaders headers, String index, + DocumentStoreInterface documentStore) { + + // Initialize the MDC Context for logging purposes. + ApiUtils.initMdcContext(request, headers); + + logger.info(SearchDbMsgs.PROCESS_PAYLOAD_QUERY, "GET", (request != null) + ? request.getRequestURL().toString() : ""); + if (logger.isDebugEnabled()) { + logger.debug("Request Body: " + content); + } + return processQuery(index, content, request, headers, documentStore); + } + + public Response processSearchWithPost(String content, HttpServletRequest request, + HttpHeaders headers, String index, + DocumentStoreInterface documentStore) { + + // Initialize the MDC Context for logging purposes. + ApiUtils.initMdcContext(request, headers); + + logger.info(SearchDbMsgs.PROCESS_PAYLOAD_QUERY, "POST", (request != null) + ? request.getRequestURL().toString() : ""); + if (logger.isDebugEnabled()) { + logger.debug("Request Body: " + content); + } + + return processQuery(index, content, request, headers, documentStore); + } + + /** + * Common handler for query requests. This is called by both the GET with + * payload and POST with payload variants of the query endpoint. 
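+   *
+   * <p>The request body is parsed into a SearchStatement and translated to
+   * Elastic Search syntax before being handed to the document store. For
+   * illustration only (the authoritative grammar is whatever SearchStatement
+   * accepts; this sample is hypothetical):
+   * <pre>
+   *   {"queries": [{"must": {"match": {"field": "name", "value": "smith"}}}]}
+   * </pre>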
+ * + * @param index - The index to be queried against. + * @param content - The payload containing the query structure. + * @param request - The HTTP request. + * @param headers - The HTTP headers. + * @return - A standard HTTP response. + */ + private Response processQuery(String index, String content, HttpServletRequest request, + HttpHeaders headers, DocumentStoreInterface documentStore) { + + try { + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_EMPTY); + + // Make sure that we were supplied a payload before proceeding. + if (content == null) { + return handleError(request, content, Status.BAD_REQUEST); + } + + // Validate that the request has the appropriate authorization. + boolean isValid; + try { + isValid = searchService.validateRequest(headers, request, ApiUtils.Action.POST, + ApiUtils.SEARCH_AUTH_POLICY_NAME); + + } catch (Exception e) { + logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL, + "processQuery", + e.getMessage()); + return handleError(request, content, Status.FORBIDDEN); + } + + if (!isValid) { + return handleError(request, content, Status.FORBIDDEN); + } + + SearchStatement searchStatement; + + try { + // Marshall the supplied request payload into a search statement + // object. + searchStatement = mapper.readValue(content, SearchStatement.class); + + } catch (Exception e) { + return handleError(request, e.getMessage(), Status.BAD_REQUEST); + } + + // Now, submit the search statement, translated into + // ElasticSearch syntax, to the document store DAO. + SearchOperationResult result = documentStore.searchWithPayload(index, + searchStatement.toElasticSearch()); + String output = null; + if (result.getResultCode() >= 200 && result.getResultCode() <= 299) { + output = prepareOutput(mapper, result); + } else { + output = result.getError() != null + ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError()) + : result.getFailureCause(); + } + Response response = Response.status(result.getResultCode()).entity(output).build(); + + // Clear the MDC context so that no other transaction inadvertently + // uses our transaction id. + ApiUtils.clearMdcContext(); + + return response; + + } catch (Exception e) { + return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR); + } + } + + private String prepareOutput(ObjectMapper mapper, SearchOperationResult result) + throws JsonProcessingException { + StringBuffer output = new StringBuffer(); + output.append("{\r\n\"searchResult\":"); + output.append(mapper.writerWithDefaultPrettyPrinter() + .writeValueAsString(result.getSearchResult())); + AggregationResults aggs = result.getAggregationResult(); + if (aggs != null) { + output.append(",\r\n\"aggregationResult\":"); + output.append(mapper.setSerializationInclusion(Include.NON_NULL) + .writerWithDefaultPrettyPrinter().writeValueAsString(aggs)); + } + output.append("\r\n}"); + return output.toString(); + } + + private Response handleError(HttpServletRequest request, String message, Status status) { + logResult(request, status); + return Response.status(status).entity(message).type(MediaType.APPLICATION_JSON).build(); + } + + void logResult(HttpServletRequest request, Response.Status status) { + + logger.info(SearchDbMsgs.PROCESS_REST_REQUEST, (request != null) ? request.getMethod() : "", + (request != null) ? request.getRequestURL().toString() : "", + (request != null) ? 
request.getRemoteHost() : "", Integer.toString(status.getStatusCode()));
+
+    auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, status.getStatusCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, status.getReasonPhrase()),
+        (request != null) ? request.getMethod() : "",
+        (request != null) ? request.getRequestURL().toString() : "",
+        (request != null) ? request.getRemoteHost() : "", Integer.toString(status.getStatusCode()));
+
+    // Clear the MDC context so that no other transaction inadvertently
+    // uses our transaction id.
+    ApiUtils.clearMdcContext();
+  }
+}
diff --git a/src/main/java/org/openecomp/sa/rest/IndexApi.java b/src/main/java/org/openecomp/sa/rest/IndexApi.java
new file mode 100644
index 0000000..2af2f72
--- /dev/null
+++ b/src/main/java/org/openecomp/sa/rest/IndexApi.java
@@ -0,0 +1,378 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.openecomp.cl.api.LogFields;
+import org.openecomp.cl.api.LogLine;
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException;
+import org.openecomp.sa.searchdbabstraction.entity.OperationResult;
+import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
+
+import java.io.IOException;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+
+
+/**
+ * This class encapsulates the REST end points associated with manipulating
+ * indexes in the document store.
+ */
+public class IndexApi {
+
+  protected SearchServiceApi searchService = null;
+
+  /**
+   * Configuration for the custom analyzers that will be used for indexing.
+   */
+  protected AnalysisConfiguration analysisConfig;
+
+  // Set up the loggers.
+  private static Logger logger = LoggerFactory.getInstance()
+      .getLogger(IndexApi.class.getName());
+  private static Logger auditLogger = LoggerFactory.getInstance()
+      .getAuditLogger(IndexApi.class.getName());
+
+
+  public IndexApi(SearchServiceApi searchService) {
+    this.searchService = searchService;
+    init();
+  }
+
+
+  /**
+   * Initializes the end point.
+   */
+  public void init() {
+
+    // Instantiate our analysis configuration object.
+    analysisConfig = new AnalysisConfiguration();
+  }
+
+
+  /**
+   * Processes client requests to create a new index and document type in the
+   * document store.
+   *
+   * @param documentSchema - The contents of the request body which is expected
+   *                       to be a JSON structure which corresponds to the
+   *                       schema defined in document.schema.json
+   * @param request        - The HTTP request.
+   * @param headers        - The HTTP headers.
+   * @param index          - The name of the index to create.
+   * @param documentStore  - The DAO to use for interacting with the document store.
+   * @return - A Standard REST response
+   */
+  public Response processCreateIndex(String documentSchema,
+                                     HttpServletRequest request,
+                                     HttpHeaders headers,
+                                     String index,
+                                     DocumentStoreInterface documentStore) {
+
+    int resultCode = 500;
+    String resultString = "Unexpected error";
+
+    // Initialize the MDC Context for logging purposes.
+    ApiUtils.initMdcContext(request, headers);
+
+    // Validate that the request is correctly authenticated before going
+    // any further.
+    try {
+
+      if (!searchService.validateRequest(headers, request,
+          ApiUtils.Action.POST, ApiUtils.SEARCH_AUTH_POLICY_NAME)) {
+        logger.warn(SearchDbMsgs.INDEX_CREATE_FAILURE, index, "Authentication failure.");
+        return errorResponse(Response.Status.FORBIDDEN, "Authentication failure.", request);
+      }
+
+    } catch (Exception e) {
+
+      logger.warn(SearchDbMsgs.INDEX_CREATE_FAILURE, index,
+          "Unexpected authentication failure - cause: " + e.getMessage());
+      return errorResponse(Response.Status.FORBIDDEN, "Authentication failure.", request);
+    }
+
+
+    // We expect a payload containing the document schema.  Make sure
+    // it is present.
+    if (documentSchema == null) {
+      logger.warn(SearchDbMsgs.INDEX_CREATE_FAILURE, index, "Missing document schema payload");
+      return errorResponse(Response.Status.BAD_REQUEST, "Missing payload", request);
+    }
+
+    try {
+
+      // Marshal the supplied json string into a document schema object.
+      ObjectMapper mapper = new ObjectMapper();
+      DocumentSchema schema = mapper.readValue(documentSchema, DocumentSchema.class);
+
+      // Now, ask the DAO to create the index.
+      OperationResult result = documentStore.createIndex(index, schema);
+
+      // Extract the result code and string from the OperationResult
+      // object so that we can use them to generate a standard REST
+      // response.
+      // Note that we want to return a 201 result code on a successful
+      // create, so if we get back a 200 from the document store,
+      // translate that into a 201.
+      resultCode = (result.getResultCode() == 200) ? 201 : result.getResultCode();
+      resultString = (result.getFailureCause() == null)
+          ? result.getResult() : result.getFailureCause();
+
+    } catch (com.fasterxml.jackson.core.JsonParseException
+        | com.fasterxml.jackson.databind.JsonMappingException e) {
+
+      // We were unable to marshal the supplied json string into a valid
+      // document schema, so return an appropriate error response.
+      resultCode = javax.ws.rs.core.Response.Status.BAD_REQUEST.getStatusCode();
+      resultString = "Malformed schema: " + e.getMessage();
+
+    } catch (IOException e) {
+
+      // We'll treat this as a general internal error.
+      resultCode = javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR.getStatusCode();
+      resultString = "IO Failure: " + e.getMessage();
+    }
+
+    Response response = Response.status(resultCode).entity(resultString).build();
+
+    // Log the result.
+    if ((response.getStatus() >= 200) && (response.getStatus() < 300)) {
+      logger.info(SearchDbMsgs.CREATED_INDEX, index);
+    } else {
+      logger.warn(SearchDbMsgs.INDEX_CREATE_FAILURE, index, resultString);
+    }
+
+    // Generate our audit log.
+    auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, resultCode)
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION,
+                Response.Status.fromStatusCode(resultCode).toString()),
+        (request != null) ? request.getMethod() : "Unknown",
+        (request != null) ? request.getRequestURL().toString() : "Unknown",
+        (request != null) ? request.getRemoteHost() : "Unknown",
+        Integer.toString(response.getStatus()));
+
+    // Clear the MDC context so that no other transaction inadvertently
+    // uses our transaction id.
+    ApiUtils.clearMdcContext();
+
+    // Finally, return the response.
+    return response;
+  }
+
+
+  /**
+   * Processes a client request to remove an index from the document store.
+   * Note that this implicitly deletes all documents contained within that index.
+   *
+   * @param index         - The index to be deleted.
+   * @param request       - The HTTP request.
+   * @param headers       - The HTTP headers.
+   * @param documentStore - The DAO to use for interacting with the document store.
+   * @return - A standard REST response.
+   */
+  public Response processDelete(String index,
+                                HttpServletRequest request,
+                                HttpHeaders headers,
+                                DocumentStoreInterface documentStore) {
+
+    // Initialize the MDC Context for logging purposes.
+    ApiUtils.initMdcContext(request, headers);
+
+    // Set a default response in case something unexpected goes wrong.
+    Response response = Response.status(javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR)
+        .entity("Unknown")
+        .build();
+
+    // Validate that the request is correctly authenticated before going
+    // any further.
+    try {
+
+      if (!searchService.validateRequest(headers, request, ApiUtils.Action.POST,
+          ApiUtils.SEARCH_AUTH_POLICY_NAME)) {
+        logger.warn(SearchDbMsgs.INDEX_DELETE_FAILURE, index, "Authentication failure.");
+        return errorResponse(Response.Status.FORBIDDEN, "Authentication failure.", request);
+      }
+
+    } catch (Exception e) {
+
+      logger.warn(SearchDbMsgs.INDEX_DELETE_FAILURE, index,
+          "Unexpected authentication failure - cause: " + e.getMessage());
+      return errorResponse(Response.Status.FORBIDDEN, "Authentication failure.", request);
+    }
+
+
+    try {
+      // Send the request to the document store.
+      response = responseFromOperationResult(documentStore.deleteIndex(index));
+
+    } catch (DocumentStoreOperationException e) {
+      response = Response.status(javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR)
+          .entity(e.getMessage())
+          .build();
+    }
+
+
+    // Log the result.
+    if ((response.getStatus() >= 200) && (response.getStatus() < 300)) {
+      logger.info(SearchDbMsgs.DELETED_INDEX, index);
+    } else {
+      logger.warn(SearchDbMsgs.INDEX_DELETE_FAILURE, index, (String) response.getEntity());
+    }
+
+    // Generate our audit log.
+    auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, response.getStatus())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION,
+                response.getStatusInfo().getReasonPhrase()),
+        (request != null) ? request.getMethod() : "Unknown",
+        (request != null) ? request.getRequestURL().toString() : "Unknown",
+        (request != null) ? request.getRemoteHost() : "Unknown",
+        Integer.toString(response.getStatus()));
+
+    // Clear the MDC context so that no other transaction inadvertently
+    // uses our transaction id.
+ ApiUtils.clearMdcContext(); + + return response; + } + + + /** + * This method takes a JSON format document schema and produces a set of + * field mappings in the form that Elastic Search expects. + * + * @param documentSchema - A document schema expressed as a JSON string. + * @return - A JSON string expressing an Elastic Search mapping configuration. + * @throws com.fasterxml.jackson.core.JsonParseException + * @throws com.fasterxml.jackson.databind.JsonMappingException + * @throws IOException + */ + public String generateDocumentMappings(String documentSchema) + throws com.fasterxml.jackson.core.JsonParseException, + com.fasterxml.jackson.databind.JsonMappingException, IOException { + + // Unmarshal the json content into a document schema object. + ObjectMapper mapper = new ObjectMapper(); + DocumentSchema schema = mapper.readValue(documentSchema, DocumentSchema.class); + + // Now, generate the Elastic Search mapping json and return it. + StringBuilder sb = new StringBuilder(); + sb.append("{"); + sb.append("\"properties\": {"); + + boolean first = true; + for (DocumentFieldSchema field : schema.getFields()) { + + if (!first) { + sb.append(","); + } else { + first = false; + } + + sb.append("\"").append(field.getName()).append("\": {"); + + // The field type is mandatory. + sb.append("\"type\": \"").append(field.getDataType()).append("\""); + + // If the index field was specified, then append it. + if (field.getSearchable() != null) { + sb.append(", \"index\": \"").append(field.getSearchable() + ? "analyzed" : "not_analyzed").append("\""); + } + + // If a search analyzer was specified, then append it. + if (field.getSearchAnalyzer() != null) { + sb.append(", \"search_analyzer\": \"").append(field.getSearchAnalyzer()).append("\""); + } + + // If an indexing analyzer was specified, then append it. + if (field.getIndexAnalyzer() != null) { + sb.append(", \"analyzer\": \"").append(field.getIndexAnalyzer()).append("\""); + } else { + sb.append(", \"analyzer\": \"").append("whitespace").append("\""); + } + + sb.append("}"); + } + + sb.append("}"); + sb.append("}"); + + logger.debug("Generated document mappings: " + sb.toString()); + + return sb.toString(); + } + + + /** + * Converts an {@link OperationResult} to a standard REST {@link Response} + * object. + * + * @param result - The {@link OperationResult} to be converted. + * @return - The equivalent {@link Response} object. + */ + public Response responseFromOperationResult(OperationResult result) { + + if ((result.getResultCode() >= 200) && (result.getResultCode() < 300)) { + return Response.status(result.getResultCode()).entity(result.getResult()).build(); + } else { + if (result.getFailureCause() != null) { + return Response.status(result.getResultCode()).entity(result.getFailureCause()).build(); + } else { + return Response.status(result.getResultCode()).entity(result.getResult()).build(); + } + } + } + + public Response errorResponse(Response.Status status, String msg, HttpServletRequest request) { + + // Generate our audit log. + auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST, + new LogFields() + .setField(LogLine.DefinedFields.RESPONSE_CODE, status.getStatusCode()) + .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, status.getReasonPhrase()), + (request != null) ? request.getMethod() : "Unknown", + (request != null) ? request.getRequestURL().toString() : "Unknown", + (request != null) ? 
request.getRemoteHost() : "Unknown",
+        Integer.toString(status.getStatusCode()));
+
+    // Clear the MDC context so that no other transaction inadvertently
+    // uses our transaction id.
+    ApiUtils.clearMdcContext();
+
+    return Response.status(status)
+        .entity(msg)
+        .build();
+  }
+
+
+}
diff --git a/src/main/java/org/openecomp/sa/rest/SearchServiceApi.java b/src/main/java/org/openecomp/sa/rest/SearchServiceApi.java
new file mode 100644
index 0000000..f2ad6db
--- /dev/null
+++ b/src/main/java/org/openecomp/sa/rest/SearchServiceApi.java
@@ -0,0 +1,259 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import org.openecomp.sa.auth.SearchDbServiceAuth;
+import org.openecomp.sa.rest.ApiUtils.Action;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.ElasticSearchHttpController;
+
+import java.security.cert.X509Certificate;
+import javax.security.auth.x500.X500Principal;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+
+public class SearchServiceApi {
+
+  /**
+   * The Data Access Object that we will use to interact with the
+   * document store.
+   */
+  protected DocumentStoreInterface documentStore = null;
+  protected ApiUtils apiUtils = null;
+
+
+  /**
+   * Create a new instance of the end point.
+   */
+  public SearchServiceApi() {
+
+    // Perform one-time initialization.
+    init();
+  }
+
+
+  /**
+   * Performs all one-time initialization required for the end point.
+   */
+  public void init() {
+
+    // Instantiate our Document Store DAO.
+    documentStore = ElasticSearchHttpController.getInstance();
+
+    apiUtils = new ApiUtils();
+  }
+
+  @PUT
+  @Path("/indexes/{index}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processCreateIndex(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpHeaders headers,
+                                     @PathParam("index") String index) {

+    // Forward the request to our index API to create the index.
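+    // For illustration only, a create-index request might look something like
+    // the sketch below. The index name and payload fields shown here are
+    // hypothetical; the authoritative payload structure is the schema defined
+    // in document.schema.json.
+    //
+    //   PUT /indexes/my-index
+    //   {
+    //     "fields": [
+    //       {"name": "firstName", "data-type": "string", "searchable": true}
+    //     ]
+    //   }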
+    IndexApi indexApi = new IndexApi(this);
+    return indexApi.processCreateIndex(requestBody, request, headers, index, documentStore);
+  }
+
+
+  @DELETE
+  @Path("/indexes/{index}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processDeleteIndex(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpHeaders headers,
+                                     @PathParam("index") String index) {
+
+    // Forward the request to our index API to delete the index.
+    IndexApi indexApi = new IndexApi(this);
+    return indexApi.processDelete(index, request, headers, documentStore);
+  }
+
+
+  @GET
+  @Path("/indexes/{index}/documents/{id}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processGetDocument(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpServletResponse httpResponse,
+                                     @Context HttpHeaders headers,
+                                     @PathParam("index") String index,
+                                     @PathParam("id") String id) {
+
+    // Forward the request to our document API to retrieve the document.
+    DocumentApi documentApi = new DocumentApi(this);
+    return documentApi.processGet(requestBody, request, headers, httpResponse,
+        index, id, documentStore);
+  }
+
+  @POST
+  @Path("/indexes/{index}/documents")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processCreateDocWithoutId(String requestBody,
+                                            @Context HttpServletRequest request,
+                                            @Context HttpServletResponse httpResponse,
+                                            @Context HttpHeaders headers,
+                                            @PathParam("index") String index) {
+
+    // Forward the request to our document API to create the document.
+    DocumentApi documentApi = new DocumentApi(this);
+    return documentApi.processPost(requestBody, request, headers, httpResponse,
+        index, documentStore);
+  }
+
+  @PUT
+  @Path("/indexes/{index}/documents/{id}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processUpsertDoc(String requestBody,
+                                   @Context HttpServletRequest request,
+                                   @Context HttpServletResponse httpResponse,
+                                   @Context HttpHeaders headers,
+                                   @PathParam("index") String index,
+                                   @PathParam("id") String id) {
+
+    // Forward the request to our document API to upsert the document.
+    DocumentApi documentApi = new DocumentApi(this);
+    return documentApi.processPut(requestBody, request, headers, httpResponse,
+        index, id, documentStore);
+  }
+
+  @DELETE
+  @Path("/indexes/{index}/documents/{id}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processDeleteDoc(String requestBody,
+                                   @Context HttpServletRequest request,
+                                   @Context HttpServletResponse httpResponse,
+                                   @Context HttpHeaders headers,
+                                   @PathParam("index") String index,
+                                   @PathParam("id") String id) {
+
+    // Forward the request to our document API to delete the document.
+    DocumentApi documentApi = new DocumentApi(this);
+    return documentApi.processDelete(requestBody, request, headers, httpResponse,
+        index, id, documentStore);
+  }
+
+
+  @GET
+  @Path("/indexes/{index}/query/{queryText}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processInlineQuery(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpHeaders headers,
+                                     @PathParam("index") String index,
+                                     @PathParam("queryText") String queryText) {
+
+    // Forward the request to our document API to execute the inline query.
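+    // For illustration only, an inline query might look something like
+    //
+    //   GET /indexes/my-index/query/firstName:Bob
+    //
+    // where "my-index" and the query text are hypothetical. The query text is
+    // passed through unchanged to DocumentStoreInterface.search(index, queryText).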
+    DocumentApi documentApi = new DocumentApi(this);
+    return documentApi.processSearchWithGet(requestBody, request, headers,
+        index, queryText, documentStore);
+  }
+
+
+  @GET
+  @Path("/indexes/{index}/query")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processQueryWithGet(String requestBody,
+                                      @Context HttpServletRequest request,
+                                      @Context HttpHeaders headers,
+                                      @PathParam("index") String index) {
+
+    // Forward the request to our document API to execute the query.
+    DocumentApi documentApi = new DocumentApi(this);
+    return documentApi.queryWithGetWithPayload(requestBody, request, headers, index, documentStore);
+  }
+
+  @POST
+  @Path("/indexes/{index}/query")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processQuery(String requestBody,
+                               @Context HttpServletRequest request,
+                               @Context HttpHeaders headers,
+                               @PathParam("index") String index) {
+
+    // Forward the request to our document API to execute the query.
+    DocumentApi documentApi = new DocumentApi(this);
+    return documentApi.processSearchWithPost(requestBody, request, headers, index, documentStore);
+  }
+
+
+  @POST
+  @Path("/bulk")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processBulkRequest(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpHeaders headers,
+                                     @PathParam("index") String index) {
+
+    // Forward the request to our bulk API to process the operations.
+    BulkApi bulkApi = new BulkApi(this);
+    return bulkApi.processPost(requestBody, request, headers, documentStore, apiUtils);
+  }
+
+  protected boolean validateRequest(HttpHeaders headers,
+                                    HttpServletRequest req,
+                                    Action action,
+                                    String authPolicyFunctionName) throws Exception {
+
+    SearchDbServiceAuth serviceAuth = new SearchDbServiceAuth();
+
+    String cipherSuite = (String) req.getAttribute("javax.servlet.request.cipher_suite");
+    String authUser = null;
+    if (cipherSuite != null) {
+      Object x509CertAttribute = req.getAttribute("javax.servlet.request.X509Certificate");
+      if (x509CertAttribute != null) {
+        X509Certificate[] certChain = (X509Certificate[]) x509CertAttribute;
+        X509Certificate clientCert = certChain[0];
+        X500Principal subjectDn = clientCert.getSubjectX500Principal();
+        authUser = subjectDn.toString();
+      }
+    }
+
+    if (authUser == null) {
+      return false;
+    }
+
+    String status = serviceAuth.authUser(headers, authUser.toLowerCase(),
+        action.toString() + ":" + authPolicyFunctionName);
+
+    return "OK".equals(status);
+  }
+}
--
cgit 1.2.3-korg